def process_message(msg):
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.error(e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()
def process_message(msg):
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()
def process_message(msg):
    from r2.lib.db.queries import (
        add_queries,
        add_to_commentstree_q,
        get_comments,
    )

    vote_data = json.loads(msg.body)
    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    timer = g.stats.get_timer("comment_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    comment = Comment._by_fullname(vote_data.pop("thing_fullname"))

    print "Processing vote by %s on %s %s" % (user, comment, vote_data)

    try:
        vote = Vote(
            user,
            comment,
            direction=vote_data["direction"],
            date=datetime.utcfromtimestamp(vote_data["date"]),
            data=vote_data["data"],
            event_data=vote_data.get("event_data"),
        )
    except TypeError as e:
        # a vote on an invalid type got in the queue, just skip it
        g.log.exception("Invalid type: %r", e.message)
        return

    vote.commit()
    timer.intermediate("create_vote_object")

    vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
    comment_valid = not (comment._spam or comment._deleted)
    if vote_valid and comment_valid:
        author = Account._byID(comment.author_id)
        add_queries(
            queries=[get_comments(author, sort, 'all') for sort in SORTS],
            insert_items=comment,
        )
        timer.intermediate("author_queries")

        # update the score periodically when a comment has many votes
        update_threshold = g.live_config['comment_vote_update_threshold']
        update_period = g.live_config['comment_vote_update_period']
        num_votes = comment.num_votes
        if num_votes <= update_threshold or num_votes % update_period == 0:
            add_to_commentstree_q(comment)

    timer.stop()
    timer.flush()
def process_message(msg):
    vote_data = json.loads(msg.body)
    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    timer = g.stats.get_timer("link_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    link = Link._by_fullname(vote_data.pop("thing_fullname"))

    # create the vote and update the voter's liked/disliked under lock so
    # that the vote state and cached query are consistent
    lock_key = "vote-%s-%s" % (user._id36, link._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, link, vote_data)

        try:
            vote = Vote(
                user,
                link,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
                # CUSTOM: voting model
                vote_direction=vote_data["vote_direction"],
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        vote.commit()
        timer.intermediate("create_vote_object")

        update_user_liked(vote)
        timer.intermediate("voter_likes")

    vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
    link_valid = not (link._spam or link._deleted)
    if vote_valid and link_valid:
        add_to_author_query_q(link)
        add_to_subreddit_query_q(link)
        add_to_domain_query_q(link)

    timer.stop()
    timer.flush()
def process_message(msg):
    vote_data = json.loads(msg.body)
    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    timer = g.stats.get_timer("link_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    link = Link._by_fullname(vote_data.pop("thing_fullname"))

    # create the vote and update the voter's liked/disliked under lock so
    # that the vote state and cached query are consistent
    lock_key = "vote-%s-%s" % (user._id36, link._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, link, vote_data)

        try:
            vote = Vote(
                user,
                link,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        vote.commit()
        timer.intermediate("create_vote_object")

        update_user_liked(vote)
        timer.intermediate("voter_likes")

    vote_valid = vote.is_automatic_initial_vote or vote.effects.affects_score
    link_valid = not (link._spam or link._deleted)
    if vote_valid and link_valid:
        add_to_author_query_q(link)
        add_to_subreddit_query_q(link)
        add_to_domain_query_q(link)

    timer.stop()
    timer.flush()
def process_message(msg):
    # the msg body is *probably* JSON
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    # the JSON is loaded into a python dict; it has the fields "user_id"
    # and "thing_fullname". a "thing" is a database object: a link, comment,
    # post, whatever -- everything can be upvoted/downvoted
    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    # this gets the user from the database/cache (either memcached or postgres)
    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    # this takes a server-wide lock: a bunch of consumers might be processing
    # items that touch the same "thing" (the same database object), so you
    # want a global lock to keep them from stepping on each other.
    # memcached stores the lock, atomically
    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()
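# A minimal producer-side sketch (not from the original codebase) showing the
# payload shape this consumer expects, based on the fields it reads above.
# The queue helper amqp.add_item and the queue name "vote_link_q" are
# assumptions, not confirmed parts of the codebase.
import json
import time

from r2.lib import amqp


def queue_vote_sketch(user, thing, direction):
    payload = {
        "user_id": user._id,                # popped by process_message
        "thing_fullname": thing._fullname,  # popped by process_message
        "direction": direction,             # passed straight to Vote()
        "date": time.time(),                # read via datetime.utcfromtimestamp
        "data": {},                         # extra vote attributes
        "event_data": None,                 # optional analytics payload
    }
    amqp.add_item("vote_link_q", json.dumps(payload))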
def process_message(msg):
    timer = g.stats.get_timer("new_voting.%s" % queue)
    timer.start()

    vote_data = json.loads(msg.body)

    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    # if it's an old-style vote, convert to the new format
    if "uid" in vote_data:
        vote_data = convert_old_vote_data(vote_data, msg.timestamp)

    user = Account._byID(vote_data.pop("user_id"), data=True)
    thing = Thing._by_fullname(vote_data.pop("thing_fullname"), data=True)

    timer.intermediate("preamble")

    lock_key = "vote-%s-%s" % (user._id36, thing._fullname)
    with g.make_lock("voting", lock_key, timeout=5):
        print "Processing vote by %s on %s %s" % (user, thing, vote_data)

        try:
            vote = Vote(
                user,
                thing,
                direction=vote_data["direction"],
                date=datetime.utcfromtimestamp(vote_data["date"]),
                data=vote_data["data"],
                event_data=vote_data.get("event_data"),
            )
        except TypeError as e:
            # a vote on an invalid type got in the queue, just skip it
            g.log.exception("Invalid type: %r", e.message)
            return

        timer.intermediate("create_vote_obj")

        vote.commit()
        timer.flush()
def get_cached_downvotes(self, content_cls):
    kind = content_cls.__name__.lower()
    downvotes = g.cache.get(self.vote_cache_key(kind))
    if downvotes is None:
        vote_cls = Vote.rel(Account, content_cls)
        downvotes = len(list(vote_cls._query(Vote.c._thing1_id == self._id,
                                             Vote.c._name == str(-1))))
        g.cache.set(self.vote_cache_key(kind), downvotes)
    return downvotes
def update_vote_lookups(user, thing, direction):
    """Store info about the existence of this vote (before processing)."""
    # set the vote in memcached so the UI gets updated immediately
    key = prequeued_vote_key(user, thing)
    grace_period = int(g.vote_queue_grace_period.total_seconds())
    direction = Vote.serialize_direction(direction)
    g.gencache.set(key, direction, time=grace_period + 1)

    # update LastModified immediately to help us cull prequeued_vote lookups
    rel_cls = VotesByAccount.rel(thing.__class__)
    LastModified.touch(user._fullname, rel_cls._last_modified_name)
def update_vote_lookups(user, thing, direction):
    """Store info about the existence of this vote (before processing)."""
    # set the vote in memcached so the UI gets updated immediately
    key = prequeued_vote_key(user, thing)
    grace_period = int(g.vote_queue_grace_period.total_seconds())
    direction = Vote.serialize_direction(direction)
    g.cache.set(key, direction, time=grace_period + 1)

    # update LastModified immediately to help us cull prequeued_vote lookups
    rel_cls = VotesByAccount.rel(thing.__class__)
    LastModified.touch(user._fullname, rel_cls._last_modified_name)
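# A minimal lookup-side sketch showing how the prequeued vote might be read
# back before the queue consumer has processed it, so the UI reflects the
# vote immediately. The function name is hypothetical, and
# Vote.deserialize_direction is assumed to mirror serialize_direction; only
# prequeued_vote_key and g.cache come from the snippets above.
def get_prequeued_vote_direction(user, thing):
    key = prequeued_vote_key(user, thing)
    serialized = g.cache.get(key)
    if serialized is None:
        return None
    return Vote.deserialize_direction(serialized)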
def check_downvote(self):
    """Check whether this account has enough karma to cast a downvote.

    An account's total number of downvotes must be less than four times
    the account's total karma. Raises NotEnoughKarma if the account is not
    able to cast a downvote.
    """
    from r2.models.vote import Vote

    downvote_count = g.cache.get(self.vote_cache_key())
    if downvote_count is None:
        downvote_count = len(list(Vote._query(Vote.c._thing1_id == self._id,
                                              Vote.c._name == str(-1))))
        g.cache.set(self.vote_cache_key(), downvote_count)

    karma_threshold = self.safe_karma * 4
    if karma_threshold <= downvote_count:
        msg = _('Your total down votes (%d) must be less than four times '
                'your karma (%d)') % (downvote_count, karma_threshold)
        raise NotEnoughKarma(msg)
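# A hypothetical caller, assuming NotEnoughKarma is surfaced to the request
# handler: check_downvote runs before a downvote is queued. With
# safe_karma == 50 the threshold is 200, so the check raises once the account
# already has 200 or more recorded downvotes.
def validate_downvote_sketch(account):
    try:
        account.check_downvote()
    except NotEnoughKarma as e:
        # return the translated message to the client instead of queueing
        return e.message
    return None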
def get_cached_downvotes(self, content_cls):
    kind = content_cls.__name__.lower()
    cache_key = self.downvote_cache_key(kind)

    downvotes = g.cache.get(cache_key)
    if downvotes is None:
        vote_cls = Vote.rel(Account, content_cls)

        # Get a count of content_cls downvotes
        type = tdb.rel_types_id[vote_cls._type_id]
        # rt = rel table
        # dt = data table
        # tt = thing table
        rt, account_tt, content_cls_tt, dt = type.rel_table

        cols = [sa.func.count(rt.c.rel_id)]
        where = sa.and_(rt.c.thing1_id == self._id, rt.c.name == '-1')
        query = sa.select(cols, where)
        downvotes = query.execute().scalar()

        g.cache.set(cache_key, downvotes)

    return downvotes
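# For reference, the SQLAlchemy expression above compiles to roughly the
# following aggregate query; the relation table's real name is resolved at
# runtime via tdb.rel_types_id, so the name here is illustrative only.
#
#   SELECT count(rel_id)
#   FROM <vote_rel_table>
#   WHERE thing1_id = <account id> AND name = '-1';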
def process_message(msg):
    from r2.lib.comment_tree import write_comment_scores
    from r2.lib.db.queries import (
        add_queries,
        add_to_commentstree_q,
        get_comments,
    )
    from r2.models.builder import get_active_sort_orders_for_link

    vote_data = json.loads(msg.body)
    hook = hooks.get_hook('vote.validate_vote_data')
    if hook.call_until_return(msg=msg, vote_data=vote_data) is False:
        # Corrupt records in the queue. Ignore them.
        print "Ignoring invalid vote by %s on %s %s" % (
            vote_data.get('user_id', '<unknown>'),
            vote_data.get('thing_fullname', '<unknown>'),
            vote_data)
        return

    timer = g.stats.get_timer("comment_vote_processor")
    timer.start()

    user = Account._byID(vote_data.pop("user_id"))
    comment = Comment._by_fullname(vote_data.pop("thing_fullname"))

    print "Processing vote by %s on %s %s" % (user, comment, vote_data)

    try:
        vote = Vote(
            user,
            comment,
            direction=vote_data["direction"],
            date=datetime.utcfromtimestamp(vote_data["date"]),
            data=vote_data["data"],
            event_data=vote_data.get("event_data"),
        )
    except TypeError as e:
        # a vote on an invalid type got in the queue, just skip it
        g.log.exception("Invalid type: %r", e.message)
        return

    vote.commit()
    timer.intermediate("create_vote_object")

    vote_invalid = (not vote.effects.affects_score and
                    not vote.is_automatic_initial_vote)
    comment_invalid = comment._spam or comment._deleted
    if vote_invalid or comment_invalid:
        timer.stop()
        timer.flush()
        return

    author = Account._byID(comment.author_id)
    add_queries(
        queries=[get_comments(author, sort, 'all') for sort in SORTS],
        insert_items=comment,
    )
    timer.intermediate("author_queries")

    update_threshold = g.live_config['comment_vote_update_threshold']
    update_period = g.live_config['comment_vote_update_period']
    skip_score_update = (comment.num_votes > update_threshold and
                         comment.num_votes % update_period != 0)

    # skip updating scores if this was the automatic initial vote. those
    # updates will be handled by new_comment. Also only update scores
    # periodically once a comment has many votes.
    if not vote.is_automatic_initial_vote and not skip_score_update:
        # check whether this link is using precomputed sorts, if it is
        # we'll need to push an update to commentstree_q
        link = Link._byID(comment.link_id)
        if get_active_sort_orders_for_link(link):
            # send this comment to commentstree_q where we will update
            # CommentScoresByLink, CommentTree (noop), and CommentOrderer
            add_to_commentstree_q(comment)
        else:
            # the link isn't using precomputed sorts, so just update the
            # scores
            write_comment_scores(link, [comment])
        timer.intermediate("update_scores")

    timer.stop()
    timer.flush()
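# A small worked example of the throttling logic above, with assumed values
# for the live-config knobs (comment_vote_update_threshold=10,
# comment_vote_update_period=5): every vote on a comment with at most 10
# votes triggers a score update, after that only every 5th vote does.
def should_update_scores_sketch(num_votes, update_threshold=10, update_period=5):
    skip_score_update = (num_votes > update_threshold and
                         num_votes % update_period != 0)
    return not skip_score_update

# e.g. should_update_scores_sketch(8) -> True
#      should_update_scores_sketch(13) -> False
#      should_update_scores_sketch(15) -> True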
def cast_vote(self, **kw):
    kw.setdefault("date", datetime.now(pytz.UTC))
    kw.setdefault("direction", Vote.DIRECTIONS.up)
    kw.setdefault("get_previous_vote", False)
    kw.setdefault("data", self.vote_data)

    return Vote(user=self.user, thing=self.thing, **kw)
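# A minimal usage sketch, assuming this helper lives on a test case that
# provides self.user, self.thing, and self.vote_data (the names used above),
# and that Vote.DIRECTIONS also defines "down" alongside the "up" default:
#
#   vote = self.cast_vote(direction=Vote.DIRECTIONS.down)
#   vote.commit()
#
# Overrides are passed straight through as keyword arguments, so any Vote
# constructor argument can be substituted per test.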