def process_message(msgs, chan): """Update get_submitted(), the Links by author precomputed query. get_submitted() is a CachedResult which is stored in permacache. To update these objects we need to do a read-modify-write which requires obtaining a lock. Sharding these updates by author allows us to run multiple consumers (but ideally just one per shard) to avoid lock contention. """ from r2.lib.db.queries import add_queries, get_submitted link_names = {msg.body for msg in msgs} links = Link._by_fullname(link_names, return_dict=False) print 'Processing %r' % (links, ) links_by_author_id = defaultdict(list) for link in links: links_by_author_id[link.author_id].append(link) authors_by_id = Account._byID(links_by_author_id.keys()) for author_id, links in links_by_author_id.iteritems(): with g.stats.get_timer("link_vote_processor.author_queries"): author = authors_by_id[author_id] add_queries( queries=[ get_submitted(author, sort, 'all') for sort in SORTS ], insert_items=links, )
def query(self):
    """Build the precomputed query for the requested profile listing.

    Dispatches on self.where ('overview', 'comments', 'submitted',
    'liked'/'disliked', 'hidden', or an admin-only fallthrough).
    Branches that have SUP feeds publish a SUP header and validate the
    client's last-modified header before the cached query is built.
    Returns the query, or aborts with a 404 when no branch matches.
    """
    q = None
    if self.where == 'overview':
        self.check_modified(self.vuser, 'overview')
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == 'comments':
        sup.set_sup_header(self.vuser, 'commented')
        self.check_modified(self.vuser, 'commented')
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == 'submitted':
        sup.set_sup_header(self.vuser, 'submitted')
        self.check_modified(self.vuser, 'submitted')
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where in ('liked', 'disliked'):
        # both listings share the same SUP/modified bookkeeping key
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == 'liked':
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == 'hidden':
        q = queries.get_hidden(self.vuser)
    elif c.user_is_admin:
        # unrecognized `where` values are only queryable by admins
        q = admin_profile_query(self.vuser, self.where, desc('_date'))
    if q is None:
        return self.abort404()
    return q
def get_links(self, sort, time):
    """Return links submitted by the current user's important friends.

    When the query cache is enabled, merges each friend's precomputed
    get_submitted() results (forced to the 'new' sort); otherwise runs
    a direct Link query filtered by author and time.
    """
    from r2.lib.db import queries
    from r2.models import Link
    from r2.controllers.errors import UserRequiredException

    if not c.user_is_loggedin:
        raise UserRequiredException

    friends = self.get_important_friends(c.user._id)
    if not friends:
        return []

    if not g.use_query_cache:
        # direct query path: no precomputed results available
        q = Link._query(Link.c.author_id == friends,
                        sort=queries.db_sort(sort),
                        data=True)
        if time != 'all':
            q._filter(queries.db_times[time])
        return q

    # with the precomputer enabled, this Subreddit only supports
    # being sorted by 'new'. it would be nice to have a
    # cleaner UI than just blatantly ignoring their sort,
    # though
    sort = 'new'
    time = 'all'

    friend_accounts = Account._byID(friends, return_dict=False)
    cached_results = []
    for friend in friend_accounts:
        cached_results.append(queries.get_submitted(friend, sort, time))
    return queries.MergedCachedResults(cached_results)
def get_links(self, sort, time):
    """Return links submitted by the current user's important friends.

    Uses merged precomputed get_submitted() results (sort forced to
    "new") when the query cache is on; otherwise falls back to a raw
    Link query by author with an optional time filter.
    """
    from r2.lib.db import queries
    from r2.models import Link
    from r2.controllers.errors import UserRequiredException

    if not c.user_is_loggedin:
        raise UserRequiredException

    friends = self.get_important_friends(c.user._id)
    if not friends:
        return []

    if g.use_query_cache:
        # with the precomputer enabled, this Subreddit only supports
        # being sorted by 'new'. it would be nice to have a
        # cleaner UI than just blatantly ignoring their sort,
        # though
        sort, time = "new", "all"
        friend_accounts = Account._byID(friends, return_dict=False)
        merged = [
            queries.get_submitted(friend, sort, time)
            for friend in friend_accounts
        ]
        return queries.MergedCachedResults(merged)

    # cache disabled: query the Link table directly
    raw_query = Link._query(Link.c.author_id == friends,
                            sort=queries.db_sort(sort), data=True)
    if time != "all":
        raw_query._filter(queries.db_times[time])
    return raw_query
def query(self):
    """Build the precomputed query for the requested profile listing.

    Dispatches on self.where ("overview", "comments", "submitted",
    "liked"/"disliked", "hidden", or an admin-only fallthrough).
    Branches with SUP feeds publish a SUP header and check the client's
    last-modified header first. Aborts with 404 when nothing matches.
    """
    q = None
    if self.where == "overview":
        self.check_modified(self.vuser, "overview")
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == "comments":
        sup.set_sup_header(self.vuser, "commented")
        self.check_modified(self.vuser, "commented")
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == "submitted":
        sup.set_sup_header(self.vuser, "submitted")
        self.check_modified(self.vuser, "submitted")
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where in ("liked", "disliked"):
        # both listings share one SUP/modified bookkeeping key
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == "liked":
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == "hidden":
        q = queries.get_hidden(self.vuser)
    elif c.user_is_admin:
        # unrecognized `where` values are only queryable by admins
        q = admin_profile_query(self.vuser, self.where, desc("_date"))
    if q is None:
        return self.abort404()
    return q
def process_message(msgs, chan): """Update get_submitted(), the Links by author precomputed query. get_submitted() is a CachedResult which is stored in permacache. To update these objects we need to do a read-modify-write which requires obtaining a lock. Sharding these updates by author allows us to run multiple consumers (but ideally just one per shard) to avoid lock contention. """ from r2.lib.db.queries import add_queries, get_submitted link_names = {msg.body for msg in msgs} links = Link._by_fullname(link_names, return_dict=False) print 'Processing %r' % (links,) links_by_author_id = defaultdict(list) for link in links: links_by_author_id[link.author_id].append(link) authors_by_id = Account._byID(links_by_author_id.keys()) for author_id, links in links_by_author_id.iteritems(): with g.stats.get_timer("link_vote_processor.author_queries"): author = authors_by_id[author_id] add_queries( queries=[ get_submitted(author, sort, 'all') for sort in SORTS], insert_items=links, )
def gen_keys(): yield promoted_memo_key # just let this one do its own writing load_all_reddits() yield queries.get_all_comments().iden l_q = Link._query(Link.c._spam == (True, False), Link.c._deleted == (True, False), sort=desc('_date'), data=True, ) for link in fetch_things2(l_q, verbosity): yield comments_key(link._id) yield last_modified_key(link, 'comments') a_q = Account._query(Account.c._spam == (True, False), sort=desc('_date'), ) for account in fetch_things2(a_q, verbosity): yield messages_key(account._id) yield last_modified_key(account, 'overview') yield last_modified_key(account, 'commented') yield last_modified_key(account, 'submitted') yield last_modified_key(account, 'liked') yield last_modified_key(account, 'disliked') yield queries.get_comments(account, 'new', 'all').iden yield queries.get_submitted(account, 'new', 'all').iden yield queries.get_liked(account).iden yield queries.get_disliked(account).iden yield queries.get_hidden(account).iden yield queries.get_saved(account).iden yield queries.get_inbox_messages(account).iden yield queries.get_unread_messages(account).iden yield queries.get_inbox_comments(account).iden yield queries.get_unread_comments(account).iden yield queries.get_inbox_selfreply(account).iden yield queries.get_unread_selfreply(account).iden yield queries.get_sent(account).iden sr_q = Subreddit._query(Subreddit.c._spam == (True, False), sort=desc('_date'), ) for sr in fetch_things2(sr_q, verbosity): yield last_modified_key(sr, 'stylesheet_contents') yield queries.get_links(sr, 'hot', 'all').iden yield queries.get_links(sr, 'new', 'all').iden for sort in 'top', 'controversial': for time in 'hour', 'day', 'week', 'month', 'year', 'all': yield queries.get_links(sr, sort, time, merge_batched=False).iden yield queries.get_spam_links(sr).iden yield queries.get_spam_comments(sr).iden yield queries.get_reported_links(sr).iden yield queries.get_reported_comments(sr).iden yield queries.get_subreddit_messages(sr).iden yield 
queries.get_unread_subreddit_messages(sr).iden
def query(self):
    """Build the precomputed query for the requested profile listing.

    Like the basic profile dispatcher but additionally supports
    'gilded' (given vs received via self.show), a gold-only
    per-subreddit filter on 'saved' (?sr= query param), and a
    sponsor-only 'promoted' listing. Aborts with 404 when no branch
    matches.
    """
    q = None
    if self.where == 'overview':
        self.check_modified(self.vuser, 'overview')
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == 'comments':
        sup.set_sup_header(self.vuser, 'commented')
        self.check_modified(self.vuser, 'commented')
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == 'submitted':
        sup.set_sup_header(self.vuser, 'submitted')
        self.check_modified(self.vuser, 'submitted')
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where == 'gilded':
        sup.set_sup_header(self.vuser, 'gilded')
        self.check_modified(self.vuser, 'gilded')
        # self.show == 'given' lists gildings the user gave;
        # otherwise their comments that received gold
        if self.show == 'given':
            q = queries.get_user_gildings(self.vuser)
        else:
            q = queries.get_gilded_user_comments(self.vuser)
    elif self.where in ('liked', 'disliked'):
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == 'liked':
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == 'hidden':
        q = queries.get_hidden(self.vuser)
    elif self.where == 'saved':
        # gold users may filter saved things by subreddit; an unknown
        # subreddit name silently falls back to the unfiltered listing
        srname = request.GET.get('sr')
        if srname and c.user.gold:
            try:
                sr_id = Subreddit._by_name(srname)._id
            except NotFound:
                sr_id = None
        else:
            sr_id = None
        q = queries.get_saved(self.vuser, sr_id)
    elif c.user_is_sponsor and self.where == 'promoted':
        q = queries.get_promoted_links(self.vuser._id)
    if q is None:
        return self.abort404()
    return q
def query(self):
    """Build the precomputed query for the requested profile listing.

    Supports the standard profile sections plus "gilded" (given vs
    received via self.show), a gold-only per-subreddit filter on
    "saved" (?sr= query param), and a sponsor-only "promoted" listing.
    Aborts with 404 when no branch matches.
    """
    q = None
    if self.where == "overview":
        self.check_modified(self.vuser, "overview")
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == "comments":
        sup.set_sup_header(self.vuser, "commented")
        self.check_modified(self.vuser, "commented")
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == "submitted":
        sup.set_sup_header(self.vuser, "submitted")
        self.check_modified(self.vuser, "submitted")
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where == "gilded":
        sup.set_sup_header(self.vuser, "gilded")
        self.check_modified(self.vuser, "gilded")
        # "given" lists gildings the user gave; otherwise their
        # comments that received gold
        if self.show == "given":
            q = queries.get_user_gildings(self.vuser)
        else:
            q = queries.get_gilded_user_comments(self.vuser)
    elif self.where in ("liked", "disliked"):
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == "liked":
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == "hidden":
        q = queries.get_hidden(self.vuser)
    elif self.where == "saved":
        # gold users may filter saved things by subreddit; an unknown
        # subreddit name silently falls back to the unfiltered listing
        srname = request.GET.get("sr")
        if srname and c.user.gold:
            try:
                sr_id = Subreddit._by_name(srname)._id
            except NotFound:
                sr_id = None
        else:
            sr_id = None
        q = queries.get_saved(self.vuser, sr_id)
    elif c.user_is_sponsor and self.where == "promoted":
        q = queries.get_promoted_links(self.vuser._id)
    if q is None:
        return self.abort404()
    return q
def gen_keys(): yield promoted_memo_key # just let this one do its own writing load_all_reddits() yield queries.get_all_comments().iden l_q = Link._query( Link.c._spam == (True, False), Link.c._deleted == (True, False), sort=desc("_date"), data=True ) for link in fetch_things2(l_q, verbosity): yield comments_key(link._id) yield last_modified_key(link, "comments") a_q = Account._query(Account.c._spam == (True, False), sort=desc("_date")) for account in fetch_things2(a_q, verbosity): yield messages_key(account._id) yield last_modified_key(account, "overview") yield last_modified_key(account, "commented") yield last_modified_key(account, "submitted") yield last_modified_key(account, "liked") yield last_modified_key(account, "disliked") yield queries.get_comments(account, "new", "all").iden yield queries.get_submitted(account, "new", "all").iden yield queries.get_liked(account).iden yield queries.get_disliked(account).iden yield queries.get_hidden(account).iden yield queries.get_saved(account).iden yield queries.get_inbox_messages(account).iden yield queries.get_unread_messages(account).iden yield queries.get_inbox_comments(account).iden yield queries.get_unread_comments(account).iden yield queries.get_inbox_selfreply(account).iden yield queries.get_unread_selfreply(account).iden yield queries.get_sent(account).iden sr_q = Subreddit._query(Subreddit.c._spam == (True, False), sort=desc("_date")) for sr in fetch_things2(sr_q, verbosity): yield last_modified_key(sr, "stylesheet_contents") yield queries.get_links(sr, "hot", "all").iden yield queries.get_links(sr, "new", "all").iden for sort in "top", "controversial": for time in "hour", "day", "week", "month", "year", "all": yield queries.get_links(sr, sort, time, merge_batched=False).iden yield queries.get_spam_links(sr).iden yield queries.get_spam_comments(sr).iden yield queries.get_reported_links(sr).iden yield queries.get_reported_comments(sr).iden yield queries.get_subreddit_messages(sr).iden yield 
queries.get_unread_subreddit_messages(sr).iden
def query(self):
    """Build the precomputed query for the requested profile listing.

    Supports the standard profile sections plus 'gilded' (given vs
    received via self.show), category-aware 'saved' (gold users with no
    category get the SavedBuilder), and a sponsor-only 'promoted'
    listing. Aborts with 404 when no branch matches.
    """
    q = None
    if self.where == 'overview':
        self.check_modified(self.vuser, 'overview')
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == 'comments':
        sup.set_sup_header(self.vuser, 'commented')
        self.check_modified(self.vuser, 'commented')
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == 'submitted':
        sup.set_sup_header(self.vuser, 'submitted')
        self.check_modified(self.vuser, 'submitted')
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where == 'gilded':
        sup.set_sup_header(self.vuser, 'gilded')
        self.check_modified(self.vuser, 'gilded')
        # 'given' lists gildings the user gave; otherwise things the
        # user received gold for
        if self.show == 'given':
            q = queries.get_user_gildings(self.vuser)
        else:
            q = queries.get_gilded_user(self.vuser)
    elif self.where in ('liked', 'disliked'):
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == 'liked':
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == 'hidden':
        q = queries.get_hidden(self.vuser)
    elif self.where == 'saved':
        # gold users browsing without a category get the category-aware
        # builder; optionally filtered to a single saved subreddit
        if not self.savedcategory and c.user.gold:
            self.builder_cls = SavedBuilder
        sr_id = self.savedsr._id if self.savedsr else None
        q = queries.get_saved(self.vuser, sr_id,
                              category=self.savedcategory)
    elif c.user_is_sponsor and self.where == 'promoted':
        q = queries.get_promoted_links(self.vuser._id)
    if q is None:
        return self.abort404()
    return q
def query(self):
    """Build the precomputed query for the requested profile listing.

    Supports the standard profile sections plus "gilded" (given vs
    received via self.show), category-aware "saved" (gold users with no
    category get the SavedBuilder), and a sponsor-only "promoted"
    listing. Aborts with 404 when no branch matches.
    """
    q = None
    if self.where == "overview":
        self.check_modified(self.vuser, "overview")
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == "comments":
        sup.set_sup_header(self.vuser, "commented")
        self.check_modified(self.vuser, "commented")
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == "submitted":
        sup.set_sup_header(self.vuser, "submitted")
        self.check_modified(self.vuser, "submitted")
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where == "gilded":
        sup.set_sup_header(self.vuser, "gilded")
        self.check_modified(self.vuser, "gilded")
        # "given" lists gildings the user gave; otherwise things the
        # user received gold for
        if self.show == "given":
            q = queries.get_user_gildings(self.vuser)
        else:
            q = queries.get_gilded_user(self.vuser)
    elif self.where in ("liked", "disliked"):
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == "liked":
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == "hidden":
        q = queries.get_hidden(self.vuser)
    elif self.where == "saved":
        # gold users browsing without a category get the category-aware
        # builder; optionally filtered to a single saved subreddit
        if not self.savedcategory and c.user.gold:
            self.builder_cls = SavedBuilder
        sr_id = self.savedsr._id if self.savedsr else None
        q = queries.get_saved(self.vuser, sr_id,
                              category=self.savedcategory)
    elif c.user_is_sponsor and self.where == "promoted":
        q = queries.get_promoted_links(self.vuser._id)
    if q is None:
        return self.abort404()
    return q
def query(self):
    """Build the query for the requested profile listing (RSS-era variant).

    Dispatches on self.where; the *rss variants share the underlying
    query with their HTML counterparts but skip the self.skip pagination
    flag. All listings here are hard-coded to 'new'/'all' sorting.
    Aborts with 404 when no branch matches.
    """
    q = None
    if self.where == 'profile':
        q = object # dummy value
    if self.where == 'overview':
        self.skip = True
        self.check_modified(self.vuser, 'overview')
        q = queries.get_overview(self.vuser, 'new', 'all')
    elif self.where == 'overviewrss':
        # NOTE(review): this checks the 'overviewrss' modified key while
        # 'commentsrss' below reuses 'commented' — confirm whether this
        # asymmetry is intentional.
        self.check_modified(self.vuser, 'overviewrss')
        q = queries.get_overview(self.vuser, 'new', 'all')
    elif self.where == 'comments':
        self.check_modified(self.vuser, 'commented')
        q = queries.get_comments(self.vuser, 'new', 'all')
    elif self.where == 'commentsrss':
        self.check_modified(self.vuser, 'commented')
        q = queries.get_comments(self.vuser, 'new', 'all')
    elif self.where == 'submitted':
        self.skip = True
        self.check_modified(self.vuser, 'submitted')
        q = queries.get_submitted(self.vuser, 'new', 'all')
    elif self.where in ('liked', 'disliked'):
        self.check_modified(self.vuser, self.where)
        # admins see all items; non-admins get the filtered listing
        if self.where == 'liked':
            q = queries.get_liked(self.vuser, not c.user_is_admin)
        else:
            q = queries.get_disliked(self.vuser, not c.user_is_admin)
    elif self.where == 'hidden':
        q = queries.get_hidden(self.vuser, not c.user_is_admin)
    elif self.where == 'drafts':
        q = queries.get_drafts(self.vuser)
    if q is None:
        return self.abort404()
    return q
def get_links(self, sort, time):
    """Return merged precomputed submissions from important friends.

    The caller-supplied sort/time are always overridden to "new"/"all"
    because only that precomputed listing exists for this subreddit.
    """
    from r2.lib.db import queries

    if not c.user_is_loggedin:
        raise UserRequiredException

    friends = self.get_important_friends(c.user._id)
    if not friends:
        return []

    # with the precomputer enabled, this Subreddit only supports
    # being sorted by 'new'. it would be nice to have a
    # cleaner UI than just blatantly ignoring their sort,
    # though
    sort, time = "new", "all"

    friend_accounts = Account._byID(friends, return_dict=False)
    cached_results = []
    for friend in friend_accounts:
        cached_results.append(queries.get_submitted(friend, sort, time))
    return queries.MergedCachedResults(cached_results)
def get_links(self, sort, time):
    """Return merged precomputed submissions from important friends.

    The caller-supplied sort/time are always overridden to 'new'/'all'
    because only that precomputed listing exists for this subreddit.
    """
    from r2.lib.db import queries

    if not c.user_is_loggedin:
        raise UserRequiredException

    friend_ids = self.get_important_friends(c.user._id)
    if not friend_ids:
        return []

    # with the precomputer enabled, this Subreddit only supports
    # being sorted by 'new'. it would be nice to have a
    # cleaner UI than just blatantly ignoring their sort,
    # though
    sort = 'new'
    time = 'all'

    accounts = Account._byID(friend_ids, return_dict=False)
    merged = [
        queries.get_submitted(account, sort, time)
        for account in accounts
    ]
    return queries.MergedCachedResults(merged)
def query(self):
    """Build the precomputed query for the requested profile listing.

    Standard profile sections plus unfiltered 'saved' and a
    sponsor-only 'promoted' listing via queries.get_promoted_links.
    Aborts with 404 when no branch matches.
    """
    q = None
    if self.where == 'overview':
        self.check_modified(self.vuser, 'overview')
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == 'comments':
        sup.set_sup_header(self.vuser, 'commented')
        self.check_modified(self.vuser, 'commented')
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == 'submitted':
        sup.set_sup_header(self.vuser, 'submitted')
        self.check_modified(self.vuser, 'submitted')
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where in ('liked', 'disliked'):
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == 'liked':
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == 'hidden':
        q = queries.get_hidden(self.vuser)
    elif self.where == 'saved':
        q = queries.get_saved(self.vuser)
    elif c.user_is_sponsor and self.where == 'promoted':
        q = queries.get_promoted_links(self.vuser._id)
    if q is None:
        return self.abort404()
    return q
def query(self):
    """Build the precomputed query for the requested profile listing.

    Standard profile sections plus unfiltered 'saved' and a
    sponsor-only 'promoted' listing via promote.get_all_links.
    Aborts with 404 when no branch matches.
    """
    q = None
    if self.where == 'overview':
        self.check_modified(self.vuser, 'overview')
        q = queries.get_overview(self.vuser, self.sort, self.time)
    elif self.where == 'comments':
        sup.set_sup_header(self.vuser, 'commented')
        self.check_modified(self.vuser, 'commented')
        q = queries.get_comments(self.vuser, self.sort, self.time)
    elif self.where == 'submitted':
        sup.set_sup_header(self.vuser, 'submitted')
        self.check_modified(self.vuser, 'submitted')
        q = queries.get_submitted(self.vuser, self.sort, self.time)
    elif self.where in ('liked', 'disliked'):
        sup.set_sup_header(self.vuser, self.where)
        self.check_modified(self.vuser, self.where)
        if self.where == 'liked':
            q = queries.get_liked(self.vuser)
        else:
            q = queries.get_disliked(self.vuser)
    elif self.where == 'hidden':
        q = queries.get_hidden(self.vuser)
    elif self.where == 'saved':
        q = queries.get_saved(self.vuser)
    elif c.user_is_sponsor and self.where == 'promoted':
        # sponsors see the promoted-links listing from promote, not
        # the queries module used by other branches
        q = promote.get_all_links(self.vuser._id)
    if q is None:
        return self.abort404()
    return q
def _get_modmail_userinfo(self, conversation, sr=None):
    """Assemble a serializable summary of a modmail conversation's user.

    Returns a dict with the participant's identity, ban/mute status for
    the conversation's subreddit, and up to 3 recent posts and 3 recent
    comments in that subreddit.

    Raises ValueError for internal conversations, missing participants,
    or inaccessible accounts (deleted / permanently timed out), and
    NotFound when the participant lookup fails.
    """
    if conversation.is_internal:
        raise ValueError('Cannot get userinfo for internal conversations')

    if not sr:
        sr = Subreddit._by_fullname(conversation.owner_fullname)

    # Retrieve the participant associated with the conversation
    try:
        account = conversation.get_participant_account()

        if not account:
            raise ValueError('No account associated with convo')

        permatimeout = (account.in_timeout and
                        account.days_remaining_in_timeout == 0)

        if account._deleted or permatimeout:
            raise ValueError('User info is inaccessible')
    except NotFound:
        raise NotFound('Unable to retrieve conversation participant')

    # Fetch the mute and ban status of the participant as it relates
    # to the subreddit associated with the conversation.
    mute_status = sr.is_muted(account)
    ban_status = sr.is_banned(account)

    # Parse the ban status and retrieve the length of the ban,
    # then output the data into a serializable dict
    ban_result = {
        'isBanned': bool(ban_status),
        'reason': '',
        'endDate': None,
        'isPermanent': False
    }

    if ban_status:
        ban_result['reason'] = getattr(ban_status, 'note', '')
        # a temp ban has an expiry date; absence means it is permanent
        ban_duration = sr.get_tempbans('banned', account.name)
        ban_duration = ban_duration.get(account.name)

        if ban_duration:
            ban_result['endDate'] = ban_duration.isoformat()
        else:
            ban_result['isPermanent'] = True
            ban_result['endDate'] = None

    # Parse the mute status and retrieve the length of the ban,
    # then output the data into the serializable dict
    mute_result = {
        'isMuted': bool(mute_status),
        'endDate': None,
        'reason': ''
    }

    if mute_status:
        mute_result['reason'] = getattr(mute_status, 'note', '')
        muted_items = sr.get_muted_items(account.name)
        mute_duration = muted_items.get(account.name)

        if mute_duration:
            mute_result['endDate'] = mute_duration.isoformat()

    # Retrieve the participants post and comment fullnames from cache;
    # shadowbanned (spam) accounts expose no recent activity
    post_fullnames = []
    comment_fullnames = []

    if not account._spam:
        post_fullnames = list(
            queries.get_submitted(account, 'new', 'all'))[:100]

        comment_fullnames = list(
            queries.get_comments(account, 'new', 'all'))[:100]

    # Retrieve the associated link objects for posts and comments
    # using the retrieve fullnames, afer the link objects are retrieved
    # create a serializable dict with the the necessary information from
    # the endpoint.
    lookup_fullnames = list(set(post_fullnames) | set(comment_fullnames))
    posts = Thing._by_fullname(lookup_fullnames)

    serializable_posts = {}

    # keep only the first 3 non-deleted posts made in this subreddit
    for fullname in post_fullnames:
        if len(serializable_posts) == 3:
            break

        post = posts[fullname]

        if post.sr_id == sr._id and not post._deleted:
            serializable_posts[fullname] = {
                'title': post.title,
                'permalink': post.make_permalink(sr, force_domain=True),
                'date': post._date.isoformat(),
            }

    # Extract the users most recent comments associated with the
    # subreddit
    sr_comments = []

    for fullname in comment_fullnames:
        if len(sr_comments) == 3:
            break

        comment = posts[fullname]

        if comment.sr_id == sr._id and not comment._deleted:
            sr_comments.append(comment)

    # Retrieve all associated link objects (combines lookup)
    comment_links = Link._byID(
        [sr_comment.link_id for sr_comment in sr_comments])

    # Serialize all of the user's sr comments; bodies are truncated
    # to 140 characters with an ellipsis
    serializable_comments = {}

    for sr_comment in sr_comments:
        comment_link = comment_links[sr_comment.link_id]
        comment_body = sr_comment.body

        if len(comment_body) > 140:
            comment_body = '{:.140}...'.format(comment_body)

        serializable_comments[sr_comment._fullname] = {
            'title': comment_link.title,
            'comment': comment_body,
            'permalink': sr_comment.make_permalink(comment_link, sr,
                                                   force_domain=True),
            'date': sr_comment._date.isoformat(),
        }

    return {
        'id': account._fullname,
        'name': account.name,
        'created': account._date.isoformat(),
        'banStatus': ban_result,
        'isShadowBanned': account._spam,
        'muteStatus': mute_result,
        'recentComments': serializable_comments,
        'recentPosts': serializable_posts,
    }