def new_report(thing):
    """Add a newly reported Link or Comment to its subreddit's
    reported-items precomputed query; other thing types are ignored."""
    if not isinstance(thing, (Link, Comment)):
        return
    subreddit = Subreddit._byID(thing.sr_id)
    if isinstance(thing, Link):
        query = get_reported_links(subreddit)
    else:
        query = get_reported_comments(subreddit)
    add_queries([query], insert_items = thing)
def new_report(thing):
    """Record a report: insert the thing into the reported-links or
    reported-comments query for its subreddit, depending on its type."""
    if isinstance(thing, Link):
        query_fn = get_reported_links
    elif isinstance(thing, Comment):
        query_fn = get_reported_comments
    else:
        return
    sr = Subreddit._byID(thing.sr_id)
    add_queries([query_fn(sr)], insert_items=thing)
def process_message(msgs, chan):
    """Update get_links(), the Links by Subreddit precomputed query.

    get_links() is a CachedResult which is stored in permacache. To
    update these objects we need to do a read-modify-write which requires
    obtaining a lock. Sharding these updates by subreddit allows us to run
    multiple consumers (but ideally just one per shard) to avoid lock
    contention.

    """
    # local import to avoid a circular dependency at module load time
    from r2.lib.db.queries import add_queries, get_links

    # each message body is a link fullname; de-dupe before lookup
    link_names = {msg.body for msg in msgs}
    links = Link._by_fullname(link_names, return_dict=False)
    print 'Processing %r' % (links,)  # consumer progress log (Python 2)

    # group links by subreddit so each sr's queries are updated in one pass
    links_by_sr_id = defaultdict(list)
    for link in links:
        links_by_sr_id[link.sr_id].append(link)

    srs_by_id = Subreddit._byID(links_by_sr_id.keys(), stale=True)

    for sr_id, links in links_by_sr_id.iteritems():
        with g.stats.get_timer("link_vote_processor.subreddit_queries"):
            sr = srs_by_id[sr_id]
            add_queries(
                queries=[get_links(sr, sort, "all") for sort in SORTS],
                insert_items=links,
            )
def _run_new_comment(msg):
    """Queue-consumer callback: insert the comment named by msg.body into
    the global and per-subreddit comment listings."""
    comment = Comment._by_fullname(msg.body, data=True)
    subreddit = Subreddit._byID(comment.sr_id)
    queries = [get_all_comments(), get_sr_comments(subreddit)]
    add_queries(queries, insert_items=[comment])
def new_comment(comment, inbox_rels):
    """Register a new (or newly deleted) comment in the precomputed
    queries, queue downstream processing, and fan out inbox notifications.

    comment    -- the Comment thing that was created or deleted
    inbox_rels -- inbox relation(s) for recipients to notify, or None
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all')]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items = comment)
    else:
        sr = Subreddit._byID(comment.sr_id)
        if comment._spam:
            job.append(get_spam_comments(sr))
        add_queries(job, insert_items = comment)
        amqp.add_item('new_comment', comment._fullname)
        # with no queue broker configured, update the tree synchronously
        if not g.amqp_host:
            add_comment_tree([comment])

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)],
                            insert_items = inbox_rel)
            else:
                add_queries([get_inbox_selfreply(inbox_owner)],
                            insert_items = inbox_rel)
            set_unread(comment, inbox_owner, True)
def get_links(cls, event_id):
    """Return Wrapped links related to *event_id*, sorted by descending
    comment count, skipping spam/deleted links, links that disallow
    liveupdate, and links in private subreddits."""
    link_ids = cls._get_related_link_ids(event_id)
    links = Link._byID(link_ids, data=True, return_dict=False)
    links.sort(key=lambda L: L.num_comments, reverse=True)

    sr_ids = set(L.sr_id for L in links)
    subreddits = Subreddit._byID(sr_ids, data=True)

    wrapped = []
    for link in links:
        w = Wrapped(link)

        if w._spam or w._deleted:
            continue

        # links may opt out of liveupdate; default is to allow
        if not getattr(w, "allow_liveupdate", True):
            continue

        w.subreddit = subreddits[link.sr_id]

        # ideally we'd check if the user can see the subreddit, but by
        # doing this we keep everything user unspecific which makes caching
        # easier.
        if w.subreddit.type == "private":
            continue

        comment_label = ungettext("comment", "comments", link.num_comments)
        w.comments_label = strings.number_label % dict(
            num=link.num_comments, thing=comment_label)

        wrapped.append(w)
    return wrapped
def new_promotion(title, url, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    PromotionLog.add(l, 'promotion created')
    l._commit()

    # set the status of the link, populating the query queue;
    # sponsors and trusted sponsors go straight to accepted
    if c.user_is_sponsor or user.trusted_sponsor:
        set_promote_status(l, PROMOTE_STATUS.accepted)
    else:
        set_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)

    return l
def get_spam(sr):
    """Merged spam listing for *sr*.

    For a mod/contrib pseudo-subreddit the spam-link queries of every
    member subreddit are merged; otherwise the subreddit's spam links and
    spam comments are merged.
    """
    if not isinstance(sr, ModContribSR):
        return merge_results(get_spam_links(sr), get_spam_comments(sr))
    member_srs = Subreddit._byID(sr.sr_ids(), return_dict=False)
    return merge_results(*[get_spam_links(member) for member in member_srs])
def get_adzerk_promo(user, site):
    """Ask the adzerk ad server for a promoted link for user/site.

    Returns a wrapped promoted link annotated with the adserver's
    impression pixel and click URL, or None when there are no live
    promos or no usable adzerk response.
    """
    srids = promote.has_live_promos(user, site)
    if not srids:
        return

    # the empty srid maps to the frontpage
    if '' in srids:
        srnames = [Frontpage.name]
        srids.remove('')
    else:
        srnames = []

    srs = Subreddit._byID(srids, data=True, return_dict=False)
    srnames.extend([sr.name for sr in srs])
    response = adzerk_request(srnames)

    if not response:
        return

    promo_tuples = [promote.PromoTuple(response.link, 1., response.campaign)]
    builder = CampaignBuilder(promo_tuples, keep_fn=organic.keep_fresh_links)
    promoted_links = builder.get_items()[0]
    if promoted_links:
        w = promoted_links[0]
        w.adserver_imp_pixel = response.imp_pixel
        w.adserver_click_url = response.click_url
        return w
def __init__(self):
    """Build the subreddit navigation menus: the "my reddits" dropdown
    and the flat bar of recent + popular subreddits."""
    Wrapped.__init__(self)

    my_reddits = []
    sr_ids = Subreddit.user_subreddits(c.user if c.user_is_loggedin else None)
    if sr_ids:
        my_reddits = Subreddit._byID(sr_ids, True, return_dict = False)
        my_reddits.sort(key = lambda sr: sr.name.lower())

    drop_down_buttons = []
    for sr in my_reddits:
        drop_down_buttons.append(SubredditButton(sr))

    #leaving the 'home' option out for now
    #drop_down_buttons.insert(0, NamedButton('home', sr_path = False,
    #                                        css_class = 'top-option',
    #                                        dest = '/'))
    drop_down_buttons.append(NamedButton('edit', sr_path = False,
                                         css_class = 'bottom-option',
                                         dest = '/reddits/'))
    self.sr_dropdown = SubredditMenu(drop_down_buttons,
                                     title = _('my reddits'),
                                     type = 'srdrop')

    # flat bar: recently visited subreddits first, then popular defaults
    pop_reddits = Subreddit.default_srs(c.content_langs, limit = 30)
    buttons = [SubredditButton(sr) for sr in c.recent_reddits]
    for sr in pop_reddits:
        if sr not in c.recent_reddits:
            buttons.append(SubredditButton(sr))

    self.sr_bar = NavMenu(buttons, type='flatlist', separator = '-',
                          _id = 'sr-bar')
def new_comment(comment, inbox_rels):
    """Insert a new comment into (or delete a removed comment from) the
    relevant precomputed queries, then fan out inbox notifications."""
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all')]
    if comment._deleted:
        job.append(get_all_comments())
        add_queries(job, delete_items = comment)
    else:
        # the subreddit lookup is only needed for the spam listing
        if comment._spam:
            sr = Subreddit._byID(comment.sr_id)
            job.append(get_spam_comments(sr))
        add_queries(job, insert_items = comment)
        amqp.add_item('new_comment', comment._fullname)
        # with no queue broker configured, update the tree synchronously
        if not g.amqp_host:
            add_comment_tree([comment])

    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)

    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            if inbox_rel._name == "inbox":
                add_queries([get_inbox_comments(inbox_owner)],
                            insert_items = inbox_rel)
            else:
                add_queries([get_inbox_selfreply(inbox_owner)],
                            insert_items = inbox_rel)
            set_unread(comment, inbox_owner, True)
def process_message(msgs, chan):
    """Update get_links(), the Links by Subreddit precomputed query.

    get_links() is a CachedResult which is stored in permacache. To
    update these objects we need to do a read-modify-write which requires
    obtaining a lock. Sharding these updates by subreddit allows us to run
    multiple consumers (but ideally just one per shard) to avoid lock
    contention.

    """
    # local import to avoid a circular dependency at module load time
    from r2.lib.db.queries import add_queries, get_links

    # each message body is a link fullname; de-dupe before the lookup
    link_names = {msg.body for msg in msgs}
    links = Link._by_fullname(link_names, return_dict=False)
    print 'Processing %r' % (links, )  # consumer progress log (Python 2)

    # group by subreddit so each sr's queries are updated in one pass
    links_by_sr_id = defaultdict(list)
    for link in links:
        links_by_sr_id[link.sr_id].append(link)

    srs_by_id = Subreddit._byID(links_by_sr_id.keys(), stale=True)

    for sr_id, links in links_by_sr_id.iteritems():
        with g.stats.get_timer("link_vote_processor.subreddit_queries"):
            sr = srs_by_id[sr_id]
            add_queries(
                queries=[get_links(sr, sort, "all") for sort in SORTS],
                insert_items=links,
            )
def new_promotion(title, url, selftext, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    PromotionLog.add(l, 'promotion created')

    # 'self' promotions point at their own permalink and carry selftext
    if url == 'self':
        l.url = l.make_permalink_slow()
        l.is_self = True
        l.selftext = selftext

    l._commit()

    # set the status of the link, populating the query queue;
    # sponsors and trusted sponsors go straight to accepted
    if c.user_is_sponsor or user.trusted_sponsor:
        set_promote_status(l, PROMOTE_STATUS.accepted)
    else:
        set_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)

    return l
def get_reported(sr):
    """Reported-links listing for *sr*; for a mod/contrib
    pseudo-subreddit the per-member queries are merged."""
    if isinstance(sr, ModContribSR):
        members = Subreddit._byID(sr.sr_ids(), return_dict=False)
        queries = [get_reported_links(member) for member in members]
        return merge_results(*queries)
    return get_reported_links(sr)
def batch_lookups(self):
    """Prefetch the Accounts and Subreddits referenced by self.things so
    the uploader can denormalize them into search documents.

    When self.use_safe_get is set, missing rows are tolerated via
    safe_get(); otherwise NotFound propagates to the caller.
    """
    super(SolrLinkUploader, self).batch_lookups()
    author_ids = [thing.author_id for thing in self.things
                  if hasattr(thing, 'author_id')]
    try:
        self.accounts = Account._byID(author_ids, data=True,
                                      return_dict=True)
    except NotFound:
        if self.use_safe_get:
            self.accounts = safe_get(Account._byID, author_ids, data=True,
                                     return_dict=True)
        else:
            raise

    sr_ids = [thing.sr_id for thing in self.things
              if hasattr(thing, 'sr_id')]
    try:
        self.srs = Subreddit._byID(sr_ids, data=True, return_dict=True)
    except NotFound:
        if self.use_safe_get:
            self.srs = safe_get(Subreddit._byID, sr_ids, data=True,
                                return_dict=True)
        else:
            raise
def update_karmas():
    """Maintenance task: grant 20 comment karma in the relevant subreddit
    for each (account id, subreddit id) pair yielded by to_update()."""
    for pair in to_update():
        user = Account._byID(pair[0], True)
        sr = Subreddit._byID(pair[1], True)
        print user.name, sr.name  # progress output (Python 2 script)
        user.incr_karma('comment', sr, 20)
def get_spam(sr):
    """Merged spam listing: per-member spam links for a mod/contrib
    pseudo-subreddit, otherwise the sr's spam links plus spam comments."""
    if isinstance(sr, ModContribSR):
        member_srs = Subreddit._byID(sr.sr_ids, return_dict=False)
        link_queries = [get_spam_links(member) for member in member_srs]
        return merge_results(*link_queries)
    return merge_results(get_spam_links(sr), get_spam_comments(sr))
def new_promotion(title, url, selftext, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')

    # 'self' promotions point at their own permalink and carry selftext
    if url == 'self':
        l.url = l.make_permalink_slow()
        l.is_self = True
        l.selftext = selftext

    l._commit()

    # every new promotion starts unpaid; this populates the query queue
    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)

    return l
def can_create_subreddit(self):
    """Whether this account may create a new subreddit.

    Considers (in order): the account.can_create_subreddit hook, minimum
    account age, karma thresholds, and a per-user creation rate limit.
    """
    hook = hooks.get_hook("account.can_create_subreddit")
    can_create = hook.call_until_return(account=self)
    if can_create is not None:
        return can_create

    min_age = timedelta(days=g.live_config["create_sr_account_age_days"])
    if self._age < min_age:
        return False

    # either karma threshold is sufficient; both must fail to block
    if (self.link_karma < g.live_config["create_sr_link_karma"] and
        self.comment_karma < g.live_config["create_sr_comment_karma"]):
        return False

    # CUSTOM - allow subreddit creation once every X days
    # To avoid checking all subs, we get a list of user's contributed to subs,
    # and then check the sub author and sub creation date. Works even if they
    # create a sub then quit as moderator.
    # NOTE: user_subreddits() safely covers subs returned by special_reddits()
    # with "contributor" and "moderator"
    # TODO: Use the can_create_subreddit hook to do this stuff elsewhere
    if g.live_config["create_sr_ratelimit_once_per_days"] > 0:
        from r2.models import Subreddit
        user_sr_ids = Subreddit.user_subreddits(self)
        if user_sr_ids:
            min_last_created = datetime.today() - timedelta(days=int(g.live_config["create_sr_ratelimit_once_per_days"]))
            srs = Subreddit._byID(user_sr_ids)
            for sr in srs.itervalues():
                # admins and employees are exempt from the rate limit
                if sr.author_id == self._id and sr._date > min_last_created.replace(tzinfo=g.tz) and not c.user_is_admin and not self.employee:
                    # g.log.warning("!!! dbg: user %s cannot create sub, created %s at %s, once every %s days max" % (self.name, sr.name, sr._date, g.live_config["create_sr_ratelimit_once_per_days"]))
                    return False

    # NOTE(review): this check has no effect — both this branch and the
    # fall-through return True. Confirm whether globally banned accounts
    # were meant to be treated differently here.
    if self.is_global_banned:
        return True

    return True
def new_promotion(is_self, title, content, author, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(
        is_self=is_self,
        title=title,
        content=content,
        author=author,
        sr=sr,
        ip=ip,
    )

    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')

    # every new promotion starts unpaid; this populates the query queue
    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if author.pref_show_promote is not False:
        author.pref_show_promote = True
        author._commit()

    # notify of new promo
    emailer.new_promo(l)

    return l
def update_flair_counts():
    """Recompute the-button flair histogram and store it on the subreddit.

    Bug fix: the ButtonActivity id36s were collected into an unused local
    (`raw`) while `user_ids` was left as an empty list, so the counting
    loop below never executed and the stored flair counts were always
    empty. The ids now feed the loop directly.
    """
    flairs = Counter()

    sr = Subreddit._byID(g.live_config["thebutton_srid"], data=True)

    # every account that has button activity (id36s for _byID36 below)
    user_ids = [ba._id36 for ba in ButtonActivity._all()]

    for user_chunk in in_chunks(user_ids, size=100):
        users = Account._byID36(user_chunk, data=True, return_dict=False)
        for user in users:
            flair = user.flair_css_class(sr._id)
            if not flair:
                # unflaired accounts: pre-cutoff ones could have pressed,
                # newer ones never could
                if user._date < ACCOUNT_CREATION_CUTOFF:
                    flair = "no-press"
                else:
                    flair = "cant-press"
            flairs[flair] += 1

    # don't publish the cheater count
    if 'cheater' in flairs:
        del flairs['cheater']

    # reverse-sort by flair name, keying 'no-press' as 'z' to pin it first
    sr.flair_counts = sorted(
        flairs.iteritems(),
        key=lambda x: 'z' if x[0] == 'no-press' else x[0],
        reverse=True)
    sr._commit()
def normalized_hot_cached(sr_ids):
    """Return fullnames of recent hot links across the given subreddits,
    with each link's hotness normalized by its subreddit's top score so a
    single busy subreddit doesn't dominate the interleaved listing."""
    results = []
    srs = Subreddit._byID(sr_ids, data = True, return_dict = False)
    for sr in srs:
        #items = get_hot(sr)
        # only consider links newer than the hot-page age cutoff
        items = filter(lambda l: l._date > utils.timeago('%d day' % g.HOT_PAGE_AGE),
                       get_hot(sr))

        if not items:
            continue

        top_score = max(items[0]._hot, 1)

        top, rest = items[:2], items[2:]

        # the top two links each get a randomly chosen one of their two
        # normalized scores, so the leader isn't always pinned first
        if top:
            normals = [l._hot / top_score for l in top]
            results.extend((l, random.choice(normals)) for l in top)
            #random.shuffle(normals)
            #results.extend((l, normals.pop()) for l in top)

        if rest:
            results.extend((l, l._hot / top_score) for l in rest)

    results.sort(key = lambda x: (x[1], x[0]._hot), reverse = True)
    return [l[0]._fullname for l in results]
def get_reply_to_address(message): """Construct a reply-to address that encodes the message id. The address is of the form: zendeskreply+{message_id36}-{email_mac} where the mac is generated from {message_id36} using the `modmail_email_secret` The reply address should be configured with the inbound email service so that replies to our messages are routed back to the app somehow. For mailgun this involves adding a Routes filter for messages sent to "zendeskreply\+*@". to be forwarded to POST /api/zendeskreply. """ # all email replies are treated as replies to the first message in the # conversation. this is to get around some peculiarities of zendesk if message.first_message: first_message = Message._byID(message.first_message, data=True) else: first_message = message email_id = first_message._id36 email_mac = hmac.new( g.secrets['modmail_email_secret'], email_id, hashlib.sha256).hexdigest() reply_id = "zendeskreply+{email_id}-{email_mac}".format( email_id=email_id, email_mac=email_mac) sr = Subreddit._byID(message.sr_id, data=True) return "{brander_community_abbr}/{subreddit} mail <{reply_id}@{domain}>".format( subreddit=sr.name, reply_id=reply_id, domain=g.modmail_email_domain, brander_community_abbr = g.brander_community_abbr)
def get_reply_to_address(message): """Construct a reply-to address that encodes the message id. The address is of the form: zendeskreply+{message_id36}-{email_mac} where the mac is generated from {message_id36} using the `modmail_email_secret` The reply address should be configured with the inbound email service so that replies to our messages are routed back to the app somehow. For mailgun this involves adding a Routes filter for messages sent to "zendeskreply\+*@". to be forwarded to POST /api/zendeskreply. """ # all email replies are treated as replies to the first message in the # conversation. this is to get around some peculiarities of zendesk if message.first_message: first_message = Message._byID(message.first_message, data=True) else: first_message = message email_id = first_message._id36 email_mac = hmac.new( g.secrets['modmail_email_secret'], email_id, hashlib.sha256).hexdigest() reply_id = "zendeskreply+{email_id}-{email_mac}".format( email_id=email_id, email_mac=email_mac) sr = Subreddit._byID(message.sr_id, data=True) return "r/{subreddit} mail <{reply_id}@{domain}>".format( subreddit=sr.name, reply_id=reply_id, domain=g.modmail_email_domain)
def moderator_messages(sr_ids):
    """Return all modmail message trees, newest first, for those of the
    given subreddits where the current user has modmail permission."""
    from r2.models import Subreddit

    srs = Subreddit._byID(sr_ids)
    # keep only subreddits where c.user may read mod mail
    sr_ids = [sr_id for sr_id, sr in srs.iteritems()
              if sr.is_moderator_with_perms(c.user, 'mail')]

    def multi_load_tree(sr_ids):
        # cache-miss loader: build trees for each sr, skipping empties
        res = {}
        for sr_id in sr_ids:
            trees = subreddit_messages_nocache(srs[sr_id])
            if trees:
                res[sr_id] = trees
        return res

    # fetch per-sr trees from permacache, computing misses in bulk
    res = sgm(g.permacache, sr_ids, miss_fn=multi_load_tree,
              prefix=sr_messages_key(""))

    res = {sr_id: filter_new_modmail(srs[sr_id], trees)
           for sr_id, trees in res.iteritems()}

    return sorted(chain(*res.values()), key=tree_sort_fn, reverse=True)
def new_promotion(title, url, selftext, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    PromotionLog.add(l, "promotion created")

    # "self" promotions point at their own permalink and carry selftext
    if url == "self":
        l.url = l.make_permalink_slow()
        l.is_self = True
        l.selftext = selftext

    l._commit()

    # every new promotion starts unpaid; this populates the query queue
    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)

    return l
def migrate_scan_adjustments(self):
    """Fold every stored KarmaAdjustment into self.new_values, keyed by
    direction ('ups'/'downs') and subreddit name."""
    for adjustment in KarmaAdjustment._query(data=True):
        subreddit = Subreddit._byID(adjustment.sr_id)
        if adjustment.amount >= 0:
            direction = 'ups'
        else:
            direction = 'downs'
        key = 'karma_{0}_adjustment_{1}'.format(direction, subreddit.name)
        self.new_values[adjustment.account_id][key] += abs(adjustment.amount)
def update_flair_counts():
    """Tally the-button flair classes across all accounts active in the
    button subreddit, and store the histogram on the subreddit."""
    flairs = Counter()
    user_ids = []

    sr = Subreddit._byID(g.live_config["thebutton_srid"], data=True)

    # every account ever active in the subreddit (id36, activity pairs)
    raw = AccountsActiveBySR._cf.xget(sr._id36)
    for uid, _ in raw:
        user_ids.append(uid)

    for user_chunk in in_chunks(user_ids, size=100):
        users = Account._byID36(user_chunk, data=True, return_dict=False)
        for user in users:
            flair = user.flair_css_class(sr._id)
            if not flair:
                # unflaired accounts: pre-cutoff ones could have pressed,
                # newer ones never could
                if user._date < ACCOUNT_CREATION_CUTOFF:
                    flair = "no-press"
                else:
                    flair = "cant-press"
            flairs[flair] += 1

    # don't publish the cheater count
    if 'cheater' in flairs:
        del flairs['cheater']

    # reverse-sort by flair name, keying 'no-press' as 'z' to pin it first
    sr.flair_counts = sorted(
        flairs.iteritems(),
        key=lambda x: 'z' if x[0] == 'no-press' else x[0],
        reverse=True)
    sr._commit()
def multi_load_tree(sr_ids):
    """Build {sr_id: message trees} for each given subreddit that has
    any messages."""
    result = {}
    for subreddit in Subreddit._byID(sr_ids, return_dict=False):
        trees = subreddit_messages_nocache(subreddit)
        if trees:
            result[subreddit._id] = trees
    return result
def get_unmoderated(sr):
    """Return the unmoderated-links queries for *sr* as a list — one per
    member subreddit when given a MultiReddit."""
    if isinstance(sr, MultiReddit):
        members = Subreddit._byID(sr.sr_ids, return_dict=False)
        return [get_unmoderated_links(member) for member in members]
    return [get_unmoderated_links(sr)]
def _run_new_comment(msg):
    """Process one queued comment fullname: add it to the all-comments
    and per-subreddit comment queries."""
    fullname = msg.body
    the_comment = Comment._by_fullname(fullname, data=True)
    sr = Subreddit._byID(the_comment.sr_id)
    add_queries([get_all_comments(), get_sr_comments(sr)],
                insert_items = [the_comment])
def multi_load_tree(sr_ids):
    """Load message trees for the given subreddit ids, keyed by sr id,
    skipping subreddits with no messages."""
    srs = Subreddit._byID(sr_ids, return_dict = False)
    pairs = ((sr._id, subreddit_messages_nocache(sr)) for sr in srs)
    return {sr_id: trees for sr_id, trees in pairs if trees}
def get_reported(sr):
    """Return the reported links + reported comments queries for *sr* as
    a list; aggregate subreddits get one pair per member subreddit."""
    if isinstance(sr, (ModContribSR, MultiReddit)):
        members = Subreddit._byID(sr.sr_ids, return_dict=False)
        queries = [get_reported_links(member) for member in members]
        queries += [get_reported_comments(member) for member in members]
        return queries
    return [get_reported_links(sr), get_reported_comments(sr)]
def get_reported(sr):
    """Merged reported-items listing (links and comments) for *sr*,
    fanned out across member subreddits for aggregate subreddits."""
    if not isinstance(sr, (ModContribSR, MultiReddit)):
        return merge_results(get_reported_links(sr),
                             get_reported_comments(sr))
    members = Subreddit._byID(sr.sr_ids, return_dict=False)
    queries = ([get_reported_links(member) for member in members] +
               [get_reported_comments(member) for member in members])
    return merge_results(*queries)
def get_reported(sr):
    """Merged reported links and comments for *sr*; for a mod/contrib
    pseudo-subreddit, every member subreddit's queries are merged."""
    if isinstance(sr, ModContribSR):
        members = Subreddit._byID(sr.sr_ids, return_dict=False)
        link_queries = [get_reported_links(member) for member in members]
        comment_queries = [get_reported_comments(member) for member in members]
        return merge_results(*(link_queries + comment_queries))
    return merge_results(get_reported_links(sr), get_reported_comments(sr))
def get_rising_items(omit_sr_ids, count=4):
    """Get links that are rising right now."""
    all_rising = rising.get_all_rising()
    eligible_sr_ids = {sr_id for _, _, sr_id in all_rising}
    eligible_sr_ids = eligible_sr_ids.difference(omit_sr_ids)
    fullnames = [fullname for fullname, _, sr_id in all_rising
                 if sr_id in eligible_sr_ids]
    sampled_fullnames = random_sample(fullnames, count)
    links = Link._by_fullname(sampled_fullnames, return_dict=False, data=True)
    return [ExploreItem(TYPE_RISING, "ris", Subreddit._byID(link.sr_id), link)
            for link in links]
def get_sender_name(message):
    """Return '/r/name' when the message was sent on behalf of a
    subreddit, otherwise '/u/name' of the (display) author."""
    if getattr(message, 'from_sr', False):
        sr = Subreddit._byID(message.sr_id, data=True)
        return '/r/%s' % sr.name
    display_author = getattr(message, 'display_author', False)
    sender_id = display_author if display_author else message.author_id
    sender = Account._byID(sender_id, data=True)
    return '/u/%s' % sender.name
def new_comment(comment, inbox_rels):
    """Update every cached query affected by a comment being created,
    deleted, marked spam, or spam-filtered, queue tree rebuilds, and fan
    out inbox notifications — all within one CachedQueryMutator."""
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all'),
           get_comments(author, 'top', 'all'),
           get_comments(author, 'controversial', 'all')]

    sr = Subreddit._byID(comment.sr_id)

    with CachedQueryMutator() as m:
        if comment._deleted:
            job_key = "delete_items"
            job.append(get_sr_comments(sr))
            m.delete(get_all_comments(), [comment])
        else:
            job_key = "insert_items"
            if comment._spam:
                m.insert(get_spam_comments(sr), [comment])
            if was_spam_filtered(comment):
                m.insert(get_spam_filtered_comments(sr), [comment])

            # comments on fastlane links go to a dedicated queue
            if utils.to36(comment.link_id) in g.live_config["fastlane_links"]:
                amqp.add_item('new_fastlane_comment', comment._fullname)
            else:
                amqp.add_item('new_comment', comment._fullname)

            # with no queue broker configured, update the tree synchronously
            if not g.amqp_host:
                add_comment_tree([comment])

        job_dict = {job_key: comment}
        add_queries(job, **job_dict)

        # note that get_all_comments() is updated by the amqp process
        # r2.lib.db.queries.run_new_comments (to minimise lock contention)

        if inbox_rels:
            for inbox_rel in tup(inbox_rels):
                inbox_owner = inbox_rel._thing1
                if inbox_rel._name == "inbox":
                    query = get_inbox_comments(inbox_owner)
                elif inbox_rel._name == "selfreply":
                    query = get_inbox_selfreply(inbox_owner)
                else:
                    raise ValueError("wtf is " + inbox_rel._name)

                if not comment._deleted:
                    m.insert(query, [inbox_rel])
                else:
                    m.delete(query, [inbox_rel])

                set_unread(comment, inbox_owner,
                           unread=not comment._deleted, mutator=m)
def xml_from_things(things):
    '''Generate a <batch> XML tree to send to cloudsearch for
    adding/updating/deleting the given things

    '''
    batch = etree.Element("batch")
    # bulk-prefetch the accounts and subreddits the documents reference
    author_ids = [thing.author_id for thing in things
                  if hasattr(thing, 'author_id')]
    try:
        accounts = Account._byID(author_ids, data=True, return_dict=True)
    except NotFound:
        if USE_SAFE_GET:
            accounts = safe_get(Account._byID, author_ids, data=True,
                                return_dict=True)
        else:
            raise

    sr_ids = [thing.sr_id for thing in things if hasattr(thing, 'sr_id')]
    try:
        srs = Subreddit._byID(sr_ids, data=True, return_dict=True)
    except NotFound:
        if USE_SAFE_GET:
            srs = safe_get(Subreddit._byID, sr_ids, data=True,
                           return_dict=True)
        else:
            raise

    version = _version()
    for thing in things:
        try:
            if thing._spam or thing._deleted:
                delete_node = delete_xml(thing, version)
                batch.append(delete_node)
            elif thing.promoted is None and getattr(thing, "sr_id", None) != -1:
                add_node = add_xml(thing, version, srs, accounts)
                batch.append(add_node)
        except (AttributeError, KeyError):
            # AttributeError may occur if a needed attribute is somehow
            # missing from the DB
            # KeyError will occur for whichever items (if any) triggered
            # the safe_get() call above, because the needed (but invalid)
            # Account or Subreddit is missing from the srs or accounts
            # dictionary
            # In either case, the sanest approach is to simply not index
            # the item. If it gets voted on later (or otherwise sent back
            # to the queue), perhaps it will have been fixed.
            pass
    return batch
def get_message_subject(message):
    """Build the modmail email subject, using the first message of the
    conversation for the subject when this message is a reply."""
    sr = Subreddit._byID(message.sr_id, data=True)
    if message.first_message:
        root_message = Message._byID(message.first_message, data=True)
        conversation_subject = root_message.subject
    else:
        conversation_subject = message.subject
    return u"[r/{subreddit} mail]: {subject}".format(
        subreddit=sr.name,
        subject=_force_unicode(conversation_subject))
def article_row_extract(self, link):
    """Flatten a Link into a row tuple:
    (id, title, article, author id, date, subreddit name)."""
    subreddit = Subreddit._byID(link.sr_id, data=True)
    return (
        link._id,
        self.utf8(link.title),
        self.utf8(link.article),
        link.author_id,
        link._date,
        subreddit.name,
    )
def get_message_subject(message):
    """Build the modmail email subject line for *message*.

    Replies use the subject of the first message in the conversation.

    Bug fix: ``brander_community_abbr`` was passed as a keyword argument
    to ``_force_unicode`` instead of to ``str.format``, leaving the
    ``{brander_community_abbr}`` placeholder unmatched (KeyError at
    format time). It is now passed to ``format`` where it belongs.
    """
    sr = Subreddit._byID(message.sr_id, data=True)
    if message.first_message:
        first_message = Message._byID(message.first_message, data=True)
        conversation_subject = first_message.subject
    else:
        conversation_subject = message.subject

    return u"[{brander_community_abbr}/{subreddit} mail]: {subject}".format(
        subreddit=sr.name,
        subject=_force_unicode(conversation_subject),
        brander_community_abbr=g.brander_community_abbr)
def postflight(self):
    """After a batched-time query runs, record the run time for this
    query iden on the subreddit's last_batch_query map (under a lock)."""
    sr_id = getattr(self, "batched_time_srid", None)
    if not sr_id:
        return True

    lock_name = "modify_sr_last_batch_query(%s)" % sr_id
    with make_lock(lock_name):
        sr = Subreddit._byID(sr_id, data=True)
        # copy before mutating so the stored attribute is replaced whole
        timestamps = getattr(sr, "last_batch_query", {}).copy()
        timestamps[self.iden] = datetime.now(g.tz)
        sr.last_batch_query = timestamps
        sr._commit()
def postflight(self):
    """Stamp the current time onto the subreddit's last_batch_query map
    for this query iden, serialized by a per-subreddit lock."""
    sr_id = getattr(self, 'batched_time_srid', None)
    if not sr_id:
        return True

    with make_lock('modify_sr_last_batch_query(%s)' % sr_id):
        subreddit = Subreddit._byID(sr_id, data=True)
        # read-modify-write: copy the dict, then replace it wholesale
        updated = getattr(subreddit, 'last_batch_query', {}).copy()
        updated[self.iden] = datetime.now(g.tz)
        subreddit.last_batch_query = updated
        subreddit._commit()
def add_props(cls, user, wrapped):
    """Populate presentation attributes on wrapped game-log rows: the
    acting user, the target (with permalink), the item, and team info."""
    user_fullnames = {w.user_fullname for w in wrapped}
    target_fullnames = {w.target_fullname for w in wrapped}

    users = Account._by_fullname(user_fullnames, data=True,
                                 return_dict=True)
    targets = Thing._by_fullname(target_fullnames, data=True,
                                 return_dict=True)

    # bulk-prefetch everything the targets reference
    author_ids = {t.author_id for t in targets.itervalues()
                  if hasattr(t, 'author_id')}
    link_ids = {t.link_id for t in targets.itervalues()
                if hasattr(t, 'link_id')}
    sr_ids = {t.sr_id for t in targets.itervalues() if hasattr(t, 'sr_id')}

    authors = Account._byID(author_ids, data=True, return_dict=True)
    links = Link._byID(link_ids, data=True, return_dict=True)
    subreddits = Subreddit._byID(sr_ids, data=True, return_dict=True)

    target_things = {}
    for fullname, target in targets.iteritems():
        if isinstance(target, (Comment, Link)):
            author = authors[target.author_id]
            if isinstance(target, Link):
                subreddit = subreddits[target.sr_id]
                path = target.make_permalink(subreddit)
            else:
                # a comment's permalink needs its parent link too
                link = links[target.link_id]
                subreddit = subreddits[link.sr_id]
                path = target.make_permalink(link, subreddit)
            target_things[fullname] = GameLogTarget(
                target, path, author, subreddit)
        elif isinstance(target, Account):
            target_things[fullname] = WrappedUser(target)

    for w in wrapped:
        w.is_self = (c.user_is_loggedin and
                     w.user_fullname == c.user._fullname)
        w.user = WrappedUser(users[w.user_fullname])
        w.target = target_things[w.target_fullname]
        w.item = g.f2pitems[w.item]
        w.user_team = scores.get_user_team(users[w.user_fullname])
        # team of the targeted user: the account itself, or the author
        # of the targeted comment/link
        if isinstance(w.target, WrappedUser):
            target_user = targets[w.target.fullname]
        else:
            target_user = authors[targets[w.target_fullname].author_id]
        w.target_team = scores.get_user_team(target_user)
def get_rising_items(omit_sr_ids, count=4):
    """Get links that are rising right now."""
    all_rising = rising.get_all_rising()
    keep_sr_ids = {sr_id for _, _, sr_id in all_rising}.difference(omit_sr_ids)
    candidates = [fullname for fullname, _, sr_id in all_rising
                  if sr_id in keep_sr_ids]
    chosen = random_sample(candidates, count)
    links = Link._by_fullname(chosen, return_dict=False, data=True)
    items = []
    for link in links:
        sr = Subreddit._byID(link.sr_id)
        items.append(ExploreItem(TYPE_RISING, 'ris', sr, link))
    return items
def _by_srid(things):
    """Takes a list of things and returns them in a dict separated by
    sr_id, in addition to the looked-up subreddits"""
    by_srid = {}
    for thing in tup(things):
        sr_id = getattr(thing, 'sr_id', None)
        if sr_id is not None:
            by_srid.setdefault(sr_id, []).append(thing)

    if by_srid:
        srs = Subreddit._byID(by_srid.keys(), return_dict=True)
    else:
        srs = {}
    return by_srid, srs
def _by_srid(things):
    """Takes a list of things and returns them in a dict separated by
    sr_id, in addition to the looked-up subreddits"""
    grouped = {}
    for thing in tup(things):
        if getattr(thing, "sr_id", None) is None:
            continue
        grouped.setdefault(thing.sr_id, []).append(thing)

    srs = Subreddit._byID(grouped.keys(), return_dict=True) if grouped else {}
    return grouped, srs
def new_comment(comment, inbox_rels):
    """Update every cached query affected by a comment being created,
    deleted, marked spam, or spam-filtered, queue tree rebuilds, and fan
    out inbox notifications — all within one CachedQueryMutator."""
    author = Account._byID(comment.author_id)
    job = [
        get_comments(author, "new", "all"),
        get_comments(author, "top", "all"),
        get_comments(author, "controversial", "all"),
    ]

    sr = Subreddit._byID(comment.sr_id)

    with CachedQueryMutator() as m:
        if comment._deleted:
            job_key = "delete_items"
            job.append(get_sr_comments(sr))
            m.delete(get_all_comments(), [comment])
        else:
            job_key = "insert_items"
            if comment._spam:
                m.insert(get_spam_comments(sr), [comment])
            if was_spam_filtered(comment):
                m.insert(get_spam_filtered_comments(sr), [comment])

            # comments on fastlane links go to a dedicated queue
            if utils.to36(comment.link_id) in g.live_config["fastlane_links"]:
                amqp.add_item("new_fastlane_comment", comment._fullname)
            else:
                amqp.add_item("new_comment", comment._fullname)

            # with no queue broker configured, update the tree synchronously
            if not g.amqp_host:
                add_comment_tree([comment])

        job_dict = {job_key: comment}
        add_queries(job, **job_dict)

        # note that get_all_comments() is updated by the amqp process
        # r2.lib.db.queries.run_new_comments (to minimise lock contention)

        if inbox_rels:
            for inbox_rel in tup(inbox_rels):
                inbox_owner = inbox_rel._thing1
                if inbox_rel._name == "inbox":
                    query = get_inbox_comments(inbox_owner)
                elif inbox_rel._name == "selfreply":
                    query = get_inbox_selfreply(inbox_owner)
                else:
                    raise ValueError("wtf is " + inbox_rel._name)

                if not comment._deleted:
                    m.insert(query, [inbox_rel])
                else:
                    m.delete(query, [inbox_rel])

                set_unread(comment, inbox_owner,
                           unread=not comment._deleted, mutator=m)
def new_link(link):
    """Insert a freshly submitted link into (or remove a deleted link
    from) every precomputed listing query it appears in.

    Bug fix: add_queries was called with the singular keywords
    ``insert_item``/``delete_item``; every other add_queries call site in
    this codebase uses the plural ``insert_items``/``delete_items``, so
    the singular forms did not match the expected keyword and the listing
    updates never took effect. Made consistent with the rest of the file.
    """
    sr = Subreddit._byID(link.sr_id)
    author = Account._byID(link.author_id)

    # per-subreddit sorts, the author's submissions, and toplinks
    results = all_queries(get_links, sr, ('hot', 'new', 'old'), ['all'])
    results.extend(all_queries(get_links, sr, ('top', 'controversial'),
                               db_times.keys()))
    results.append(get_submitted(author, 'new', 'all'))
    results.append(get_links(sr, 'toplinks', 'all'))

    if link._deleted:
        add_queries(results, delete_items = link)
    else:
        add_queries(results, insert_items = link)
def srs_with_live_promos(user, site):
    """Return the subreddits that currently have live promotions for
    user/site; the empty srid maps to the Frontpage object."""
    candidate_srids = srids_from_site(user, site)
    weights = get_live_promotions(candidate_srids)
    live_srids = [srid for srid, adweights in weights.iteritems()
                  if adweights]

    result = []
    if '' in live_srids:
        result.append(Frontpage)
        live_srids.remove('')
    result.extend(Subreddit._byID(live_srids, data=True, return_dict=False))
    return result