def new_report(thing):
    """Register a fresh report on a link or comment by inserting it
    into the reported-items query of its subsciteit.  Things of any
    other type are ignored."""
    if isinstance(thing, Link):
        query = get_reported_links(Subsciteit._byID(thing.sr_id))
        add_queries([query], insert_items=thing)
    elif isinstance(thing, Comment):
        query = get_reported_comments(Subsciteit._byID(thing.sr_id))
        add_queries([query], insert_items=thing)
def build_sr_tree(root_id):
    """Return a breadth-first list of subsciteit ids rooted at
    *root_id*: the root first, then each successive generation of
    children."""
    from r2.models import Subsciteit

    result = [root_id]
    frontier = Subsciteit._byID(root_id).children
    while frontier:
        result.extend(frontier)
        # batch-load the whole generation, then collect its children
        generation = Subsciteit._byID(frontier)
        frontier = []
        for sr in generation.itervalues():
            if sr.children:
                frontier.extend(sr.children)
    return result
def update_karmas(): for pair in to_update(): user = Account._byID(pair[0], True) sr = Subsciteit._byID(pair[1], True) print user.name, sr.name user.incr_karma('comment', sr, 20)
def get_spam(sr):
    """Return the merged spam queries (links and comments) for *sr*.

    A ModContribSR aggregates several subsciteits; its member srs are
    expanded and each one's spam listings are merged together.
    """
    if isinstance(sr, ModContribSR):
        srs = Subsciteit._byID(sr.sr_ids(), return_dict=False)
        results = []
        # NOTE(fix): previously only spam *links* were merged for
        # aggregate SRs, which was inconsistent with get_reported()
        # and get_modqueue(); include spam comments as well.  The loop
        # variable also no longer shadows the `sr` parameter.
        for member in srs:
            results.append(get_spam_links(member))
            results.append(get_spam_comments(member))
        return merge_results(*results)
    else:
        return merge_results(get_spam_links(sr),
                             get_spam_comments(sr))
def multi_load_tree(sr_ids):
    """Map each id in *sr_ids* to its message tree, skipping
    subsciteits whose tree is empty."""
    trees_by_sr = {}
    for sr in Subsciteit._byID(sr_ids, return_dict=False):
        tree = subsciteit_messages_nocache(sr)
        if tree:
            trees_by_sr[sr._id] = tree
    return trees_by_sr
def get_reported(sr):
    """Merged reported-links and reported-comments queries for *sr*,
    expanding an aggregate ModContribSR into its member srs."""
    if not isinstance(sr, ModContribSR):
        return merge_results(get_reported_links(sr),
                             get_reported_comments(sr))
    srs = Subsciteit._byID(sr.sr_ids(), return_dict=False)
    # keep the original merge order: all link queries, then all
    # comment queries
    results = [get_reported_links(member) for member in srs]
    results += [get_reported_comments(member) for member in srs]
    return merge_results(*results)
def _by_srid(things, srs=True):
    """Group *things* by their sr_id.

    Returns a dict of sr_id -> [things]; when *srs* is true, also
    returns a second dict of the looked-up subsciteits keyed by id.
    Things without an sr_id are dropped.
    """
    grouped = {}
    for thing in tup(things):
        sr_id = getattr(thing, 'sr_id', None)
        if sr_id is not None:
            grouped.setdefault(sr_id, []).append(thing)

    if not srs:
        return grouped
    lookup = Subsciteit._byID(grouped.keys(), return_dict=True) if grouped else {}
    return grouped, lookup
def run(): #rss = get_sr_rss() #names = rss.keys() #Build tree order, this will be from root to leaves. order=build_sr_tree(Subsciteit._by_name(g.default_sr)._id) #Populate RSS in the other order... order.reverse() for sr_id in order: sr = Subsciteit._byID(sr_id) if sr.rss_source: #ac=Account._byID(srob.moderators[0]) ac=Account._by_name(g.system_user) print "Populating %s as %s using feed |%s|" % (sr.name,ac.name,sr.rss_source) submit_rss_links(sr.name,sr.rss_source,user=ac._id)
def set_last_sr_ban(self, things):
    """Record moderator ban activity: for every subsciteit touched by
    *things*, bump last_mod_action to now and increment mod_actions by
    the number of affected things."""
    grouped = {}
    for thing in things:
        sr_id = getattr(thing, 'sr_id', None)
        if sr_id is not None:
            grouped.setdefault(sr_id, []).append(thing)

    if not grouped:
        return
    srs = Subsciteit._byID(grouped.keys(), data=True, return_dict=True)
    for sr_id, banned in grouped.iteritems():
        sr = srs[sr_id]
        sr.last_mod_action = datetime.now(g.tz)
        sr._commit()
        sr._incr('mod_actions', len(banned))
def store_keys(key, maxes):
    """Replay a precomputed listing into the live query cache.

    *key* encodes which listing the data belongs to ('user-…', 'sr-…',
    'domain/…', or a userrel prefix such as 'liked-…'); *maxes* is the
    batch of (sort values…, fullname) rows to insert.
    """
    # we're building queries using queries.py, but we could make the
    # queries ourselves if we wanted to avoid the individual lookups
    # for accounts and subsciteits.
    # Note that we're only generating the 'sr-' type queries here, but
    # we're also able to process the other listings generated by the
    # old migrate.mr_permacache for convenience
    userrel_fns = dict(liked = queries.get_liked,
                       disliked = queries.get_disliked,
                       saved = queries.get_saved,
                       hidden = queries.get_hidden)
    if key.startswith('user-'):
        # key shape: 'user-<keytype>-<account_id>'
        acc_str, keytype, account_id = key.split('-')
        account_id = int(account_id)
        fn = queries.get_submitted if keytype == 'submitted' else queries.get_comments
        q = fn(Account._byID(account_id), 'new', 'all')
        # rows arrive as (timestamp, fname); the query wants
        # (fname, sort_value)
        q._insert_tuples([(fname, float(timestamp))
                          for (timestamp, fname) in maxes])
    elif key.startswith('sr-'):
        # key shape: 'sr-<sort>-<time>-<sr_id>'
        sr_str, sort, time, sr_id = key.split('-')
        sr_id = int(sr_id)
        if sort == 'controversy':
            # I screwed this up in the mapper and it's too late to fix
            # it
            sort = 'controversial'
        q = queries.get_links(Subsciteit._byID(sr_id), sort, time)
        # rows arrive as (sort values…, fname); rotate the fname to the
        # front and coerce the sort values to float
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])
    elif key.startswith('domain/'):
        # key shape: 'domain/<sort>/<time>/<domain>'
        d_str, sort, time, domain = key.split('/')
        q = queries.get_domain_links(domain, sort, time)
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])
    elif key.split('-')[0] in userrel_fns:
        # key shape: '<liked|disliked|saved|hidden>-<account_id>'
        key_type, account_id = key.split('-')
        account_id = int(account_id)
        fn = userrel_fns[key_type]
        q = fn(Account._byID(account_id))
        q._insert_tuples([tuple([item[-1]] + map(float, item[:-1]))
                          for item in maxes])
def new_comment(comment, inbox_rels):
    """Update cached listings when a comment is created or deleted.

    Inserts (or, for a deleted comment, removes) the comment in the
    author's comment listings and the relevant subsciteit listings,
    announces new comments over amqp, and updates each recipient's
    inbox/unread listings via *inbox_rels*.
    """
    author = Account._byID(comment.author_id)
    job = [get_comments(author, 'new', 'all'),
           get_comments(author, 'top', 'all'),
           get_comments(author, 'controversial', 'all')]
    sr = Subsciteit._byID(comment.sr_id)
    if comment._deleted:
        # deletion: remove from the sr listing and the global listing
        job_key = "delete_items"
        job.append(get_sr_comments(sr))
        job.append(get_all_comments())
    else:
        job_key = "insert_items"
        if comment._spam:
            job.append(get_spam_comments(sr))
        amqp.add_item('new_comment', comment._fullname)
        # without an amqp consumer, maintain the comment tree inline
        if not g.amqp_host:
            add_comment_tree([comment])
    job_dict = { job_key: comment }
    add_queries(job, **job_dict)
    # note that get_all_comments() is updated by the amqp process
    # r2.lib.db.queries.run_new_comments (to minimise lock contention)
    if inbox_rels:
        for inbox_rel in tup(inbox_rels):
            inbox_owner = inbox_rel._thing1
            job_dict = { job_key: inbox_rel }
            # pick the listing pair matching the relation type
            if inbox_rel._name == "inbox":
                inbox_func = get_inbox_comments
                unread_func = get_unread_comments
            elif inbox_rel._name == "selfreply":
                inbox_func = get_inbox_selfreply
                unread_func = get_unread_selfreply
            else:
                raise ValueError("wtf is " + inbox_rel._name)
            add_queries([inbox_func(inbox_owner)], **job_dict)
            if comment._deleted:
                # deleted comments also disappear from the unread list
                add_queries([unread_func(inbox_owner)], **job_dict)
            else:
                # new comments are marked unread for the recipient
                set_unread(comment, inbox_owner, True)
def assign_trial(account, juries_already_on, ip, slash16):
    """Seat *account* on a jury for an eligible defendant.

    Filters the defendant pool down to things in subsciteits the
    account subscribes to and may submit to, that the account has not
    voted on, then seats them on the oldest trial that passes
    voir_dire.  Returns the chosen defendant, or None if no trial is
    suitable.  *ip* and *slash16* are passed through to voir_dire —
    presumably for duplicate-juror screening; confirm against its
    implementation.
    """
    from r2.models import Jury, Subsciteit, Trial
    from r2.lib.db import queries

    # defendants this account is already a juror for
    defendants_assigned_to = []
    for jury in juries_already_on:
        defendants_assigned_to.append(jury._thing2_id)

    subscribed_sr_ids = Subsciteit.user_subsciteits(account, ids=True, limit=None)

    # Pull defendants, except ones which already have lots of juryvotes
    defs = Trial.all_defendants(quench=True)

    # Filter out defendants outside this user's subscribed SRs
    defs = filter (lambda d: d.sr_id in subscribed_sr_ids, defs)

    # Dictionary of sr_id => SR for all defendants' SRs
    srs = Subsciteit._byID(set([ d.sr_id for d in defs ]))

    # Dictionary of sr_id => eligibility bool
    submit_srs = {}
    for sr_id, sr in srs.iteritems():
        submit_srs[sr_id] = sr.can_submit(account) and not sr._spam

    # Filter out defendants with ineligible SRs
    defs = filter (lambda d: submit_srs.get(d.sr_id), defs)

    likes = queries.get_likes(account, defs)

    if not g.debug:
        # Filter out things that the user has upvoted or downvoted
        defs = filter (lambda d: likes.get((account, d)) is None, defs)

    # Prefer oldest trials
    defs.sort(key=lambda x: x._date)

    # seat the juror on the first defendant that survives voir dire
    for defendant in defs:
        sr = srs[defendant.sr_id]

        if voir_dire(account, ip, slash16, defendants_assigned_to, defendant, sr):
            j = Jury._new(account, defendant)
            return defendant

    return None
def new_link(link):
    """Called on the submission and deletion of links.

    Inserts the link into its subsciteit's 'new' listing, the author's
    submitted listing, each domain-permutation listing and, when the
    link is spam, the subsciteit's spam queue; then announces it on
    the amqp 'new_link' channel.
    """
    sr = Subsciteit._byID(link.sr_id)
    author = Account._byID(link.author_id)

    # hot/top/controversy listings are maintained by new_vote, so only
    # the 'new' sorts need updating here
    pending = [get_links(sr, 'new', 'all', no_children=True),
               get_submitted(author, 'new', 'all')]
    pending += [get_domain_links(domain, 'new', "all")
                for domain in utils.UrlParser(link.url).domain_permutations()]
    if link._spam:
        pending.append(get_spam_links(sr))

    add_queries(pending, insert_items=link)
    amqp.add_item('new_link', link._fullname)
def get_modqueue(sr):
    """Merged moderation queue for *sr*: on-trial links plus reported
    and spam links/comments, expanding aggregate ModContribSRs into
    their member srs."""
    if isinstance(sr, ModContribSR):
        srs = Subsciteit._byID(sr.sr_ids(), return_dict=False)
        results = [get_trials_links(srs)]
        for member in srs:
            results += [get_reported_links(member),
                        get_reported_comments(member),
                        get_spam_links(member),
                        get_spam_comments(member)]
    else:
        results = [get_trials_links(sr),
                   get_reported_links(sr),
                   get_reported_comments(sr),
                   get_spam_links(sr),
                   get_spam_comments(sr)]
    return merge_results(*results)
def get_trials(sr):
    """Query for the links currently on trial in *sr*, expanding an
    aggregate ModContribSR into its member srs."""
    target = (Subsciteit._byID(sr.sr_ids(), return_dict=False)
              if isinstance(sr, ModContribSR) else sr)
    return get_trials_links(target)