def gen_keys():
    """Yield every cache key that should be pre-warmed.

    Walks all links, accounts and subsciteits (including spam/deleted
    where the queries say so) and yields the precomputed-query idens and
    last-modified keys associated with each thing.
    """
    yield promoted_memo_key

    # just let this one do its own writing
    load_all_sciteits()

    yield queries.get_all_comments().iden

    link_q = Link._query(Link.c._spam == (True, False),
                         Link.c._deleted == (True, False),
                         sort=desc('_date'),
                         data=True,
                         )
    for link in fetch_things2(link_q, verbosity):
        yield comments_key(link._id)
        yield last_modified_key(link, 'comments')

    account_q = Account._query(Account.c._spam == (True, False),
                               sort=desc('_date'),
                               )
    for account in fetch_things2(account_q, verbosity):
        yield messages_key(account._id)
        yield last_modified_key(account, 'overview')
        yield last_modified_key(account, 'commented')
        yield last_modified_key(account, 'submitted')
        yield last_modified_key(account, 'liked')
        yield last_modified_key(account, 'disliked')
        yield queries.get_comments(account, 'new', 'all').iden
        yield queries.get_submitted(account, 'new', 'all').iden
        yield queries.get_liked(account).iden
        yield queries.get_disliked(account).iden
        yield queries.get_hidden(account).iden
        yield queries.get_saved(account).iden
        yield queries.get_inbox_messages(account).iden
        yield queries.get_unread_messages(account).iden
        yield queries.get_inbox_comments(account).iden
        yield queries.get_unread_comments(account).iden
        yield queries.get_inbox_selfreply(account).iden
        yield queries.get_unread_selfreply(account).iden
        yield queries.get_sent(account).iden

    sr_q = Subsciteit._query(Subsciteit.c._spam == (True, False),
                             sort=desc('_date'),
                             )
    for sr in fetch_things2(sr_q, verbosity):
        yield last_modified_key(sr, 'stylesheet_contents')
        yield queries.get_links(sr, 'hot', 'all').iden
        yield queries.get_links(sr, 'new', 'all').iden

        # every time-filtered variant of the sorted listings
        for sort in 'top', 'controversial':
            for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                yield queries.get_links(sr, sort, time,
                                        merge_batched=False).iden

        yield queries.get_spam_links(sr).iden
        yield queries.get_spam_comments(sr).iden
        yield queries.get_reported_links(sr).iden
        yield queries.get_reported_comments(sr).iden
        yield queries.get_subsciteit_messages(sr).iden
        yield queries.get_unread_subsciteit_messages(sr).iden
def add_all_ban_report_srs():
    """Adds the initial spam/reported pages to the report queue"""
    sr_q = Subsciteit._query(sort=asc('_date'))
    for sr in fetch_things2(sr_q):
        # seed the four moderation listings for this subsciteit
        add_queries([get_spam_links(sr),
                     get_spam_comments(sr),
                     get_reported_links(sr),
                     get_reported_comments(sr),
                     ])
def add_allow_top_to_srs():
    "Add the allow_top property to all stored subsciteits"
    from r2.models import Subsciteit
    from r2.lib.db.operators import desc
    from r2.lib.utils import fetch_things2

    q = Subsciteit._query(Subsciteit.c._spam == (True, False),
                          sort=desc('_date'))
    for sr in fetch_things2(q):
        # default every existing subsciteit to being eligible for the
        # top listings, then persist it
        sr.allow_top = True
        sr._commit()
def add_all_srs():
    """Recalculates every listing query for every subsciteit.

    Very, very slow.
    """
    # NOTE(fix): the original code rebound the loop variable `q` (the
    # outer Subsciteit query) inside the body; that only worked because
    # fetch_things2 had already captured the reference.  Distinct names
    # remove the latent shadowing bug.
    sr_q = Subsciteit._query(sort=asc('_date'))
    for sr in fetch_things2(sr_q):
        # basic hot/new listings
        for query in all_queries(get_links, sr, ('hot', 'new'), ['all'],
                                 no_children=True):
            query.update()
        # every time-filtered sort (top/controversial etc.)
        for query in all_queries(get_links, sr, time_filtered_sorts,
                                 db_times.keys(), no_children=True):
            query.update()
        # moderation listings
        get_spam_links(sr).update()
        get_spam_comments(sr).update()
        get_reported_links(sr).update()
        get_reported_comments(sr).update()
def popular_searches():
    """Return a dict mapping short name prefixes (1-3 chars) of the 100
    most-subscribed public subsciteits to their search results."""
    top_q = Subsciteit._query(Subsciteit.c.type == 'public',
                              sort=desc('_downs'),
                              limit=100,
                              data=True)
    searches = {}
    for sr in top_q:
        lowered = sr.name.lower()
        # index the first one, two and three characters of the name
        for length in xrange(1, min(len(lowered), 3) + 1):
            prefix = lowered[:length]
            searches[prefix] = search_sciteits(prefix)
    return searches
def load_all_sciteits():
    """Populate SubsciteitsByPartialName with up to ten subsciteit names
    per name prefix, most popular first."""
    prefix_map = {}
    sr_q = Subsciteit._query(Subsciteit.c.type == 'public',
                             Subsciteit.c._downs > 1,
                             sort=(desc('_downs'), desc('_ups')),
                             data=True)
    for sr in utils.fetch_things2(sr_q):
        lowered = sr.name.lower()
        for end in xrange(1, len(lowered) + 1):
            bucket = prefix_map.setdefault(lowered[:end], [])
            # the query is already sorted by popularity, so the first
            # ten names seen per prefix are the ten most popular
            if len(bucket) < 10:
                bucket.append(sr.name)

    for prefix, names in prefix_map.iteritems():
        SubsciteitsByPartialName._set_values(prefix, {'srs': names})
def cache_lists(): def _chop(srs): srs.sort(key=lambda s: s._downs, reverse=True) return srs[:limit] # bylang =:= dict((lang, over18_state) -> [Subsciteit]) # lang =:= all | lang() # nsfwstate =:= no_over18 | allow_over18 | only_over18 bylang = {} for sr in fetch_things2(Subsciteit._query(sort=desc('_date'), data=True)): aid = getattr(sr, 'author_id', None) if aid is not None and aid < 0: # skip special system sciteits like promos continue type = getattr(sr, 'type', 'private') if type not in ('public', 'restricted'): # skips sciteits that can't appear in the default list # because of permissions continue for lang in 'all', sr.lang: over18s = ['allow_over18'] if sr.over_18: over18s.append('only_over18') else: over18s.append('no_over18') for over18 in over18s: k = (lang, over18) bylang.setdefault(k, []).append(sr) # keep the lists small while we work if len(bylang[k]) > limit*2: bylang[k] = _chop(bylang[k]) for (lang, over18), srs in bylang.iteritems(): srs = _chop(srs) sr_tuples = map(lambda sr: (sr._downs, sr.allow_top, sr._id), srs) print "For %s/%s setting %s" % (lang, over18, map(lambda sr: sr.name, srs[:50])) SubsciteitPopularityByLanguage._set_values(lang, {over18: sr_tuples})
def get_sr_counts():
    """Return a dict of subsciteit fullname -> its _ups count."""
    counts = {}
    for sr in utils.fetch_things2(Subsciteit._query()):
        counts[sr._fullname] = sr._ups
    return counts
def get_sr_rss():
    """Uses fetch_things2 to get all the subsciteits and their rss feeds"""
    from r2.models import Subsciteit, FakeSubsciteit
    feeds = {}
    for sr in fetch_things2(Subsciteit._query()):
        # fake (virtual) subsciteits have no stored rss source
        if isinstance(sr, FakeSubsciteit):
            continue
        feeds[sr.name] = sr.rss_source
    return feeds