def new_promotion(is_self, title, content, author, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    # promoted links all live in the dedicated promos subreddit
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(
        is_self=is_self,
        title=title,
        content=content,
        author=author,
        sr=sr,
        ip=ip,
    )

    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')

    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if author.pref_show_promote is not False:
        author.pref_show_promote = True
        author._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
def __init__(self):
    # Build the subreddit navigation UI: a dropdown of the viewer's
    # subscribed reddits and a flat bar of recent + popular reddits.
    Wrapped.__init__(self)

    my_reddits = []
    sr_ids = Subreddit.user_subreddits(c.user if c.user_is_loggedin else None)
    if sr_ids:
        my_reddits = Subreddit._byID(sr_ids, True, return_dict = False)
        # case-insensitive alphabetical order for the dropdown
        my_reddits.sort(key = lambda sr: sr.name.lower())

    drop_down_buttons = []
    for sr in my_reddits:
        drop_down_buttons.append(SubredditButton(sr))

    #leaving the 'home' option out for now
    #drop_down_buttons.insert(0, NamedButton('home', sr_path = False,
    #                                        css_class = 'top-option',
    #                                        dest = '/'))
    drop_down_buttons.append(NamedButton('edit', sr_path = False,
                                         css_class = 'bottom-option',
                                         dest = '/reddits/'))
    self.sr_dropdown = SubredditMenu(drop_down_buttons,
                                     title = _('my reddits'),
                                     type = 'srdrop')

    # flat bar: recently-visited reddits first, then popular ones
    # (skipping any already shown as recent)
    pop_reddits = Subreddit.default_srs(c.content_langs, limit = 30)
    buttons = [SubredditButton(sr) for sr in c.recent_reddits]
    for sr in pop_reddits:
        if sr not in c.recent_reddits:
            buttons.append(SubredditButton(sr))

    self.sr_bar = NavMenu(buttons, type='flatlist', separator = '-',
                          _id = 'sr-bar')
def new_promotion(title, url, selftext, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(Subreddit.get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    l.sendreplies = True
    PromotionLog.add(l, 'promotion created')

    # self-posts are submitted with the sentinel url 'self': rewrite the
    # url to the link's own permalink and attach the selftext body
    if url == 'self':
        l.url = l.make_permalink_slow()
        l.is_self = True
        l.selftext = selftext

    l._commit()

    update_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
def get_subreddit(self):
    """checks if the current url refers to a subreddit and returns
    that subreddit object.  The cases here are:

      * the hostname is unset or is g.domain, in which case it looks
        for /r/XXXX or /subreddits.  The default in this case is
        Default.
      * the hostname is a cname to a known subreddit.

    On failure to find a subreddit, returns None.
    """
    from pylons import g
    from r2.models import Subreddit, Sub, NotFound, DefaultSR
    try:
        if (not self.hostname or
                is_subdomain(self.hostname, g.domain) or
                self.hostname.startswith(g.domain)):
            if self.path.startswith('/r/'):
                # /r/<name>/... -> the named subreddit
                return Subreddit._by_name(self.path.split('/')[2])
            elif self.path.startswith(('/subreddits/', '/reddits/')):
                return Sub
            else:
                return DefaultSR()
        elif self.hostname:
            # cname lookup
            return Subreddit._by_domain(self.hostname)
    except NotFound:
        pass
    return None
def new_report(thing):
    """Insert a freshly reported link or comment into its subreddit's
    reported-items query; other thing types are ignored."""
    if not isinstance(thing, (Link, Comment)):
        return
    subreddit = Subreddit._byID(thing.sr_id)
    if isinstance(thing, Link):
        pending = [get_reported_links(subreddit)]
    else:
        pending = [get_reported_comments(subreddit)]
    add_queries(pending, insert_items = thing)
def get_test_user(self):
    """Return account #1 for use in tests, making sure it has an email
    address and the default subreddit subscriptions."""
    user = Account._byID(1, data=True)
    if not user.email:
        user.email = '*****@*****.**'
        user._commit()
    c.content_langs = ['en']
    Subreddit.subscribe_defaults(user)
    return user
def backfill(after=None):
    """Run backfill_sr over every subreddit, oldest first.

    Pass a subreddit name as `after` to resume a previous run from just
    past that subreddit.
    """
    query = Subreddit._query(sort=asc('_date'))
    if after:
        query = query._after(Subreddit._by_name(after))
    for subreddit in fetch_things2(query):
        backfill_sr(subreddit)
def srids_from_site(user, site):
    """Return the set of subreddit ids to target for this user/site pair.

    For a real subreddit or multireddit, the site decides.  Otherwise
    fall back to the user's subscriptions (or the defaults for logged-out
    users); "" stands in for the frontpage.
    """
    if not isinstance(site, FakeSubreddit):
        return {site._id}
    if isinstance(site, MultiReddit):
        return set(site.sr_ids)
    if user and not isinstance(user, FakeAccount):
        subscriber = user
    else:
        subscriber = None
    return set(Subreddit.user_subreddits(subscriber, ids=True) + [""])
def ensure_subreddit(name, author): """Look up or create a subreddit and return it.""" try: sr = Subreddit._by_name(name) print ">> found /r/{}".format(name) return sr except NotFound: print ">> creating /r/{}".format(name) sr = Subreddit._new(name=name, title="/r/{}".format(name), author_id=author._id, lang="en", ip="127.0.0.1") sr._commit() return sr
def get_promotion_list(user, site):
    """Return the PromoTuples eligible for this user/site combination."""
    # a concrete subreddit or multi determines the targeting on its own;
    # otherwise use the viewer's subscriptions ("" is the frontpage).
    if not isinstance(site, FakeSubreddit):
        srids = {site._id}
    elif isinstance(site, MultiReddit):
        srids = set(site.sr_ids)
    elif user and not isinstance(user, FakeAccount):
        srids = set(Subreddit.reverse_subscriber_ids(user) + [""])
    else:
        srids = set(Subreddit.user_subreddits(None, ids=True) + [""])

    return [PromoTuple(*t) for t in get_promotion_list_cached(srids)]
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")

    # only idempotent requests may be served a stale cached subreddit
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage
    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        # ad-hoc multireddit of the form sr1+sr2+...
        sr_names = sr_name.split('+')
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown name: send well-formed names to search, 404 the rest
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def srnames_from_site(user, site):
    """Return the set of subreddit names to target for this user/site.

    A real subreddit or multi supplies its own names; otherwise use the
    viewer's subscriptions (or the defaults) plus the frontpage.
    """
    if not isinstance(site, FakeSubreddit):
        return {site.name}
    if isinstance(site, MultiReddit):
        return {sr.name for sr in site.srs}

    subscriber = user if (user and not isinstance(user, FakeAccount)) else None
    names = {sr.name for sr in Subreddit.user_subreddits(subscriber, ids=False)}
    names.add(Frontpage.name)
    return names
def subs_contribs(sr_name = 'betateam'):
    """Convert all subscribers of a given subreddit to contributors.
    Useful for forming opt-in beta teams"""
    from r2.models import Subreddit, SRMember
    sr = Subreddit._by_name(sr_name)
    q = SRMember._query(SRMember.c._thing1_id == sr._id)
    # BUG FIX: the original iterated an undefined name 'rels'; the
    # SRMember query built above is what was meant to be iterated.
    for rel in q:
        if rel._name == 'subscriber':
            sr.add_contributor(rel._thing2)
            # refresh the cached special-reddits list for the account
            Subreddit.special_reddits(rel._thing2, 'contributor',
                                      _update=True)
def get_promotion_list(user, site):
    """Return cached promotions eligible for this user/site combination."""
    # site is specified, pick an ad from that site
    if not isinstance(site, FakeSubreddit):
        srids = set([site._id])
    elif isinstance(site, MultiReddit):
        srids = set(site.sr_ids)
    # site is Fake, user is not. Pick based on their subscriptions.
    elif user and not isinstance(user, FakeAccount):
        srids = set(Subreddit.reverse_subscriber_ids(user) + [""])
    # both site and user are "fake" -- get the default subscription list
    else:
        srids = set(Subreddit.user_subreddits(None, True) + [""])

    return get_promotions_cached(srids)
def assign_trial(account, ip, slash16):
    """Pick a trial defendant for this would-be juror.

    Filters candidate defendants to ones in subreddits the account is
    subscribed to, can submit to, and hasn't already voted on, then runs
    voir_dire; returns the chosen defendant or None.
    """
    from r2.models import Jury, Subreddit, Trial
    from r2.lib.db import queries

    defendants_voted_upon = []
    defendants_assigned_to = []
    for jury in Jury.by_account(account):
        defendants_assigned_to.append(jury._thing2_id)
        # '0' means "assigned but hasn't voted yet"
        if jury._name != '0':
            defendants_voted_upon.append(jury._thing2_id)

    subscribed_sr_ids = Subreddit.user_subreddits(account, ids=True,
                                                  limit=None)

    # Pull defendants, except ones which already have lots of juryvotes
    defs = Trial.all_defendants(quench=True)

    # Filter out defendants outside this user's subscribed SRs
    defs = filter(lambda d: d.sr_id in subscribed_sr_ids, defs)

    # Dictionary of sr_id => SR for all defendants' SRs
    srs = Subreddit._byID(set([d.sr_id for d in defs]))

    # Dictionary of sr_id => eligibility bool
    submit_srs = {}
    for sr_id, sr in srs.iteritems():
        submit_srs[sr_id] = sr.can_submit(account) and not sr._spam

    # Filter out defendants with ineligible SRs
    defs = filter(lambda d: submit_srs.get(d.sr_id), defs)

    likes = queries.get_likes(account, defs)

    if not g.debug:
        # Filter out things that the user has upvoted or downvoted
        defs = filter(lambda d: likes.get((account, d)) is None, defs)

    # Prefer oldest trials
    defs.sort(key=lambda x: x._date)

    for defendant in defs:
        sr = srs[defendant.sr_id]

        if voir_dire(account, ip, slash16, defendants_voted_upon, defendant,
                     sr):
            # only create the Jury rel if not already assigned
            if defendant._id not in defendants_assigned_to:
                j = Jury._new(account, defendant)
            return defendant

    return None
def run(self, path):
    """Resolve a site path into a Subreddit or MultiReddit.

    Handles /user/x/m/y multis, ad-hoc sr1+sr2 multis, and plain
    subreddit names; sets INVALID_SITE_PATH on unknown names.
    """
    if not self.required and not path:
        return

    adhoc_multi_rx = is_adhoc_multi_rx.match(path)
    if is_multi_rx.match(path):
        # BUG FIX: kinds must be a tuple -- ("m") is just the string "m";
        # a one-element tuple needs a trailing comma.
        return VMultiByPath(self.param, kinds=("m",)).run(path)
    elif adhoc_multi_rx:
        # ad-hoc multi of the form sr1+sr2+...
        sr_strings = adhoc_multi_rx.groups()[0].split("+")
        srs = Subreddit._by_name(sr_strings, stale=True).values()
        return MultiReddit(path, srs)
    else:
        try:
            return Subreddit._by_name(path)
        except NotFound:
            self.set_error(errors.INVALID_SITE_PATH)
def get_predicted_pageviews(srs, start, end):
    """Predict daily promo pageviews for each sr over [start, end].

    Returns {sr_name: {date: pageviews}}, or just {date: pageviews} when
    a single subreddit (not a list) was passed in.
    """
    srs, is_single = tup(srs, ret_is_single=True)
    sr_names = [sr.name for sr in srs]

    # default subreddits require a different inventory factor
    content_langs = [g.site_lang]
    default_srids = Subreddit.top_lang_srs(content_langs,
                                           limit=g.num_default_reddits,
                                           filter_allow_top=True,
                                           over18=False,
                                           ids=True)

    # prediction does not vary by date
    daily_inventory = PromoMetrics.get(MIN_DAILY_CASS_KEY, sr_names=sr_names)
    dates = get_date_range(start, end)
    ret = {}
    for sr in srs:
        if not isinstance(sr, FakeSubreddit) and sr._id in default_srids:
            factor = DEFAULT_INVENTORY_FACTOR
        else:
            factor = INVENTORY_FACTOR
        sr_daily_inventory = daily_inventory.get(sr.name, 0) * factor
        sr_daily_inventory = int(sr_daily_inventory)
        # same predicted value for every date in the range
        ret[sr.name] = dict.fromkeys(dates, sr_daily_inventory)

    if is_single:
        return ret[srs[0].name]
    else:
        return ret
def add_all_srs():
    """Adds every listing query for every subreddit to the queue."""
    query = Subreddit._query(sort = asc('_date'))
    for subreddit in fetch_things2(query):
        # time-independent listings
        add_queries(all_queries(get_links, subreddit,
                                ('hot', 'new', 'old'), ['all']))
        # time-filtered listings
        add_queries(all_queries(get_links, subreddit,
                                ('top', 'controversial'), db_times.keys()))
        # the toplinks listing has no sort/time variants
        add_queries([get_links(subreddit, 'toplinks', 'all')])
def update_flair_counts():
    """Recount button-press flair classes for the thebutton subreddit and
    store the tallies on the subreddit itself."""
    flairs = Counter()
    user_ids = []

    sr = Subreddit._byID(g.live_config["thebutton_srid"], data=True)
    raw = AccountsActiveBySR._cf.xget(sr._id36)
    for uid, _ in raw:
        user_ids.append(uid)

    # resolve accounts in chunks to keep memory/lookup sizes bounded
    for user_chunk in in_chunks(user_ids, size=100):
        users = Account._byID36(user_chunk, data=True, return_dict=False)
        for user in users:
            flair = user.flair_css_class(sr._id)
            if not flair:
                # accounts that predate the cutoff could have pressed but
                # didn't; newer accounts never had the chance
                if user._date < ACCOUNT_CREATION_CUTOFF:
                    flair = "no-press"
                else:
                    flair = "cant-press"
            flairs[flair] += 1

    if 'cheater' in flairs:
        del flairs['cheater']

    # sort flair classes alphabetically, forcing 'no-press' last
    sr.flair_counts = sorted(
        flairs.iteritems(),
        key=lambda x: 'z' if x[0] == 'no-press' else x[0],
        reverse=True)
    sr._commit()
def update_karmas(): for pair in to_update(): user = Account._byID(pair[0], True) sr = Subreddit._byID(pair[1], True) print user.name, sr.name user.incr_karma('comment', sr, 20)
def filter_links(links, filter_spam = False, multiple = True):
    """Filter links to those the current user may see, preferring links
    in subscribed subreddits and then hotness; returns a list, or a
    single link when multiple=False, or None if nothing survives."""
    # run the list through a builder to remove any that the user
    # isn't allowed to see
    from pylons import c
    from r2.models import IDBuilder, Link, Subreddit, NotFound
    links = IDBuilder([link._fullname for link in links],
                      skip = False).get_items()[0]
    if not links:
        return

    if filter_spam:
        # first, try to remove any spam
        links_nonspam = [link for link in links if not link._spam]
        if links_nonspam:
            links = links_nonspam

    # if it occurs in one or more of their subscriptions, show them
    # that one first
    subs = set(Subreddit.user_subreddits(c.user, limit = None))

    def cmp_links(a, b):
        # subscribed-subreddit links sort before others; ties broken by
        # hotness, descending
        if a.sr_id in subs and b.sr_id not in subs:
            return -1
        elif a.sr_id not in subs and b.sr_id in subs:
            return 1
        else:
            return cmp(b._hot, a._hot)

    links = sorted(links, cmp = cmp_links)

    # among those, show them the hottest one
    return links if multiple else links[0]
def get_adzerk_promo(user, site):
    """Request a promo from adzerk for this user/site; returns a wrapped
    promoted link with impression/click tracking attached, or None."""
    srids = promote.has_live_promos(user, site)
    if not srids:
        return

    # '' is the placeholder id used for the frontpage
    if '' in srids:
        srnames = [Frontpage.name]
        srids.remove('')
    else:
        srnames = []

    srs = Subreddit._byID(srids, data=True, return_dict=False)
    srnames.extend([sr.name for sr in srs])
    response = adzerk_request(srnames)

    if not response:
        return

    promo_tuples = [promote.PromoTuple(response.link, 1., response.campaign)]
    builder = CampaignBuilder(promo_tuples,
                              keep_fn=organic.keep_fresh_links)
    promoted_links = builder.get_items()[0]
    if promoted_links:
        w = promoted_links[0]
        # adzerk-provided tracking urls
        w.adserver_imp_pixel = response.imp_pixel
        w.adserver_click_url = response.click_url
        return w
def migrate_scan_adjustments(self):
    """Fold every legacy KarmaAdjustment row into self.new_values,
    keyed by account id and a per-subreddit ups/downs adjustment key."""
    for adj in KarmaAdjustment._query(data=True):
        sr = Subreddit._byID(adj.sr_id)
        direction = 'ups' if adj.amount >= 0 else 'downs'
        key = 'karma_{0}_adjustment_{1}'.format(direction, sr.name)
        self.new_values[adj.account_id][key] += abs(adj.amount)
def new_promotion(title, url, user, ip):
    """
    Creates a new promotion with the provided title, etc, and sets it
    status to be 'unpaid'.
    """
    sr = Subreddit._byID(get_promote_srid())
    l = Link._submit(title, url, user, sr, ip)
    l.promoted = True
    l.disable_comments = False
    PromotionLog.add(l, 'promotion created')
    l._commit()

    # set the status of the link, populating the query queue
    # (trusted sponsors skip straight to 'accepted')
    if c.user_is_sponsor or user.trusted_sponsor:
        set_promote_status(l, PROMOTE_STATUS.accepted)
    else:
        set_promote_status(l, PROMOTE_STATUS.unpaid)

    # the user has posted a promotion, so enable the promote menu unless
    # they have already opted out
    if user.pref_show_promote is not False:
        user.pref_show_promote = True
        user._commit()

    # notify of new promo
    emailer.new_promo(l)
    return l
def get_reported(sr):
    """Return the reported-links query for a subreddit; for mod/contrib
    pseudo-subreddits, merge the queries of every member subreddit."""
    if not isinstance(sr, ModContribSR):
        return get_reported_links(sr)

    member_srs = Subreddit._byID(sr.sr_ids(), return_dict=False)
    member_queries = [get_reported_links(member) for member in member_srs]
    return merge_results(*member_queries)
def normalized_hot_cached(sr_ids):
    """Return fullnames of recent hot links across the given subreddits,
    with scores normalized per-subreddit so one busy subreddit can't
    drown out the others."""
    results = []
    srs = Subreddit._byID(sr_ids, data = True, return_dict = False)
    for sr in srs:
        #items = get_hot(sr)
        # only consider links newer than the hot-page age cutoff
        items = filter(lambda l: l._date > utils.timeago('%d day'
                                                         % g.HOT_PAGE_AGE),
                       get_hot(sr))

        if not items:
            continue

        top_score = max(items[0]._hot, 1)

        top, rest = items[:2], items[2:]

        if top:
            # jitter the top two so the same link isn't always first
            normals = [l._hot / top_score for l in top]
            results.extend((l, random.choice(normals)) for l in top)
            #random.shuffle(normals)
            #results.extend((l, normals.pop()) for l in top)

        if rest:
            results.extend((l, l._hot / top_score) for l in rest)

    results.sort(key = lambda x: (x[1], x[0]._hot), reverse = True)
    return [l[0]._fullname for l in results]
def get_recommendations(srs,
                        count=10,
                        source=SRC_MULTIREDDITS,
                        to_omit=None,
                        match_set=True,
                        over18=False):
    """Return subreddits recommended if you like the given subreddits.

    Args:
    - srs is one Subreddit object or a list of Subreddits
    - count is total number of results to return
    - source is a prefix telling which set of recommendations to use
    - to_omit is a single or list of subreddit id36s that should not be
      be included. (Useful for omitting recs that were already rejected.)
    - match_set=True will return recs that are similar to each other, useful
      for matching the "theme" of the original set
    - over18 content is filtered unless over18=True or one of the original
      srs is over18
    """
    srs = tup(srs)
    to_omit = tup(to_omit) if to_omit else []

    # fetch more recs than requested because some might get filtered out
    rec_id36s = SRRecommendation.for_srs([sr._id36 for sr in srs],
                                         to_omit,
                                         count * 2,
                                         source,
                                         match_set=match_set)

    # always check for private subreddits at runtime since type might change
    rec_srs = Subreddit._byID36(rec_id36s, return_dict=False)
    filtered = [sr for sr in rec_srs if is_visible(sr)]

    # don't recommend adult srs unless one of the originals was over_18
    if not over18 and not any(sr.over_18 for sr in srs):
        filtered = [sr for sr in filtered if not sr.over_18]

    return filtered[:count]
def create_sr(self):
    """Create a private subreddit for this object, trying candidate names
    until one is free; returns the subreddit, or None on failure."""
    subreddit = None
    print "attempting to create sr for %s" % self.name
    for name in self._generate_sr_name():
        try:
            subreddit = Subreddit._new(
                name=name,
                title=self.name[:100],
                author_id=Account.system_user()._id,
                ip='0.0.0.0',
                type='private',
            )
            break
        except SubredditExists:
            # name taken -- try the next generated candidate
            print 'subreddit %s already exists' % name
            continue
        except ValueError:
            # the generated name is not a valid subreddit name
            print 'bad subreddit name, giving up: %s' % name
            return subreddit
    else:
        # exhausted all candidate names without a successful create
        print "gave up attempting to create sr for %s" % self.name
        return subreddit

    self.subreddit_name = subreddit.name
    self._commit()
    return subreddit
def get_links(cls, event_id):
    """Return wrapped, displayable links related to a liveupdate event,
    ordered by comment count descending; skips spam/deleted links,
    links that opted out of liveupdate, and private subreddits."""
    link_ids = cls._get_related_link_ids(event_id)
    links = Link._byID(link_ids, data=True, return_dict=False)
    links.sort(key=lambda L: L.num_comments, reverse=True)

    sr_ids = set(L.sr_id for L in links)
    subreddits = Subreddit._byID(sr_ids, data=True)

    wrapped = []
    for link in links:
        w = Wrapped(link)

        if w._spam or w._deleted:
            continue

        if not getattr(w, "allow_liveupdate", True):
            continue

        w.subreddit = subreddits[link.sr_id]

        # ideally we'd check if the user can see the subreddit, but by
        # doing this we keep everything user unspecific which makes caching
        # easier.
        if w.subreddit.type == "private":
            continue

        comment_label = ungettext("comment", "comments", link.num_comments)
        w.comments_label = strings.number_label % dict(
            num=link.num_comments, thing=comment_label)

        wrapped.append(w)
    return wrapped
def POST_report(self, form, jquery, report_type):
    """Report the thread for violating the rules of reddit."""
    if form.has_errors("type", errors.INVALID_OPTION):
        return

    # spammers and accounts whose reports are ignored may not report
    if c.user._spam or c.user.ignorereports:
        return

    # each account may only report a given event once
    already_reported = LiveUpdateReportsByAccount.get_report(
        c.user, c.liveupdate_event)
    if already_reported:
        self.abort403()

    LiveUpdateReportsByAccount.create(
        c.user, c.liveupdate_event, type=report_type)
    queries.report_event(c.liveupdate_event)

    try:
        default_subreddit = Subreddit._by_name(g.default_sr)
    except NotFound:
        pass
    else:
        # rate-limit the admin notification to one per event per hour
        not_yet_reported = g.cache.add(
            "lu_reported_" + str(c.liveupdate_event._id), 1, time=3600)
        if not_yet_reported:
            send_system_message(
                default_subreddit,
                subject="live thread reported",
                body=REPORTED_MESSAGE % {
                    "title": c.liveupdate_event.title,
                    "url": "/live/" + c.liveupdate_event._id,
                    "reason": pages.REPORT_TYPES[report_type],
                },
            )
def make_daily_promotions(offset=0, test=False):
    """
    Arguments:
      offset - number of days after today to get the schedule for
      test - if True, new schedule will be generated but not launched

    Raises Exception with list of campaigns that had errors if there
    were any
    """
    scheduled_adweights, error_campaigns = get_scheduled(offset)
    current_adweights_byid = get_live_promotions([LiveAdWeights.ALL_ADS])
    current_adweights = current_adweights_byid[LiveAdWeights.ALL_ADS]

    link_names = [aw.link for aw in itertools.chain(scheduled_adweights,
                                                    current_adweights)]
    links = Link._by_fullname(link_names, data=True)

    camp_names = [aw.campaign for aw in itertools.chain(scheduled_adweights,
                                                        current_adweights)]
    campaigns = PromoCampaign._by_fullname(camp_names, data=True)

    srs = Subreddit._by_name([camp.sr_name for camp in campaigns.itervalues()
                              if camp.sr_name])

    # finish any promo that is live now but absent from the new schedule
    expired_links = ({aw.link for aw in current_adweights} -
                     {aw.link for aw in scheduled_adweights})
    for link_name in expired_links:
        link = links[link_name]
        if is_promoted(link):
            if test:
                print "unpromote", link_name
            else:
                # update the query queue
                set_promote_status(link, PROMOTE_STATUS.finished)
                emailer.finished_promo(link)

    by_srid = defaultdict(list)
    for adweight in scheduled_adweights:
        link = links[adweight.link]
        campaign = campaigns[adweight.campaign]
        if campaign.sr_name:
            sr = srs[campaign.sr_name]
            sr_id = sr._id
            sr_over_18 = sr.over_18
        else:
            # campaigns without a subreddit target the frontpage ('')
            sr_id = ''
            sr_over_18 = False

        if sr_over_18:
            if test:
                print "over18", link._fullname
            else:
                link.over_18 = True
                link._commit()

        if is_accepted(link) and not is_promoted(link):
            if test:
                print "promote2", link._fullname
            else:
                # update the query queue
                set_promote_status(link, PROMOTE_STATUS.promoted)
                emailer.live_promo(link)

        by_srid[sr_id].append(adweight)

    if not test:
        set_live_promotions(by_srid)
        _mark_promos_updated()
    else:
        print by_srid

    finalize_completed_campaigns(daysago=offset+1)
    hooks.get_hook('promote.make_daily_promotions').call(offset=offset)

    # after launching as many campaigns as possible, raise an exception to
    # report any error campaigns. (useful for triggering alerts in irc)
    if error_campaigns:
        raise Exception("Some scheduled campaigns could not be added to daily "
                        "promotions: %r" % error_campaigns)
def test_run_srs(*sr_names):
    '''Inject Subreddits by name into the index'''
    subreddits = Subreddit._by_name(sr_names).values()
    uploader = SubredditUploader(g.CLOUDSEARCH_SUBREDDIT_DOC_API,
                                 things=subreddits)
    return uploader.inject()
def get_tree(self):
    """Return the message tree: one conversation when a parent message is
    set, otherwise all modmail for subreddits this user moderates."""
    if self.parent:
        return conversation(self.user, self.parent)
    moderated_sr_ids = Subreddit.reverse_moderator_ids(self.user)
    return moderator_messages(moderated_sr_ids)
def get_discovery_srid36s():
    """Get list of srs that help people discover other srs."""
    by_name = Subreddit._by_name(g.live_config['discovery_srs'])
    return [subreddit._id36 for subreddit in by_name.itervalues()]
def get_sr_counts():
    """Map every subreddit fullname to its subscriber (_ups) count."""
    query = Subreddit._query(sort=desc("_date"))
    return {sr._fullname: sr._ups for sr in utils.fetch_things2(query)}
def get_dfp_subreddit():
    """Return the subreddit that houses promoted links."""
    from r2.models import Subreddit
    promo_srid = Subreddit.get_promote_srid()
    return Subreddit._byID(promo_srid)
def preprocess_query(self, query):
    # Expand search for /r/subreddit to include subreddit name.
    parts = query.strip('/').split('/')
    looks_like_sr_path = (len(parts) == 2 and parts[0] == 'r'
                          and Subreddit.is_valid_name(parts[1]))
    if looks_like_sr_path:
        query = '"%s" | %s' % (query, parts[1])
    return query
def add_props(cls, user, wrapped):
    """Attach display data (moderator, subreddit, target thing/author,
    and html-only buttons/colors) to wrapped mod-action items."""
    from r2.lib.db.thing import Thing
    from r2.lib.menus import QueryButton
    from r2.lib.pages import WrappedUser

    target_names = {item.target_fullname for item in wrapped
                    if hasattr(item, "target_fullname")}
    targets = Thing._by_fullname(target_names, data=True)

    # get moderators
    moderators = Account._byID36({item.mod_id36 for item in wrapped},
                                 data=True)

    # get authors for targets that are Links or Comments
    target_author_names = {target.author_id for target in targets.values()
                           if hasattr(target, "author_id")}
    target_authors = Account._byID(target_author_names, data=True)

    # get parent links for targets that are Comments
    parent_link_names = {target.link_id for target in targets.values()
                         if hasattr(target, "link_id")}
    parent_links = Link._byID(parent_link_names, data=True)

    # get subreddits
    srs = Subreddit._byID36({item.sr_id36 for item in wrapped}, data=True)

    for item in wrapped:
        item.moderator = moderators[item.mod_id36]
        item.subreddit = srs[item.sr_id36]
        item.text = cls._text.get(item.action, '')
        item.details = item.get_extra_text()
        item.target = None
        item.target_author = None

        if hasattr(item, "target_fullname") and item.target_fullname:
            item.target = targets[item.target_fullname]

            if hasattr(item.target, "author_id"):
                author_name = item.target.author_id
                item.target_author = target_authors[author_name]

            if hasattr(item.target, "link_id"):
                parent_link_name = item.target.link_id
                item.parent_link = parent_links[parent_link_name]

    if c.render_style == "html":
        request_path = request.path

        # make wrapped users for targets that are accounts
        user_targets = filter(lambda target: isinstance(target, Account),
                              targets.values())
        wrapped_user_targets = {user._fullname: WrappedUser(user)
                                for user in user_targets}

        for item in wrapped:
            if isinstance(item.target, Account):
                user_name = item.target._fullname
                item.wrapped_user_target = wrapped_user_targets[user_name]

            # filter-by-action button
            css_class = 'modactions %s' % item.action
            action_button = QueryButton('', item.action,
                                        query_param='type',
                                        css_class=css_class)
            action_button.build(base_path=request_path)
            item.action_button = action_button

            # filter-by-moderator button
            mod_button = QueryButton(item.moderator.name,
                                     item.moderator.name,
                                     query_param='mod')
            mod_button.build(base_path=request_path)
            item.mod_button = mod_button

            if isinstance(c.site, ModSR) or isinstance(c.site, MultiReddit):
                item.bgcolor = 'rgb(%s,%s,%s)' % cls.get_rgb(item)
            else:
                item.bgcolor = "rgb(255,255,255)"
def subreddits_with_promos(cls):
    """Return the names of subreddits with live ads, sorted
    case-insensitively."""
    live_ids = LiveAdWeights.get_live_subreddits()
    subreddits = Subreddit._byID(live_ids, return_dict=False)
    return sorted((subreddit.name for subreddit in subreddits),
                  key=lambda name: name.lower())
def test_run_srs(*sr_names):
    '''Inject Subreddits by name into the index'''
    subreddits = Subreddit._by_name(sr_names).values()
    return SolrSubredditUploader(things=subreddits).inject()
def should_index(self, thing):
    """Index everything except the dedicated promos subreddit."""
    promo_srid = Subreddit.get_promote_srid()
    return thing._id != promo_srid
def add_props(cls, user, wrapped):
    """Attach display info (nav buttons, target link/comment/account
    descriptions, row colors) to wrapped ModAction items."""
    from r2.lib.menus import NavButton
    from r2.lib.db.thing import Thing
    from r2.lib.pages import WrappedUser
    from r2.lib.filters import _force_unicode

    # truncation width for link/comment titles shown in the action text
    TITLE_MAX_WIDTH = 50

    request_path = request.path

    target_fullnames = [item.target_fullname for item in wrapped
                        if hasattr(item, 'target_fullname')]
    targets = Thing._by_fullname(target_fullnames, data=True)
    authors = Account._byID(
        [t.author_id for t in targets.values() if hasattr(t, 'author_id')],
        data=True)
    links = Link._byID(
        [t.link_id for t in targets.values() if hasattr(t, 'link_id')],
        data=True)
    sr_ids = set([t.sr_id for t in targets.itervalues()
                  if hasattr(t, 'sr_id')] +
                 [w.sr_id for w in wrapped])
    subreddits = Subreddit._byID(sr_ids, data=True)

    # Assemble target links
    target_links = {}
    target_accounts = {}
    for fullname, target in targets.iteritems():
        if isinstance(target, Link):
            author = authors[target.author_id]
            title = _force_unicode(target.title)
            if len(title) > TITLE_MAX_WIDTH:
                short_title = title[:TITLE_MAX_WIDTH] + '...'
            else:
                short_title = title
            text = '%(link)s "%(title)s" %(by)s %(author)s' % {
                'link': _('link'),
                'title': short_title,
                'by': _('by'),
                'author': author.name}
            path = target.make_permalink(subreddits[target.sr_id])
            target_links[fullname] = (text, path, title)
        elif isinstance(target, Comment):
            author = authors[target.author_id]
            link = links[target.link_id]
            title = _force_unicode(link.title)
            if len(title) > TITLE_MAX_WIDTH:
                short_title = title[:TITLE_MAX_WIDTH] + '...'
            else:
                short_title = title
            text = '%(comment)s %(by)s %(author)s %(on)s "%(title)s"' % {
                'comment': _('comment'),
                'by': _('by'),
                'author': author.name,
                'on': _('on'),
                'title': short_title}
            path = target.make_permalink(link, subreddits[link.sr_id])
            target_links[fullname] = (text, path, title)
        elif isinstance(target, Account):
            target_accounts[fullname] = WrappedUser(target)

    for item in wrapped:
        # Can I move these buttons somewhere else? Not great to have
        # request stuff in here
        css_class = 'modactions %s' % item.action
        item.button = NavButton('', item.action, opt='type',
                                css_class=css_class)
        item.button.build(base_path=request_path)

        mod_name = item.author.name
        item.mod = NavButton(mod_name, mod_name, opt='mod')
        item.mod.build(base_path=request_path)

        item.text = ModAction._text.get(item.action, '')
        item.details = item.get_extra_text()

        if hasattr(item, 'target_fullname') and item.target_fullname:
            target = targets[item.target_fullname]
            if isinstance(target, Account):
                item.target_wrapped_user = target_accounts[
                    item.target_fullname]
            elif isinstance(target, Link) or isinstance(target, Comment):
                item.target_text, item.target_path, item.target_title = \
                    target_links[item.target_fullname]

        item.bgcolor = ModAction.get_rgb(item.sr_id)
        item.sr_name = subreddits[item.sr_id].name
        item.sr_path = subreddits[item.sr_id].path

    Printable.add_props(user, wrapped)
def get_live_subreddit_stylesheets():
    """List all currently visible subreddit stylesheet files."""
    query = Subreddit._query(sort=desc("_date"))
    for subreddit in fetch_things2(query):
        if subreddit.stylesheet_is_static:
            yield subreddit.static_stylesheet_name
def get_recommended_content(prefs, src, settings):
    """Get a mix of content from subreddits recommended for someone with
    the given preferences (likes and dislikes.)

    Returns a list of ExploreItems.
    """
    # numbers chosen empirically to give enough results for explore page
    num_liked = 10  # how many liked srs to use when generating the recs
    num_recs = 20  # how many recommended srs to ask for
    num_discovery = 2  # how many discovery-related subreddits to mix in
    num_rising = 4  # how many rising links to mix in
    num_items = 20  # total items to return
    rising_items = discovery_items = comment_items = hot_items = []

    # make a list of srs that shouldn't be recommended
    default_srid36s = [to36(srid) for srid in Subreddit.default_subreddits()]
    omit_srid36s = list(prefs.likes.union(prefs.dislikes,
                                          prefs.recent_views,
                                          default_srid36s))

    # pick random subset of the user's liked srs
    liked_srid36s = random_sample(prefs.likes,
                                  num_liked) if settings.personalized else []

    # pick random subset of discovery srs
    candidates = set(get_discovery_srid36s()).difference(prefs.dislikes)
    discovery_srid36s = random_sample(candidates, num_discovery)

    # multiget subreddits
    to_fetch = liked_srid36s + discovery_srid36s
    srs = Subreddit._byID36(to_fetch)
    liked_srs = [srs[sr_id36] for sr_id36 in liked_srid36s]
    discovery_srs = [srs[sr_id36] for sr_id36 in discovery_srid36s]

    if settings.personalized:
        # generate recs from srs we know the user likes
        recommended_srs = get_recommendations(liked_srs,
                                              count=num_recs,
                                              to_omit=omit_srid36s,
                                              source=src,
                                              match_set=False,
                                              over18=settings.nsfw)
        random.shuffle(recommended_srs)

        # split list of recommended srs in half
        midpoint = len(recommended_srs) / 2
        srs_slice1 = recommended_srs[:midpoint]
        srs_slice2 = recommended_srs[midpoint:]

        # get hot links plus top comments from one half
        comment_items = get_comment_items(srs_slice1, src)

        # just get hot links from the other half
        hot_items = get_hot_items(srs_slice2, TYPE_HOT, src)

    if settings.discovery:
        # get links from subreddits dedicated to discovery
        discovery_items = get_hot_items(discovery_srs, TYPE_DISCOVERY, 'disc')

    if settings.rising:
        # grab some (non-personalized) rising items
        omit_sr_ids = set(int(id36, 36) for id36 in omit_srid36s)
        rising_items = get_rising_items(omit_sr_ids, count=num_rising)

    # combine all items and randomize order to get a mix of types
    all_recs = list(chain(rising_items,
                          comment_items,
                          discovery_items,
                          hot_items))
    random.shuffle(all_recs)

    # make sure subreddits aren't repeated
    seen_srs = set()
    recs = []
    for r in all_recs:
        if not settings.nsfw and r.is_over18():
            continue
        if not is_visible(r.sr):  # could happen in rising items
            continue
        if r.sr._id not in seen_srs:
            recs.append(r)
            seen_srs.add(r.sr._id)
        if len(recs) >= num_items:
            break
    return recs
def get_sr_counts(period=count_period):
    """Map each subreddit fullname to an initial (count, id) pair.

    BUG FIX: the original comprehension iterated an undefined name
    'links' (and accessed 'l.sr_id', which a Subreddit does not have),
    raising NameError on every call.  Iterate the subreddit query built
    just above instead.  'period' is kept for interface compatibility
    though it is unused.
    """
    srs = Subreddit._query()
    return dict((sr._fullname, (0, sr._id)) for sr in srs)
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com.

    num_links: links created per modeled subreddit.
    num_comments / num_votes: base counts, fuzzed per link/thing by
        fuzz_number().  Requires network access to model real subreddits.
    """

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print
    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    # per-subreddit settings applied after creation
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print
    print ">>>> Generating test data"
    print ">>> Accounts"
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    # top up to 50 accounts with generated usernames if the DB has fewer
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            url = sr_model.generate_link_url()
            # the modeler signals a self post with the literal url "self"
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            # None means a top-level comment; later comments may reply
            # to any earlier one, producing a random tree
            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)
            cast_vote(voter, thing, direction)

    # wait for queued jobs (new_link/new_comment/votes) to finish
    amqp.worker.join()

    srs = [Subreddit._by_name(n) for n in ("pics", "videos", "askhistorians")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
    LocalizedFeaturedSubreddits.set_global_srs([Subreddit._by_name('pics')])
def pre(self):
    """Per-request setup run before every action on this controller.

    Order matters throughout: cookies must be read before auth, auth
    before admin/OTP checks, language before the random-subreddit
    redirects, and site resolution before the ban/privacy/over-18 gates.
    """
    record_timings = g.admin_cookie in request.cookies or g.debug
    admin_bar_eligible = response.content_type == 'text/html'
    if admin_bar_eligible and record_timings:
        g.stats.start_logging_timings()

    # set up stuff needed in base templates at error time here.
    c.js_preload = JSPreload()

    MinimalController.pre(self)

    set_cnameframe()

    # populate c.cookies unless we're on the unsafe media_domain
    if request.host != g.media_domain or g.media_domain == g.domain:
        cookie_counts = collections.Counter()
        try:
            for k, v in request.cookies.iteritems():
                # minimalcontroller can still set cookies
                if k not in c.cookies:
                    # we can unquote even if it's not quoted
                    c.cookies[k] = Cookie(value=unquote(v), dirty=False)
                    cookie_counts[Cookie.classify(k)] += 1
        except CookieError:
            #pylons or one of the associated retarded libraries
            #can't handle broken cookies
            request.environ['HTTP_COOKIE'] = ''

        for cookietype, count in cookie_counts.iteritems():
            g.stats.simple_event("cookie.%s" % cookietype, count)

    delete_obsolete_cookies()

    # the user could have been logged in via one of the feeds
    maybe_admin = False
    is_otpcookie_valid = False

    # no logins for RSS feed unless valid_feed has already been called
    if not c.user:
        if c.extension != "rss":
            authenticate_user()

            admin_cookie = c.cookies.get(g.admin_cookie)
            if c.user_is_loggedin and admin_cookie:
                maybe_admin, first_login = valid_admin_cookie(
                    admin_cookie.value)

                if maybe_admin:
                    self.enable_admin_mode(c.user, first_login=first_login)
                else:
                    self.disable_admin_mode(c.user)

            otp_cookie = read_user_cookie(g.otp_cookie)
            if c.user_is_loggedin and otp_cookie:
                is_otpcookie_valid = valid_otp_cookie(otp_cookie)

        # still nobody authenticated: fall back to an anonymous user
        if not c.user:
            c.user = UnloggedUser(get_browser_langs())
            # patch for fixing mangled language preferences
            if (not isinstance(c.user.pref_lang, basestring) or
                    not all(isinstance(x, basestring)
                            for x in c.user.pref_content_langs)):
                c.user.pref_lang = g.lang
                c.user.pref_content_langs = [g.lang]
                c.user._commit()

    if c.user_is_loggedin:
        if not c.user._loaded:
            c.user._load()
        c.modhash = c.user.modhash()
        if hasattr(c.user, 'msgtime') and c.user.msgtime:
            c.have_messages = c.user.msgtime
        c.show_mod_mail = Subreddit.reverse_moderator_ids(c.user)
        c.have_mod_messages = getattr(c.user, "modmsgtime", False)
        # admin cookie alone is not enough; the name must be configured
        c.user_is_admin = maybe_admin and c.user.name in g.admins
        c.user_special_distinguish = c.user.special_distinguish()
        c.user_is_sponsor = c.user_is_admin or c.user.name in g.sponsors
        c.otp_cached = is_otpcookie_valid
        if not isinstance(c.site, FakeSubreddit) and not g.disallow_db_writes:
            c.user.update_sr_activity(c.site)

    c.over18 = over18()
    set_obey_over18()

    # looking up the multireddit requires c.user.
    set_multireddit()

    #set_browser_langs()
    set_host_lang()
    set_iface_lang()
    set_content_lang()
    set_recent_clicks()
    # used for HTML-lite templates
    set_colors()

    # set some environmental variables in case we hit an abort
    if not isinstance(c.site, FakeSubreddit):
        request.environ['REDDIT_NAME'] = c.site.name

    # random reddit trickery -- have to do this after the content lang is set
    if c.site == Random:
        c.site = Subreddit.random_reddit(user=c.user)
        redirect_to("/" + c.site.path.strip('/') + request.path_qs)
    elif c.site == RandomSubscription:
        if c.user.gold:
            c.site = Subreddit.random_subscription(c.user)
            redirect_to('/' + c.site.path.strip('/') + request.path_qs)
        else:
            redirect_to('/gold/about')
    elif c.site == RandomNSFW:
        c.site = Subreddit.random_reddit(over18=True, user=c.user)
        redirect_to("/" + c.site.path.strip('/') + request.path_qs)

    # access gates are skipped for the login API so users can still log in
    if not request.path.startswith("/api/login/"):
        # is the subreddit banned?
        if c.site.spammy() and not c.user_is_admin and not c.error_page:
            ban_info = getattr(c.site, "ban_info", {})
            if "message" in ban_info:
                message = ban_info['message']
            else:
                sitelink = url_escape(add_sr("/"))
                subject = ("/r/%s has been incorrectly banned" %
                           c.site.name)
                link = ("/r/redditrequest/submit?url=%s&title=%s" %
                        (sitelink, subject))
                message = strings.banned_subreddit_message % dict(
                    link=link)

            errpage = pages.RedditError(strings.banned_subreddit_title,
                                        message,
                                        image="subreddit-banned.png")
            request.environ['usable_error_content'] = errpage.render()
            self.abort404()

        # check if the user has access to this subreddit
        if not c.site.can_view(c.user) and not c.error_page:
            if isinstance(c.site, LabeledMulti):
                # do not leak the existence of multis via 403.
                self.abort404()
            else:
                public_description = c.site.public_description
                errpage = pages.RedditError(
                    strings.private_subreddit_title,
                    strings.private_subreddit_message,
                    image="subreddit-private.png",
                    sr_description=public_description,
                )
                request.environ['usable_error_content'] = errpage.render()
                self.abort403()

        #check over 18
        if (c.site.over_18 and not c.over18 and
                request.path not in ("/frame", "/over18") and
                c.render_style == 'html'):
            return self.intermediate_redirect("/over18")

    #check whether to allow custom styles
    c.allow_styles = True
    c.can_apply_styles = self.allow_stylesheets
    if g.css_killswitch:
        c.can_apply_styles = False
    #if the preference is set and we're not at a cname
    elif not c.user.pref_show_stylesheets and not c.cname:
        c.can_apply_styles = False
    #if the site has a cname, but we're not using it
    elif c.site.domain and c.site.css_on_cname and not c.cname:
        c.can_apply_styles = False

    c.bare_content = request.GET.pop('bare', False)

    c.show_admin_bar = admin_bar_eligible and (c.user_is_admin or g.debug)
    if not c.show_admin_bar:
        g.stats.end_logging_timings()

    hooks.get_hook("reddit.request.begin").call()

    c.request_timer.intermediate("base-pre")
def get_trials(sr): if isinstance(sr, (ModContribSR, MultiReddit)): srs = Subreddit._byID(sr.sr_ids, return_dict=False) return get_trials_links(srs) else: return get_trials_links(sr)
def test_subreddit_logged_out(self): srname = "test1" subreddit = Subreddit(name=srname) srnames = srnames_from_site(self.logged_out, subreddit) self.assertEqual(srnames, {srname})
def create_about_post(): user = Account._by_name('Eliezer_Yudkowsky') sr = Subreddit._by_name('admin') link = Link._submit('About LessWrong', 'TBC', 'self', user, sr, '::1', [])
srnames_from_site, ) from r2.models import ( Account, Collection, FakeAccount, Frontpage, Subreddit, MultiReddit, ) # use the original function to avoid going out to memcached. get_nsfw_collections_srnames = get_nsfw_collections_srnames.memoized_fn subscriptions_srnames = ["foo", "bar"] subscriptions = map(lambda srname: Subreddit(name=srname), subscriptions_srnames) multi_srnames = ["bing", "bat"] multi_subreddits = map(lambda srname: Subreddit(name=srname), multi_srnames) nice_srname = "mylittlepony" nsfw_srname = "pr0n" questionably_nsfw = "sexstories" quarantined_srname = "croontown" naughty_subscriptions = [ Subreddit(name=nice_srname), Subreddit(name=nsfw_srname, over_18=True), Subreddit(name=quarantined_srname, quarantine=True), ] nsfw_collection_srnames = [questionably_nsfw, nsfw_srname] nsfw_collection = Collection(name="after dark", sr_names=nsfw_collection_srnames,
def subreddits(self): from subreddit import Subreddit return Subreddit.user_subreddits(self)
def get_tree(self): if self.parent: sr = Subreddit._byID(self.parent.sr_id) return sr_conversation(sr, self.parent) return moderator_messages(self.sr.sr_ids)
def get_drafts(user): draft_sr = Subreddit._by_name(user.draft_sr_name) return get_links(draft_sr, 'new', 'all')
def thing_attr(self, thing, attr): from r2.models import Subreddit if attr == "is_mod": t = thing.lookups[0] if isinstance(thing, Wrapped) else thing return bool(Subreddit.reverse_moderator_ids(t)) return ThingJsonTemplate.thing_attr(self, thing, attr)
def deleted_account_cleanup(data):
    """Scrub credentials and subreddit relationships off deleted accounts.

    data: mapping whose values are account id36s (queue message payload —
        keys unused here).  Accounts not marked _deleted are skipped.
    Cleanup is best-effort: each rel type is wrapped in its own broad
    except so one failure doesn't block the rest; failures are recorded
    in the admin usernote instead of raised.
    """
    from r2.models import Subreddit
    from r2.models.admin_notes import AdminNotesBySystem
    from r2.models.flair import Flair
    from r2.models.token import OAuth2Client

    for account_id36 in data.itervalues():
        account = Account._byID36(account_id36, data=True)

        if not account._deleted:
            continue

        # wipe the account's password and email address
        account.password = ""
        account.email = ""
        account.email_verified = False

        notes = ""

        # "noisy" rel removals, we'll record all of these in the account's
        # usernotes in case we need the information later
        rel_removal_descriptions = {
            "moderator": "Unmodded",
            "moderator_invite": "Cancelled mod invite",
            "contributor": "Removed as contributor",
            "banned": "Unbanned",
            "wikibanned": "Un-wikibanned",
            "wikicontributor": "Removed as wiki contributor",
        }
        if account.has_subscribed:
            rel_removal_descriptions["subscriber"] = "Unsubscribed"

        for rel_type, description in rel_removal_descriptions.iteritems():
            try:
                # e.g. Subreddit.reverse_moderator_ids(account)
                ids_fn = getattr(Subreddit, "reverse_%s_ids" % rel_type)
                sr_ids = ids_fn(account)

                sr_names = []
                srs = Subreddit._byID(sr_ids, data=True, return_dict=False)
                for subreddit in srs:
                    # e.g. subreddit.remove_moderator(account)
                    remove_fn = getattr(subreddit, "remove_" + rel_type)
                    remove_fn(account)
                    sr_names.append(subreddit.name)

                if description and sr_names:
                    sr_list = ", ".join(sr_names)
                    notes += "* %s from %s\n" % (description, sr_list)
            except Exception as e:
                notes += "* Error cleaning up %s rels: %s\n" % (rel_type, e)

        # silent rel removals, no record left in the usernotes
        rel_classes = {
            "flair": Flair,
            "friend": Friend,
            "enemy": Friend,
        }

        for rel_name, rel_cls in rel_classes.iteritems():
            try:
                rels = rel_cls._query(
                    rel_cls.c._thing2_id == account._id,
                    rel_cls.c._name == rel_name,
                    eager_load=True,
                )
                for rel in rels:
                    remove_fn = getattr(rel._thing1, "remove_" + rel_name)
                    remove_fn(account)
            except Exception as e:
                notes += "* Error cleaning up %s rels: %s\n" % (rel_name, e)

        # add the note with info about the major changes to the account
        if notes:
            AdminNotesBySystem.add(
                system_name="user",
                subject=account.name,
                note="Account deletion cleanup summary:\n\n%s" % notes,
                author="<automated>",
                when=datetime.now(g.tz),
            )

        account._commit()
def get_available_pageviews(targets, start, end, location=None, datestr=False,
                            ignore=None, platform='all'):
    """
    Return the available pageviews by date for the targets and location.

    Available pageviews depends on all equal and higher level locations:
    A location is: subreddit > country > metro

    e.g. if a campaign is targeting /r/funny in USA/Boston we need to check
    that there's enough inventory in:
    * /r/funny (all campaigns targeting /r/funny regardless of location)
    * /r/funny + USA (all campaigns targeting /r/funny and USA with or without
      metro level targeting)
    * /r/funny + USA + Boston (all campaigns targeting /r/funny and USA and
      Boston)
    The available inventory is the smallest of these values.

    targets: one Target or a sequence; a single Target returns its own
        {date: pageviews} dict, a sequence returns {target name: {...}}.
    datestr: key the inner dict by 'mm/dd/yyyy' strings instead of dates.
    ignore: campaigns to exclude, passed through to find_campaigns.
    platform: 'all', 'desktop', 'mobile_web' or 'mobile_native' — splits
        inventory by PERCENT_MOBILE.
    """
    # assemble levels of location targeting, None means untargeted
    locations = [None]
    if location:
        locations.append(location)

        if location.metro:
            locations.append(Location(country=location.country))

    # get all the campaigns directly and indirectly involved in our target
    targets, is_single = tup(targets, ret_is_single=True)
    target_srs = list(chain.from_iterable(
        target.subreddits_slow for target in targets))
    all_campaigns = find_campaigns(target_srs, start, end, ignore)

    # get predicted pageviews for each subreddit and location
    all_sr_names = set(sr.name for sr in target_srs)
    all_sr_names |= set(chain.from_iterable(
        campaign.target.subreddit_names for campaign in all_campaigns))
    all_srs = Subreddit._by_name(all_sr_names).values()
    pageviews_dict = {location: get_predicted_pageviews(all_srs, location)
                      for location in locations}

    # determine booked impressions by target and location for each day
    dates = set(get_date_range(start, end))
    booked_dict = {}
    for date in dates:
        booked_dict[date] = {}

        for location in locations:
            booked_dict[date][location] = defaultdict(int)

    for campaign in all_campaigns:
        camp_dates = set(get_date_range(campaign.start_date,
                                        campaign.end_date))
        sr_names = tuple(sorted(campaign.target.subreddit_names))
        # Python 2 int division: impressions spread evenly over the run
        daily_impressions = campaign.impressions / campaign.ndays

        for location in locations:
            if location and not location.contains(campaign.location):
                # campaign's location is less specific than location
                continue

            for date in camp_dates.intersection(dates):
                booked_dict[date][location][sr_names] += daily_impressions

    # calculate inventory for each target and location on each date
    datekey = lambda dt: dt.strftime('%m/%d/%Y') if datestr else dt

    ret = {}
    for target in targets:
        name = make_target_name(target)
        subreddit_names = target.subreddit_names
        ret[name] = {}
        for date in dates:
            pageviews_by_location = {}
            for location in locations:
                # calculate available impressions for each location
                booked_by_target = booked_dict[date][location]
                pageviews_by_sr_name = pageviews_dict[location]
                pageviews_by_location[location] = get_maximized_pageviews(
                    subreddit_names, booked_by_target, pageviews_by_sr_name)
            # available pageviews is the minimum from all locations
            min_pageviews = min(pageviews_by_location.values())
            if PERCENT_MOBILE != 0:
                mobile_pageviews = min_pageviews * (float(PERCENT_MOBILE) / 100)
                if platform in ('mobile_web', 'mobile_native'):
                    min_pageviews = mobile_pageviews
                if platform == 'desktop':
                    min_pageviews = min_pageviews - mobile_pageviews
            ret[name][datekey(date)] = max(0, min_pageviews)

    if is_single:
        name = make_target_name(targets[0])
        return ret[name]
    else:
        return ret
def wrap_items(self, items):
    """Wrap raw Things for rendering: authors, votes, mod/spam state.

    Batch-fetches authors, subreddits and vote state up front, then
    decorates each Wrapped with display attributes (author flair,
    distinguish, score, spam/report visibility, approval checkmark).
    Returns the list of Wrapped items in input order.
    """
    from r2.lib.db import queries
    from r2.lib.template_helpers import add_attr

    user = c.user if c.user_is_loggedin else None

    # batch-load all authors referenced by the items
    aids = set(l.author_id for l in items
               if hasattr(l, 'author_id') and l.author_id is not None)
    authors = Account._byID(aids, data=True, stale=self.stale)
    now = datetime.datetime.now(g.tz)
    # authors currently inside their "reddit birthday" window
    cakes = {a._id for a in authors.itervalues()
             if a.cake_expiration and a.cake_expiration >= now}
    friend_rels = user.friend_rels() if user and user.gold else {}

    subreddits = Subreddit.load_subreddits(items, stale=self.stale)

    can_ban_set = set()
    if user:
        for sr_id, sr in subreddits.iteritems():
            if sr.can_ban(user):
                can_ban_set.add(sr_id)

    #get likes/dislikes
    try:
        likes = queries.get_likes(user, items)
    except tdb_cassandra.TRANSIENT_EXCEPTIONS as e:
        # degrade gracefully: render without vote state
        g.log.warning("Cassandra vote lookup failed: %r", e)
        likes = {}

    types = {}
    wrapped = []

    # precompute the moderator attribution link/label per subreddit
    modlink = {}
    modlabel = {}
    for s in subreddits.values():
        modlink[s._id] = '/r/%s/about/moderators' % s.name
        modlabel[s._id] = (_('moderator of /r/%(reddit)s, '
                             'speaking officially') % {'reddit': s.name})

    for item in items:
        w = self.wrap(item)
        wrapped.append(w)
        # add for caching (plus it should be bad form to use _
        # variables in templates)
        w.fullname = item._fullname
        types.setdefault(w.render_class, []).append(w)

        w.author = None
        w.friend = False

        # List of tuples (see add_attr() for details)
        w.attribs = []

        w.distinguished = None
        if hasattr(item, "distinguished"):
            if item.distinguished == 'yes':
                w.distinguished = 'moderator'
            elif item.distinguished in ('admin', 'special',
                                        'gold', 'gold-auto'):
                w.distinguished = item.distinguished

        try:
            w.author = authors.get(item.author_id)
            if user and item.author_id in user.friends:
                # deprecated old way:
                w.friend = True
                # new way:
                label = None
                if friend_rels:
                    rel = friend_rels[item.author_id]
                    note = getattr(rel, "note", None)
                    if note:
                        label = u"%s (%s)" % (_("friend"),
                                              _force_unicode(note))
                add_attr(w.attribs, 'F', label)
        except AttributeError:
            # item has no author_id attribute at all
            pass

        if (w.distinguished == 'admin' and w.author):
            add_attr(w.attribs, 'A')

        if w.distinguished == 'moderator':
            add_attr(w.attribs, 'M', label=modlabel[item.sr_id],
                     link=modlink[item.sr_id])

        if w.distinguished == 'special':
            args = w.author.special_distinguish()
            args.pop('name')
            if not args.get('kind'):
                args['kind'] = 'special'
            add_attr(w.attribs, **args)

        if w.author and w.author._id in cakes and not c.profilepage:
            add_attr(
                w.attribs,
                kind="cake",
                label=(_("%(user)s just celebrated a reddit birthday!") %
                       {"user": w.author.name}),
                link="/user/%s" % w.author.name,
            )

        if hasattr(item, "sr_id") and item.sr_id is not None:
            w.subreddit = subreddits[item.sr_id]

        w.likes = likes.get((user, item))

        # update vote tallies
        compute_votes(w, item)

        w.score = w.upvotes - w.downvotes

        # base_score is the score with the viewer's own vote backed out
        if w.likes:
            base_score = w.score - 1
        elif w.likes is None:
            base_score = w.score
        else:
            base_score = w.score + 1

        # store the set of available scores based on the vote
        # for ease of i18n when there is a label
        w.voting_score = [(base_score + x - 1) for x in range(3)]

        w.deleted = item._deleted

        w.link_notes = []

        if c.user_is_admin:
            if item._deleted:
                w.link_notes.append("deleted link")
            if getattr(item, "verdict", None):
                if not item.verdict.endswith("-approved"):
                    w.link_notes.append(w.verdict)

        if c.user_is_admin and getattr(item, 'ip', None):
            w.ip_span = ip_span(item.ip)
        else:
            w.ip_span = ""

        # if the user can ban things on a given subreddit, or an
        # admin, then allow them to see that the item is spam, and
        # add the other spam-related display attributes
        w.show_reports = False
        w.show_spam = False
        w.can_ban = False
        w.use_big_modbuttons = self.spam_listing

        if (c.user_is_admin or
                (user and hasattr(item, 'sr_id') and
                 item.sr_id in can_ban_set)):
            if getattr(item, "promoted", None) is None:
                w.can_ban = True

            ban_info = getattr(item, 'ban_info', {})
            w.unbanner = ban_info.get('unbanner')

            if item._spam:
                w.show_spam = True
                w.moderator_banned = ban_info.get('moderator_banned', False)
                w.autobanned = ban_info.get('auto', False)
                w.banner = ban_info.get('banner')
                w.banned_at = ban_info.get("banned_at", None)
                if ban_info.get('note', None) and w.banner:
                    w.banner += ' (%s)' % ban_info['note']
                w.use_big_modbuttons = True
                if getattr(w, "author", None) and w.author._spam:
                    w.show_spam = "author"

                # shadowban: the spammer sees their own item as normal
                if c.user == w.author and c.user._spam:
                    w.show_spam = False
                    w._spam = False
                    w.use_big_modbuttons = False

            elif (getattr(item, 'reported', 0) > 0 and
                  (not getattr(item, 'ignore_reports', False) or
                   c.user_is_admin)):
                w.show_reports = True
                w.use_big_modbuttons = True

                # report_count isn't used in any template, but add it to
                # the Wrapped so it's pulled into the render cache key in
                # instances when reported will be used in the template
                w.report_count = item.reported

        w.approval_checkmark = None
        if w.can_ban:
            verdict = getattr(w, "verdict", None)
            if verdict in ('admin-approved', 'mod-approved'):
                approver = None
                approval_time = None
                baninfo = getattr(w, "ban_info", None)
                if baninfo:
                    approver = baninfo.get("unbanner", None)
                    approval_time = baninfo.get("unbanned_at", None)

                approver = approver or _("a moderator")

                if approval_time:
                    text = _("approved by %(who)s %(when)s ago") % {
                        "who": approver,
                        "when": timesince(approval_time)}
                else:
                    text = _("approved by %s") % approver
                w.approval_checkmark = text

    # recache the user object: it may be None if user is not logged in,
    # whereas now we are happy to have the UnloggedUser object
    user = c.user

    for cls in types.keys():
        cls.add_props(user, types[cls])

    return wrapped
def gen_keys():
    """Generate every cache key worth (re)warming.

    Yields key strings / query idens for: the promoted-links memo, all
    comments, per-link comment trees and by-url keys, per-account
    listings and inboxes, and per-subreddit listings and mod queues.
    Iterates the full Link, Account and Subreddit tables (including
    spam/deleted), so this is expensive — batch-processed via
    fetch_things2 at the module-level `verbosity` chunk size.
    """
    yield promoted_memo_key

    # just let this one do its own writing
    load_all_reddits()

    yield queries.get_all_comments().iden

    l_q = Link._query(Link.c._spam == (True, False),
                      Link.c._deleted == (True, False),
                      sort=desc('_date'),
                      data=True,
                      )
    for link in fetch_things2(l_q, verbosity):
        yield comments_key(link._id)
        yield last_modified_key(link, 'comments')
        # only external links have a by-url lookup key
        if not getattr(link, 'is_self', False) and hasattr(link, 'url'):
            yield Link.by_url_key(link.url)

    a_q = Account._query(Account.c._spam == (True, False),
                         sort=desc('_date'),
                         )
    for account in fetch_things2(a_q, verbosity):
        yield messages_key(account._id)
        yield last_modified_key(account, 'overview')
        yield last_modified_key(account, 'commented')
        yield last_modified_key(account, 'submitted')
        yield last_modified_key(account, 'liked')
        yield last_modified_key(account, 'disliked')
        yield queries.get_comments(account, 'new', 'all').iden
        yield queries.get_submitted(account, 'new', 'all').iden
        yield queries.get_liked(account).iden
        yield queries.get_disliked(account).iden
        yield queries.get_hidden(account).iden
        yield queries.get_saved(account).iden
        yield queries.get_inbox_messages(account).iden
        yield queries.get_unread_messages(account).iden
        yield queries.get_inbox_comments(account).iden
        yield queries.get_unread_comments(account).iden
        yield queries.get_inbox_selfreply(account).iden
        yield queries.get_unread_selfreply(account).iden
        yield queries.get_sent(account).iden

    sr_q = Subreddit._query(Subreddit.c._spam == (True, False),
                            sort=desc('_date'),
                            )
    for sr in fetch_things2(sr_q, verbosity):
        yield last_modified_key(sr, 'stylesheet_contents')
        yield queries.get_links(sr, 'hot', 'all').iden
        yield queries.get_links(sr, 'new', 'all').iden

        # every time-filtered variant of the sorted listings
        for sort in 'top', 'controversial':
            for time in 'hour', 'day', 'week', 'month', 'year', 'all':
                yield queries.get_links(sr, sort, time,
                                        merge_batched=False).iden
        yield queries.get_spam_links(sr).iden
        yield queries.get_spam_comments(sr).iden
        yield queries.get_reported_links(sr).iden
        yield queries.get_reported_comments(sr).iden
        yield queries.get_subreddit_messages(sr).iden
        yield queries.get_unread_subreddit_messages(sr).iden
def set_subreddit(): #the r parameter gets added by javascript for POST requests so we #can reference c.site in api.py sr_name = request.environ.get("subreddit", request.POST.get('r')) domain = request.environ.get("domain") can_stale = request.method.upper() in ('GET', 'HEAD') c.site = Frontpage if not sr_name: #check for cnames cname = request.environ.get('legacy-cname') if cname: sr = Subreddit._by_domain(cname) or Frontpage domain = g.domain if g.domain_prefix: domain = ".".join((g.domain_prefix, domain)) redirect_to('http://%s%s' % (domain, sr.path), _code=301) elif sr_name == 'r': #reddits c.site = Sub elif '+' in sr_name: sr_names = sr_name.split('+') srs = Subreddit._by_name(sr_names, stale=can_stale).values() if All in srs: c.site = All elif Friends in srs: c.site = Friends else: srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)] multi_path = '/r/' + sr_name if not srs: c.site = MultiReddit(multi_path, []) elif len(srs) == 1: c.site = srs[0] else: c.site = MultiReddit(multi_path, srs) elif '-' in sr_name: sr_names = sr_name.split('-') if not sr_names[0].lower() == All.name.lower(): redirect_to("/subreddits/search?q=%s" % sr_name) srs = Subreddit._by_name(sr_names[1:], stale=can_stale).values() srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)] if not srs: c.site = All else: c.site = AllMinus(srs) else: try: c.site = Subreddit._by_name(sr_name, stale=can_stale) except NotFound: sr_name = chksrname(sr_name) if sr_name: redirect_to("/subreddits/search?q=%s" % sr_name) elif not c.error_page and not request.path.startswith( "/api/login/"): abort(404) #if we didn't find a subreddit, check for a domain listing if not sr_name and isinstance(c.site, DefaultSR) and domain: c.site = DomainSR(domain) if isinstance(c.site, FakeSubreddit): c.default_sr = True