def set_subreddit():
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage
    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        sr_names = sr_name.split('+')
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def run(self, path):
    if not self.required and not path:
        return

    adhoc_multi_rx = is_adhoc_multi_rx.match(path)
    if is_multi_rx.match(path):
        # note: kinds must be a tuple; ("m") is just the string "m"
        return VMultiByPath(self.param, kinds=("m",)).run(path)
    elif adhoc_multi_rx:
        sr_strings = adhoc_multi_rx.groups()[0].split("+")
        srs = Subreddit._by_name(sr_strings, stale=True).values()
        return MultiReddit(path, srs)
    else:
        try:
            return Subreddit._by_name(path)
        except NotFound:
            self.set_error(errors.INVALID_SITE_PATH)
def add_affinity_variant(variant, filename, debug=True):
    """Store related subreddits and similarity scores"""
    # Load file and split out affinity data
    with open(filename, 'rb') as f:
        sr_affinity_data = []
        for line in f:
            sr_affinity_data.append(line.strip().split('\x01'))

    # Group similar subreddits (and scores) under the original subreddit
    subreddit_similarity = {}
    for sr_name, similar_subreddit, similarity in sr_affinity_data:
        if subreddit_similarity.get(sr_name):
            subreddit_similarity[sr_name][similar_subreddit] = similarity
        else:
            similar_subreddit_dict = {similar_subreddit: similarity}
            subreddit_similarity[sr_name] = similar_subreddit_dict

    # Add subreddits and their similar subreddits/scores to SubredditAffinity
    for sr_name, similar_subreddits in subreddit_similarity.iteritems():
        try:
            subreddit = Subreddit._by_name(sr_name)
        except NotFound:
            print 'skipping: not found %s' % sr_name
            # without this, the loop would fall through and reference
            # an unbound `subreddit`
            continue

        if debug:
            print "%s adding: %s" % (subreddit.name, similar_subreddits)
        else:
            SubredditAffinity.create(subreddit, variant, similar_subreddits)
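# A minimal sketch of the input format implied by add_affinity_variant's
# split('\x01'): each line is SOH-delimited as
# "<sr_name>\x01<similar_subreddit>\x01<similarity>". The helper below
# (write_sample_affinity_file is a hypothetical name, for illustration only)
# writes a file the loader above would parse into
# {"pics": {"funny": "0.83", "aww": "0.61"}}.
def write_sample_affinity_file(filename):
    rows = [
        ("pics", "funny", "0.83"),
        ("pics", "aww", "0.61"),
    ]
    with open(filename, 'wb') as f:
        for row in rows:
            f.write('\x01'.join(row) + '\n')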
def POST_report(self, form, jquery, report_type):
    """Report the thread for violating the rules of reddit."""
    if form.has_errors("type", errors.INVALID_OPTION):
        return

    if c.user._spam or c.user.ignorereports:
        return

    already_reported = LiveUpdateReportsByAccount.get_report(
        c.user, c.liveupdate_event)
    if already_reported:
        self.abort403()

    LiveUpdateReportsByAccount.create(
        c.user, c.liveupdate_event, type=report_type)
    queries.report_event(c.liveupdate_event)

    try:
        default_subreddit = Subreddit._by_name(g.default_sr)
    except NotFound:
        pass
    else:
        not_yet_reported = g.cache.add(
            "lu_reported_" + str(c.liveupdate_event._id), 1, time=3600)
        if not_yet_reported:
            send_system_message(
                default_subreddit,
                subject="live thread reported",
                body=REPORTED_MESSAGE % {
                    "title": c.liveupdate_event.title,
                    "url": "/live/" + c.liveupdate_event._id,
                    "reason": pages.REPORT_TYPES[report_type],
                },
            )
def GET_promote_inventory(self, start, end, sr_name, collection_name):
    if not start or not end:
        start = promote.promo_datetime_now(offset=1).date()
        end = promote.promo_datetime_now(offset=8).date()
        c.errors.remove((errors.BAD_DATE, 'startdate'))
        c.errors.remove((errors.BAD_DATE, 'enddate'))

    target = Target(Frontpage.name)
    if sr_name:
        try:
            sr = Subreddit._by_name(sr_name)
            target = Target(sr.name)
        except NotFound:
            c.errors.add(errors.SUBREDDIT_NOEXIST, field='sr_name')
    elif collection_name:
        collection = Collection.by_name(collection_name)
        if not collection:
            c.errors.add(errors.COLLECTION_NOEXIST, field='collection_name')
        else:
            target = Target(collection)

    content = PromoteInventory(start, end, target)
    if c.render_style == 'csv':
        return content.as_csv()
    else:
        return PromotePage(title=_("sponsored link inventory"),
                           content=content).render()
def submit_link(user, subreddit, title, url, thumb_url):
    account = Account._by_name(user)
    subreddit = Subreddit._by_name(subreddit)
    ip = '127.0.0.1'

    # submit the link
    link = Link._submit(
        is_self=False,
        title=title,
        content=url,
        author=account,
        sr=subreddit,
        ip=ip,
        spam=False,
    )

    try:
        # force the thumbnail before scraper_q gets in the mix
        image_data = urllib.urlopen(thumb_url).read()
        force_thumbnail(link, image_data)
    except:
        pass

    # various backend processing things
    queries.new_link(link)
    link.update_search_index()

    # wait for the amqp worker to finish up
    worker.join()

    print link.make_permalink_slow()
def submit_all():
    from r2.models import Subreddit, Account, Link, NotFound
    from r2.lib.media import set_media
    from r2.lib.db import queries

    sr = Subreddit._by_name('testmedia')
    author = Account._by_name('testmedia')
    links = []
    for url in test_urls:
        try:
            # delete any existing version of the link
            l = Link._by_url(url, sr)
            print "Deleting %s" % l
            l._deleted = True
            l._commit()
        except NotFound:
            pass

        l = Link._submit(url, url, author, sr, '0.0.0.0')

        try:
            set_media(l)
        except Exception, e:
            print e

        if g.write_query_queue:
            queries.new_link(l)

        links.append(l)
def get_subreddit(self):
    """checks if the current url refers to a subreddit and returns
    that subreddit object.  The cases here are:

      * the hostname is unset or is g.domain, in which case it looks
        for /r/XXXX or /subreddits.  The default in this case is
        Default.
      * the hostname is a cname to a known subreddit.

    On failure to find a subreddit, returns None.
    """
    from pylons import g
    from r2.models import Subreddit, Sub, NotFound, DefaultSR

    try:
        if (not self.hostname or
                is_subdomain(self.hostname, g.domain) or
                self.hostname.startswith(g.domain)):
            if self.path.startswith('/r/'):
                return Subreddit._by_name(self.path.split('/')[2])
            elif self.path.startswith(('/subreddits/', '/reddits/')):
                return Sub
            else:
                return DefaultSR()
        elif self.hostname:
            return Subreddit._by_domain(self.hostname)
    except NotFound:
        pass

    return None
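# Illustrative mapping implied by get_subreddit's docstring and branches
# (hostnames are hypothetical; assumes g.domain == "reddit.local"):
#
#   hostname=None,           path="/r/pics/new"  -> Subreddit._by_name("pics")
#   hostname="reddit.local", path="/subreddits/" -> Sub
#   hostname="reddit.local", path="/"            -> DefaultSR()
#   hostname="cname.example.com" (a known cname) -> Subreddit._by_domain(...)
#   any lookup raising NotFound                  -> None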
def campaign_has_oversold_error(form, campaign):
    if campaign.priority.inventory_override:
        return

    target = Subreddit._by_name(campaign.sr_name) if campaign.sr_name else None
    return has_oversold_error(form, campaign, campaign.start_date,
                              campaign.end_date, campaign.bid, campaign.cpm,
                              target)
def backfill(after=None):
    q = Subreddit._query(sort=asc('_date'))
    if after:
        sr = Subreddit._by_name(after)
        q = q._after(sr)

    for sr in fetch_things2(q):
        backfill_sr(sr)
def degolden(self, account):
    from r2.lib.db.queries import changed

    account.gold = False
    Award.take_away("reddit_gold", account)
    account._commit()

    if g.lounge_reddit:
        sr = Subreddit._by_name(g.lounge_reddit)
        sr.remove_contributor(account)
def nav(self):
    sr = Subreddit._by_name(g.default_sr)
    cloud = Tag.tag_cloud_for_subreddits([sr._id])

    buttons = []
    for tag, weight in cloud:
        buttons.append(NavButton(tag.name, tag.name,
                                 css_class=self.numbers[weight - 1]))

    return NavMenu(buttons, type="flatlist", separator=' ',
                   base_path='/tag/')
def promote_link(link, campaign):
    if (not link.over_18 and
            campaign.sr_name and
            Subreddit._by_name(campaign.sr_name).over_18):
        link.over_18 = True
        link._commit()

    if not is_promoted(link):
        update_promote_status(link, PROMOTE_STATUS.promoted)
        emailer.live_promo(link)
def init_builder(self):
    sr = Subreddit._by_name(g.default_sr)
    return UnbannedCommentBuilder(
        self.query(),
        [sr._id],
        num=5,
        wrap=RecentItems.wrap_thing,
        skip=True,
    )
def enflair(subreddit_name, account_name, flair_text, flair_class):
    sr = Subreddit._by_name(subreddit_name)
    account = Account._by_name(account_name)

    sr.add_flair(account)
    setattr(account, "flair_%d_text" % sr._id, flair_text)
    setattr(account, "flair_%d_css_class" % sr._id, flair_class)
    account._commit()
def get_available_pageviews(targets, start, end, datestr=False, ignore=None):
    pageviews_by_sr_name = {}
    all_campaigns = set()
    targets, is_single = tup(targets, ret_is_single=True)
    target_srs = chain.from_iterable(
        target.subreddits_slow for target in targets)
    all_sr_names = set()
    srs = set(target_srs)

    # get all campaigns in target_srs and pull in campaigns from other
    # subreddits that are targeted
    while srs:
        all_sr_names |= {sr.name for sr in srs}
        new_pageviews_by_sr_name = get_predicted_pageviews(srs)
        pageviews_by_sr_name.update(new_pageviews_by_sr_name)
        new_campaigns_by_date = get_campaigns_by_date(srs, start, end, ignore)
        new_campaigns = set(chain.from_iterable(
            new_campaigns_by_date.itervalues()))
        all_campaigns.update(new_campaigns)
        new_sr_names = set(chain.from_iterable(
            campaign.target.subreddit_names for campaign in new_campaigns
        ))
        new_sr_names -= all_sr_names
        srs = set(Subreddit._by_name(new_sr_names).values())

    # determine booked impressions by target for each day
    dates = set(get_date_range(start, end))
    booked_by_target_by_date = {date: defaultdict(int) for date in dates}
    for campaign in all_campaigns:
        camp_dates = set(get_date_range(campaign.start_date,
                                        campaign.end_date))
        sr_names = tuple(sorted(campaign.target.subreddit_names))
        daily_impressions = campaign.impressions / campaign.ndays
        for date in camp_dates.intersection(dates):
            booked_by_target_by_date[date][sr_names] += daily_impressions

    datekey = lambda dt: dt.strftime('%m/%d/%Y') if datestr else dt

    ret = {}
    for target in targets:
        name = make_target_name(target)
        ret[name] = {}
        for date in dates:
            booked_by_target = booked_by_target_by_date[date]
            pageviews = get_maximized_pageviews(
                target.subreddit_names, booked_by_target,
                pageviews_by_sr_name)
            ret[name][datekey(date)] = max(0, pageviews)

    if is_single:
        name = make_target_name(targets[0])
        return ret[name]
    else:
        return ret
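# The while loop above computes a transitive closure: campaigns touching a
# target subreddit can also target other subreddits, which pull in their own
# campaigns, and so on until no new names appear. A minimal sketch of the
# same fixpoint pattern on plain dicts (expand_targets and targets_of are
# hypothetical names, for illustration only):
def expand_targets(seed_names, targets_of):
    """Return every subreddit name reachable from seed_names via the
    shared-targeting relation described by the targets_of mapping."""
    seen = set()
    frontier = set(seed_names)
    while frontier:
        seen |= frontier
        reachable = set()
        for name in frontier:
            reachable |= set(targets_of.get(name, ()))
        frontier = reachable - seen
    return seen

# expand_targets({"pics"}, {"pics": ["funny"], "funny": ["aww"]})
# => {"pics", "funny", "aww"}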
def __init__(self, *args, **kwargs):
    from r2.lib.user_stats import top_users

    uids = top_users()
    users = Account._byID(uids, data=True, return_dict=False)

    # Filter out accounts banned from the default subreddit
    sr = Subreddit._by_name(g.default_sr)
    self.things = filter(lambda user: not sr.is_banned(user), users)

    Wrapped.__init__(self, *args, **kwargs)
def subreddit_facets(self):
    '''Filter out subreddits that the user isn't allowed to see'''
    if not self._subreddits and 'reddit' in self._facets:
        sr_facets = [(sr['value'], sr['count'])
                     for sr in self._facets['reddit']]
        srs_by_name = Subreddit._by_name([sr[0] for sr in sr_facets])
        self._subreddits = [sr for sr in sr_facets
                            if srs_by_name[sr[0]].can_view(c.user)]
    return self._subreddits
def can_edit(self, user, user_is_admin=False):
    """Returns true if the supplied user is allowed to edit this meetup"""
    if user is None or isinstance(user, FakeAccount):
        return False
    elif user_is_admin or self.author_id == user._id:
        return True
    elif Subreddit._by_name('discussion').can_submit(user):
        return True
    else:
        return False
def ensure_subreddit(name, author):
    """Look up or create a subreddit and return it."""
    try:
        sr = Subreddit._by_name(name)
        print ">> found /r/{}".format(name)
        return sr
    except NotFound:
        print ">> creating /r/{}".format(name)
        sr = Subreddit._new(
            name=name,
            title="/r/{}".format(name),
            author_id=author._id,
            lang="en",
            ip="127.0.0.1",
        )
        sr._commit()
        return sr
def GET_listing(self, srname=None, include_managed=False, sort="all", **kw):
    self.sort = sort
    self.sr = None
    self.include_managed = include_managed

    if srname:
        try:
            self.sr = Subreddit._by_name(srname)
        except NotFound:
            pass

    return ListingController.GET_listing(self, **kw)
def subs_contribs(sr_name='betateam'):
    """Convert all subscribers of a given subreddit to contributors.
    Useful for forming opt-in beta teams"""
    from r2.models import Subreddit, SRMember

    sr = Subreddit._by_name(sr_name)
    # the query result is what we iterate over (the original referenced an
    # undefined `rels`)
    rels = SRMember._query(SRMember.c._thing1_id == sr._id)
    for rel in rels:
        if rel._name == 'subscriber':
            sr.add_contributor(rel._thing2)
def obscure(user):
    all_karmas = user.all_karmas()
    if not all_karmas:
        return _("lurker")

    srnames = [x[0] for x in all_karmas]
    srs = [sr for sr in Subreddit._by_name(srnames).values()
           if sr.type in ("public", "restricted")]
    if not srs:
        return _("something secret")

    srs.sort(key=lambda sr: sr._downs)
    most_obscure = srs[0]
    return "/r/" + most_obscure.name
def wiki_template(template_slug, sr=None):
    """Pull content from a subreddit's wiki page for internal use."""
    if not sr:
        sr = Subreddit._by_name(g.default_sr)

    try:
        wiki = WikiPage.get(sr, "templates/%s" % template_slug)
    except tdb_cassandra.NotFound:
        return None

    return wiki._get("content")
def import_posts(input_filename, rewrite_filename, sr_name):
    pylons.c.default_sr = True
    sr = Subreddit._by_name(sr_name)
    input_file = open(input_filename)
    rewrite_file = open(rewrite_filename, 'w')
    data = yaml.load(input_file, Loader=yaml.CLoader)
    importer = Importer()
    importer.import_into_subreddit(sr, data, rewrite_file)
def configure_discussion():
    from r2.models import Subreddit

    s = Subreddit._by_name('discussion')
    s.header = "/static/logo-discussion.png"
    s.stylesheet = "/static/discussion.css"
    s.infotext = (
        u"This part of the site is for the discussion of topics not yet "
        u"ready or not suitable for normal top-level posts. Votes are only "
        u"worth \N{PLUS-MINUS SIGN}1 point here. For more information, see "
        u"[About Less Wrong](/about-less-wrong).")
    s.posts_per_page_multiplier = 4
    s.post_karma_multiplier = 1
    s._commit()
def set_prefs(user, prefs):
    for k, v in prefs.iteritems():
        if k == 'pref_beta' and v and not getattr(user, 'pref_beta', False):
            # If a user newly opted into beta, we want to subscribe them
            # to the beta subreddit.
            try:
                sr = Subreddit._by_name(g.beta_sr)
                sr.add_subscriber(user)
            except NotFound:
                g.log.warning("Could not find beta subreddit '%s'. It may "
                              "need to be created." % g.beta_sr)
        setattr(user, k, v)
def enflair(subreddit_name, account_name, flair_text, flair_class):
    sr = Subreddit._by_name(subreddit_name)
    try:
        account = Account._by_name(account_name)
    except NotFound:
        return

    sr.add_flair(account)
    setattr(account, "flair_%d_text" % sr._id, flair_text)
    setattr(account, "flair_%d_css_class" % sr._id, flair_class)
    account._commit()
def degolden(self, account, severe=False):
    if severe:
        account.gold_charter = False
        Award.take_away("charter_subscriber", account)

    Award.take_away("reddit_gold", account)
    account.gold = False
    account._commit()

    if g.lounge_reddit and not getattr(account, "gold_charter", False):
        sr = Subreddit._by_name(g.lounge_reddit)
        sr.remove_contributor(account)
def GET_listing(self, sr=None, sort="", **env):
    if not c.user_is_loggedin or not c.user.email_verified:
        return self.redirect("/ad_inq")

    self.sort = sort
    self.sr = None
    if sr and sr == Frontpage.name:
        self.sr = Frontpage
    elif sr:
        try:
            self.sr = Subreddit._by_name(sr)
        except NotFound:
            pass

    return ListingController.GET_listing(self, **env)
def make_daily_promotions(offset=0, test=False):
    """
    Arguments:
      offset - number of days after today to get the schedule for
      test - if True, new schedule will be generated but not launched
    Raises Exception with list of campaigns that had errors if there were any
    """
    scheduled_adweights, error_campaigns = get_scheduled(offset)
    current_adweights_byid = get_live_promotions([LiveAdWeights.ALL_ADS])
    current_adweights = current_adweights_byid[LiveAdWeights.ALL_ADS]

    link_names = [aw.link for aw in itertools.chain(scheduled_adweights,
                                                    current_adweights)]
    links = Link._by_fullname(link_names, data=True)

    camp_names = [aw.campaign for aw in itertools.chain(scheduled_adweights,
                                                        current_adweights)]
    campaigns = PromoCampaign._by_fullname(camp_names, data=True)
    srs = Subreddit._by_name([camp.sr_name for camp in campaigns.itervalues()
                              if camp.sr_name])

    expired_links = ({aw.link for aw in current_adweights} -
                     {aw.link for aw in scheduled_adweights})
    for link_name in expired_links:
        link = links[link_name]
        if is_promoted(link):
            if test:
                print "unpromote", link_name
            else:
                # update the query queue
                set_promote_status(link, PROMOTE_STATUS.finished)
                emailer.finished_promo(link)

    by_srid = defaultdict(list)
    for adweight in scheduled_adweights:
        link = links[adweight.link]
        campaign = campaigns[adweight.campaign]
        if campaign.sr_name:
            sr = srs[campaign.sr_name]
            sr_id = sr._id
            sr_over_18 = sr.over_18
        else:
            sr_id = ''
            sr_over_18 = False

        if sr_over_18:
            if test:
                print "over18", link._fullname
            else:
                link.over_18 = True
                link._commit()

        if is_accepted(link) and not is_promoted(link):
            if test:
                print "promote2", link._fullname
            else:
                # update the query queue
                set_promote_status(link, PROMOTE_STATUS.promoted)
                emailer.live_promo(link)

        by_srid[sr_id].append(adweight)

    if not test:
        set_live_promotions(by_srid)
        _mark_promos_updated()
    else:
        print by_srid

    finalize_completed_campaigns(daysago=offset + 1)
    hooks.get_hook('promote.make_daily_promotions').call(offset=offset)

    # after launching as many campaigns as possible, raise an exception to
    # report any error campaigns. (useful for triggering alerts in irc)
    if error_campaigns:
        raise Exception("Some scheduled campaigns could not be added to daily "
                        "promotions: %r" % error_campaigns)
def test_run_srs(*sr_names):
    '''Inject Subreddits by name into the index'''
    srs = Subreddit._by_name(sr_names).values()
    uploader = SubredditUploader(g.CLOUDSEARCH_SUBREDDIT_DOC_API, things=srs)
    return uploader.inject()
from r2.models import Thing, Account, Subreddit, Link, Comment
from r2.models.admintools import send_system_message
from r2.models.gold import (
    gold_goal_on,
    gold_revenue_multi,
    GoldRevenueGoalByDate,
    TIMEZONE,
)
from r2.models.wiki import WikiPage
from r2.lib.comment_tree import get_comment_tree
from r2.lib.db import tdb_cassandra
from reddit_gold.server_naming import gold_buyers_on


SERVERNAME_SR = Subreddit._by_name(g.gold_servername_sr)
SYSTEM_ACCOUNT = Account._by_name(g.system_user)


def get_recent_name_submissions():
    link_fullnames = list(queries.get_links(SERVERNAME_SR, "new", "all"))
    links = chain.from_iterable(Thing._by_fullname(chunk, return_dict=False)
                                for chunk in in_chunks(link_fullnames))
    for link in links:
        if link._deleted or link._spam:
            continue

        # OH GOD WHAT HAVE YOU POSTED IN MY LOVELY AUTOMATED SUBREDDIT!?
        if (not hasattr(link, "revenue_date") or
                not hasattr(link, "revenue_bucket") or
def _get_selfserve_links(self, count):
    links = Subreddit._by_name(g.advertising_links_sr).get_links('new', 'all')
    items = Link._by_fullname(links, data=True, return_dict=False)
    id36s = [self.advertising_link_id36_re.match(item.url).group(1)
             for item in items]
    ad_links = Link._byID36(id36s, return_dict=False, data=True)
    return wrap_links(ad_links, num=count)
def get_drafts(user):
    draft_sr = Subreddit._by_name(user.draft_sr_name)
    return get_links(draft_sr, 'new', 'all')
def set_subreddit():
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage
    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        sr_names = sr_name.split('+')
        srs = Subreddit._by_name(sr_names, stale=can_stale).values()
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            multi_path = '/r/' + sr_name
            if not srs:
                c.site = MultiReddit(multi_path, [])
            elif len(srs) == 1:
                c.site = srs[0]
            else:
                c.site = MultiReddit(multi_path, srs)
    elif '-' in sr_name:
        sr_names = sr_name.split('-')
        base_sr_name, exclude_sr_names = sr_names[0], sr_names[1:]
        srs = Subreddit._by_name(sr_names, stale=can_stale)
        base_sr = srs.pop(base_sr_name, None)
        exclude_srs = [sr for sr in srs.itervalues()
                       if not isinstance(sr, FakeSubreddit)]

        if base_sr == All:
            if exclude_srs:
                c.site = AllMinus(exclude_srs)
            else:
                c.site = All
        elif base_sr == Mod:
            if exclude_srs:
                c.site = ModMinus(exclude_srs)
            else:
                c.site = Mod
        else:
            redirect_to("/subreddits/search?q=%s" % sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/subreddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith(
                    "/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        # Redirect IDN to their IDNA name if necessary
        try:
            idna = _force_unicode(domain).encode("idna")
            if idna != domain:
                redirect_to("/domain/%s%s" % (idna,
                                              request.environ["PATH_INFO"]))
        except UnicodeError:
            domain = ''  # Ensure valid_ascii_domain fails

        if not c.error_page and not valid_ascii_domain.match(domain):
            abort(404)

        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
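# Path-to-site mapping implied by set_subreddit above (illustrative, based
# only on the branches shown; sr_name is the path segment after /r/):
#
#   "pics"          -> Subreddit._by_name("pics")
#   "pics+videos"   -> MultiReddit("/r/pics+videos", [pics, videos])
#   "all-funny"     -> AllMinus([funny])
#   "mod-funny"     -> ModMinus([funny])
#   "pics-funny"    -> redirect to /subreddits/search?q=pics-funny
#                      (base of a "-" multi must be all or mod)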
def create_about_post():
    user = Account._by_name('Eliezer_Yudkowsky')
    sr = Subreddit._by_name('admin')
    link = Link._submit('About LessWrong', 'TBC', user, sr, '::1', [])
def get_available_pageviews(targets, start, end, location=None, datestr=False,
                            ignore=None):
    """
    Return the available pageviews by date for the targets and location.

    Available pageviews depends on all equal and higher level locations:
    A location is: subreddit > country > metro

    e.g. if a campaign is targeting /r/funny in USA/Boston we need to check
    that there's enough inventory in:
    * /r/funny (all campaigns targeting /r/funny regardless of location)
    * /r/funny + USA (all campaigns targeting /r/funny and USA with or
      without metro level targeting)
    * /r/funny + USA + Boston (all campaigns targeting /r/funny and USA and
      Boston)

    The available inventory is the smallest of these values.

    """
    # assemble levels of location targeting, None means untargeted
    locations = [None]
    if location:
        locations.append(location)

        if location.metro:
            locations.append(Location(country=location.country))

    # get all the campaigns directly and indirectly involved in our target
    targets, is_single = tup(targets, ret_is_single=True)
    target_srs = list(
        chain.from_iterable(target.subreddits_slow for target in targets))
    all_campaigns = find_campaigns(target_srs, start, end, ignore)

    # get predicted pageviews for each subreddit and location
    all_sr_names = set(sr.name for sr in target_srs)
    all_sr_names |= set(
        chain.from_iterable(campaign.target.subreddit_names
                            for campaign in all_campaigns))
    all_srs = Subreddit._by_name(all_sr_names).values()
    pageviews_dict = {
        location: get_predicted_pageviews(all_srs, location)
        for location in locations
    }

    # determine booked impressions by target and location for each day
    dates = set(get_date_range(start, end))
    booked_dict = {}
    for date in dates:
        booked_dict[date] = {}
        for location in locations:
            booked_dict[date][location] = defaultdict(int)

    for campaign in all_campaigns:
        camp_dates = set(get_date_range(campaign.start_date,
                                        campaign.end_date))
        sr_names = tuple(sorted(campaign.target.subreddit_names))
        daily_impressions = campaign.impressions / campaign.ndays

        for location in locations:
            if location and not location.contains(campaign.location):
                # campaign's location is less specific than location
                continue

            for date in camp_dates.intersection(dates):
                booked_dict[date][location][sr_names] += daily_impressions

    # calculate inventory for each target and location on each date
    datekey = lambda dt: dt.strftime('%m/%d/%Y') if datestr else dt

    ret = {}
    for target in targets:
        name = make_target_name(target)
        subreddit_names = target.subreddit_names
        ret[name] = {}
        for date in dates:
            pageviews_by_location = {}
            for location in locations:
                # calculate available impressions for each location
                booked_by_target = booked_dict[date][location]
                pageviews_by_sr_name = pageviews_dict[location]
                pageviews_by_location[location] = get_maximized_pageviews(
                    subreddit_names, booked_by_target, pageviews_by_sr_name)

            # available pageviews is the minimum from all locations
            min_pageviews = min(pageviews_by_location.values())
            ret[name][datekey(date)] = max(0, min_pageviews)

    if is_single:
        name = make_target_name(targets[0])
        return ret[name]
    else:
        return ret
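# A worked example of the "smallest of these values" rule in the docstring
# above (all numbers hypothetical): suppose /r/funny has 1,000,000 predicted
# daily pageviews with 200,000 booked untargeted, 500,000 predicted for USA
# with 450,000 booked, and 100,000 predicted for USA/Boston with 20,000
# booked. Per-location availability is then 800,000 / 50,000 / 80,000, so a
# campaign targeting /r/funny in USA/Boston can book at most
# min(800000, 50000, 80000) = 50,000 impressions that day; the USA level is
# the binding constraint even though Boston itself has room.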
    Pixel,
    RedisCanvas,
)
from .pages import (
    PlaceEmbedPage,
    PlacePage,
    PlaceCanvasse,
)

controller_hooks = hooks.HookRegistrar()

ACCOUNT_CREATION_CUTOFF = datetime(2017, 3, 31, 0, 0, tzinfo=g.tz)
PIXEL_COOLDOWN_SECONDS = 300
PIXEL_COOLDOWN = timedelta(seconds=PIXEL_COOLDOWN_SECONDS)
ADMIN_RECT_DRAW_MAX_SIZE = 20

PLACE_SUBREDDIT = Subreddit._by_name("place", stale=True)


@add_controller
class LoggedOutPlaceController(BaseController):
    def pre(self):
        BaseController.pre(self)

        action = request.environ["pylons.routes_dict"].get("action")
        if action:
            if not self._get_action_handler():
                action = 'invalid'
            controller = request.environ["pylons.routes_dict"]["controller"]
            timer_name = "service_time.web.{}.{}".format(controller, action)
            c.request_timer = g.stats.get_timer(timer_name)
        else:
def get_discovery_srid36s():
    """Get list of srs that help people discover other srs."""
    srs = Subreddit._by_name(g.live_config['discovery_srs'])
    return [sr._id36 for sr in srs.itervalues()]
def test_run_srs(*sr_names):
    '''Inject Subreddits by name into the index'''
    srs = Subreddit._by_name(sr_names).values()
    uploader = SolrSubredditUploader(things=srs)
    return uploader.inject()
def copy_karmas():
    reddit = Subreddit._by_name('reddit.com')
    for user in all_users():
        print user.name, user.link_karma, user.comment_karma
        user.incr_karma('link', reddit, user.link_karma)
        user.incr_karma('comment', reddit, user.comment_karma)
def set_subreddit():
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage
    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        sr_names = sr_name.split('+')
        srs = Subreddit._by_name(sr_names, stale=can_stale).values()
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            multi_path = '/r/' + sr_name
            if not srs:
                c.site = MultiReddit(multi_path, [])
            elif len(srs) == 1:
                c.site = srs[0]
            else:
                c.site = MultiReddit(multi_path, srs)
    elif '-' in sr_name:
        sr_names = sr_name.split('-')
        if not sr_names[0].lower() == All.name.lower():
            redirect_to("/subreddits/search?q=%s" % sr_name)
        srs = Subreddit._by_name(sr_names[1:], stale=can_stale).values()
        srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
        if not srs:
            c.site = All
        else:
            c.site = AllMinus(srs)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/subreddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith(
                    "/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def inject_test_data(num_links=25, num_comments=25, num_votes=5):
    """Flood your reddit install with test data based on reddit.com."""

    print ">>>> Ensuring configured objects exist"
    system_user = ensure_account(g.system_user)
    ensure_account(g.automoderator_account)
    ensure_subreddit(g.default_sr, system_user)
    ensure_subreddit(g.takedown_sr, system_user)
    ensure_subreddit(g.beta_sr, system_user)
    ensure_subreddit(g.promo_sr_name, system_user)

    print
    print

    print ">>>> Fetching real data from reddit.com"
    modeler = Modeler()
    subreddits = [
        modeler.model_subreddit("pics"),
        modeler.model_subreddit("videos"),
        modeler.model_subreddit("askhistorians"),
    ]
    extra_settings = {
        "pics": {
            "show_media": True,
        },
        "videos": {
            "show_media": True,
        },
    }

    print
    print

    print ">>>> Generating test data"
    print ">>> Accounts"
    account_query = Account._query(sort="_date", limit=500, data=True)
    accounts = [a for a in account_query if a.name != g.system_user]
    accounts.extend(
        ensure_account(modeler.generate_username())
        for i in xrange(50 - len(accounts)))

    print ">>> Content"
    things = []
    for sr_model in subreddits:
        sr_author = random.choice(accounts)
        sr = ensure_subreddit(sr_model.name, sr_author)

        # make the system user subscribed for easier testing
        if sr.add_subscriber(system_user):
            sr._incr("_ups", 1)

        # apply any custom config we need for this sr
        for setting, value in extra_settings.get(sr.name, {}).iteritems():
            setattr(sr, setting, value)
        sr._commit()

        for i in xrange(num_links):
            link_author = random.choice(accounts)
            url = sr_model.generate_link_url()
            is_self = (url == "self")
            content = sr_model.generate_selfpost_body() if is_self else url
            link = Link._submit(
                is_self=is_self,
                title=sr_model.generate_link_title(),
                content=content,
                author=link_author,
                sr=sr,
                ip="127.0.0.1",
            )
            queries.new_link(link)
            things.append(link)

            comments = [None]
            for i in xrange(fuzz_number(num_comments)):
                comment_author = random.choice(accounts)
                comment, inbox_rel = Comment._new(
                    comment_author,
                    link,
                    parent=random.choice(comments),
                    body=sr_model.generate_comment_body(),
                    ip="127.0.0.1",
                )
                queries.new_comment(comment, inbox_rel)
                comments.append(comment)
                things.append(comment)

    for thing in things:
        for i in xrange(fuzz_number(num_votes)):
            direction = random.choice([
                Vote.DIRECTIONS.up,
                Vote.DIRECTIONS.unvote,
                Vote.DIRECTIONS.down,
            ])
            voter = random.choice(accounts)
            cast_vote(voter, thing, direction)

    amqp.worker.join()

    srs = [Subreddit._by_name(n) for n in ("pics", "videos", "askhistorians")]
    LocalizedDefaultSubreddits.set_global_srs(srs)
    LocalizedFeaturedSubreddits.set_global_srs([Subreddit._by_name('pics')])
def campaign_has_oversold_error(form, campaign):
    target = Subreddit._by_name(campaign.sr_name) if campaign.sr_name else None
    return has_oversold_error(form, campaign._id, campaign.start_date,
                              campaign.end_date, campaign.bid, campaign.cpm,
                              target)
def make_daily_promotions(offset=0, test=False):
    """
    Arguments:
      offset - number of days after today to get the schedule for
      test - if True, new schedule will be generated but not launched
    Raises Exception with list of campaigns that had errors if there were any
    """

    schedule = get_scheduled(offset)
    all_links = set([l._fullname for l in schedule['links']])
    error_campaigns = schedule['error_campaigns']
    weighted = weight_schedule(schedule['by_sr'])

    # over18 check
    for sr, links in weighted.iteritems():
        if sr:
            sr = Subreddit._by_name(sr)
            if sr.over_18:
                for l in Link._by_fullname([l[0] for l in links],
                                           return_dict=False):
                    l.over_18 = True
                    if not test:
                        l._commit()

    old_ads = get_live_promotions([LiveAdWeights.ALL_ADS])
    old_links = set(x.link for x in old_ads[LiveAdWeights.ALL_ADS])

    # links that need to be promoted
    new_links = all_links - old_links
    # links that have already been promoted
    old_links = old_links - all_links

    links = Link._by_fullname(new_links.union(old_links), data=True,
                              return_dict=True)

    for l in old_links:
        if is_promoted(links[l]):
            if test:
                print "unpromote", l
            else:
                # update the query queue
                set_promote_status(links[l], PROMOTE_STATUS.finished)
                emailer.finished_promo(links[l])

    for l in new_links:
        if is_accepted(links[l]):
            if test:
                print "promote2", l
            else:
                # update the query queue
                set_promote_status(links[l], PROMOTE_STATUS.promoted)
                emailer.live_promo(links[l])

    # convert the weighted dict to use sr_ids which are more useful
    srs = {"": ""}
    for srname in weighted.keys():
        if srname:
            srs[srname] = Subreddit._by_name(srname)._id
    weighted = dict((srs[k], v) for k, v in weighted.iteritems())

    if not test:
        set_live_promotions(weighted)
        _mark_promos_updated()
    else:
        print weighted

    # after launching as many campaigns as possible, raise an exception to
    # report any error campaigns. (useful for triggering alerts in irc)
    if error_campaigns:
        raise Exception("Some scheduled campaigns could not be added to daily "
                        "promotions: %r" % error_campaigns)