def _get_scrape_url(link):
    if not link.is_self:
        sr_name = link.subreddit_slow.name
        if not feature.is_enabled("imgur_gif_conversion", subreddit=sr_name):
            return link.url
        p = UrlParser(link.url)
        # If it's a gif link on imgur, replacing it with gifv should
        # give us the embedly friendly video url
        if is_subdomain(p.hostname, "imgur.com"):
            if p.path_extension().lower() == "gif":
                p.set_extension("gifv")
                return p.unparse()
        return link.url

    urls = extract_urls_from_markdown(link.selftext)
    second_choice = None
    for url in urls:
        p = UrlParser(url)
        if p.is_reddit_url():
            continue
        # If we don't find anything we like better, use the first image.
        if not second_choice:
            second_choice = url
        # This is an optimization for "proof images" in AMAs.
        if is_subdomain(p.netloc, 'imgur.com') or p.has_image_extension():
            return url

    return second_choice
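A minimal, self-contained sketch of the gif-to-gifv rewrite above, using only the standard library instead of reddit's UrlParser; the helper name and example URL are illustrative, not part of the original code.

# Sketch: rewrite imgur .gif links to .gifv so an oEmbed scraper sees the
# video page instead of the raw gif. Uses the same "host equals or ends
# with .imgur.com" notion of is_subdomain seen throughout these snippets.
from urlparse import urlsplit, urlunsplit  # Python 2, as in the surrounding code

def imgur_gif_to_gifv(url):
    parts = urlsplit(url)
    host = (parts.hostname or "").lower()
    on_imgur = host == "imgur.com" or host.endswith(".imgur.com")
    if on_imgur and parts.path.lower().endswith(".gif"):
        path = parts.path[:-len(".gif")] + ".gifv"
        return urlunsplit((parts.scheme, parts.netloc, path,
                           parts.query, parts.fragment))
    return url

# imgur_gif_to_gifv("http://i.imgur.com/example.gif")
#   -> "http://i.imgur.com/example.gifv"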
def __call__(self, environ, start_response):
    g = config['pylons.g']
    http_host = environ.get('HTTP_HOST', 'localhost').lower()
    domain, s, port = http_host.partition(':')

    # remember the port
    try:
        environ['request_port'] = int(port)
    except ValueError:
        pass

    # localhost is exempt so paster run/shell will work
    # media_domain doesn't need special processing since it's just ads
    if domain == "localhost" or is_subdomain(domain, g.media_domain):
        return self.app(environ, start_response)

    # tell reddit_base to redirect to the appropriate subreddit for
    # a legacy CNAME
    if not is_subdomain(domain, g.domain):
        environ['legacy-cname'] = domain
        return self.app(environ, start_response)

    # figure out what subdomain we're on, if any
    subdomains = domain[:-len(g.domain) - 1].split('.')
    extension_subdomains = dict(m="mobile",
                                i="compact",
                                api="api",
                                rss="rss",
                                xml="xml",
                                json="json")

    sr_redirect = None
    for subdomain in subdomains[:]:
        if subdomain in g.reserved_subdomains:
            continue

        extension = extension_subdomains.get(subdomain)
        if extension:
            environ['reddit-domain-extension'] = extension
        elif self.lang_re.match(subdomain):
            environ['reddit-prefer-lang'] = subdomain
            environ['reddit-domain-prefix'] = subdomain
        else:
            sr_redirect = subdomain
            subdomains.remove(subdomain)

    # if there was a subreddit subdomain, redirect
    if sr_redirect and environ.get("FULLPATH"):
        if not subdomains and g.domain_prefix:
            subdomains.append(g.domain_prefix)
        subdomains.append(g.domain)
        redir = "%s/r/%s/%s" % ('.'.join(subdomains), sr_redirect,
                                environ['FULLPATH'])
        redir = "http://" + redir.replace('//', '/')
        start_response("301 Moved Permanently", [("Location", redir)])
        return [""]

    return self.app(environ, start_response)
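The slice above strips the base domain plus its leading dot before splitting on dots to find subdomains. A small sketch of that behaviour, assuming a base domain of "reddit.com"; the hostnames are illustrative only.

# Sketch of the subdomain-extraction slice used by the middleware above.
def split_subdomains(host, base_domain="reddit.com"):
    # "pics.m.reddit.com" -> "pics.m" -> ["pics", "m"]
    return host[:-len(base_domain) - 1].split('.')

# split_subdomains("m.reddit.com")    -> ["m"]     (extension subdomain: mobile)
# split_subdomains("es.reddit.com")   -> ["es"]    (language subdomain)
# split_subdomains("pics.reddit.com") -> ["pics"]  (subreddit, triggers the 301)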
def __call__(self, environ, start_response): g = config["pylons.g"] http_host = environ.get("HTTP_HOST", "localhost").lower() domain, s, port = http_host.partition(":") # remember the port try: environ["request_port"] = int(port) except ValueError: pass # localhost is exempt so paster run/shell will work # media_domain doesn't need special processing since it's just ads if domain == "localhost" or is_subdomain(domain, g.media_domain): return self.app(environ, start_response) # tell reddit_base to redirect to the appropriate subreddit for # a legacy CNAME if not is_subdomain(domain, g.domain): environ["legacy-cname"] = domain return self.app(environ, start_response) # figure out what subdomain we're on if any subdomains = domain[: -len(g.domain) - 1].split(".") extension_subdomains = dict(m="mobile", i="compact", api="api", rss="rss", xml="xml", json="json") sr_redirect = None for subdomain in subdomains[:]: if subdomain in g.reserved_subdomains: continue extension = extension_subdomains.get(subdomain) if extension: environ["reddit-domain-extension"] = extension elif self.lang_re.match(subdomain): environ["reddit-prefer-lang"] = subdomain environ["reddit-domain-prefix"] = subdomain else: sr_redirect = subdomain subdomains.remove(subdomain) # if there was a subreddit subdomain, redirect if sr_redirect and environ.get("FULLPATH"): r = Response() if not subdomains and g.domain_prefix: subdomains.append(g.domain_prefix) subdomains.append(g.domain) redir = "%s/r/%s/%s" % (".".join(subdomains), sr_redirect, environ["FULLPATH"]) redir = "http://" + redir.replace("//", "/") r.status_code = 301 r.headers["location"] = redir r.content = "" return r(environ, start_response) return self.app(environ, start_response)
def is_trusted_origin(origin):
    try:
        origin = urlparse(origin)
    except ValueError:
        return False

    return any(is_subdomain(origin.hostname, domain)
               for domain in g.trusted_domains)
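Every snippet here leans on is_subdomain for host checks. A hedged sketch of the contract these callers appear to assume (not the actual r2.lib.utils implementation): a host matches a base domain when it equals it or ends with "." plus the base, which is why "evilreddit.com" does not pass for "reddit.com".

# Sketch only: assumed suffix-matching contract for is_subdomain.
def is_subdomain_sketch(subdomain, base):
    if not subdomain or not base:
        return False
    return subdomain == base or subdomain.endswith("." + base)

# is_subdomain_sketch("i.imgur.com", "imgur.com")            -> True
# is_subdomain_sketch("imgur.com.evil.example", "imgur.com") -> False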
def js_config():
    config = {
        # is the user logged in?
        "logged": c.user_is_loggedin and c.user.name,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # are we in an iframe?
        "cnameframe": bool(c.cname and not c.authorized_cname),
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "vl": {},
        "sr": {},
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading..."),
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracking_domain": g.tracking_domain,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "static_root": static(''),
    }
    return config
def js_config(extra_config=None):
    config = {
        # is the user logged in?
        "logged": c.user_is_loggedin and c.user.name,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # are we in an iframe?
        "cnameframe": bool(c.cname and not c.authorized_cname),
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading..."),
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "fetch_trackers_url": g.fetch_trackers_url,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "vote_hash": c.vote_hash,
    }

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def allowed_media_preview_url(url):
    p = UrlParser(url)
    if p.has_static_image_extension():
        return True
    for allowed_domain in g.media_preview_domain_whitelist:
        if is_subdomain(p.hostname, allowed_domain):
            return True
    return False
def js_config(extra_config=None): logged = c.user_is_loggedin and c.user.name gold = bool(logged and c.user.gold) config = { # is the user logged in? "logged": logged, # the subreddit's name (for posts) "post_site": c.site.name if not c.default_sr else "", # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False), "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, # does the client only want to communicate over HTTPS? "https_forced": c.user.https_forced, # debugging? "debug": g.debug, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "fetch_trackers_url": g.fetch_trackers_url, "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "static_root": static(''), "over_18": bool(c.over18), "new_window": bool(c.user.pref_newwindow), "vote_hash": c.vote_hash, "gold": gold, "has_subscribed": logged and c.user.has_subscribed, } if g.uncompressedJS: config["uncompressedJS"] = True if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def js_config():
    config = {
        # is the user logged in?
        "logged": c.user_is_loggedin and c.user.name,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # are we in an iframe?
        "cnameframe": bool(c.cname and not c.authorized_cname),
        # this page's referer
        "referer": _force_unicode(request.referer) or "",
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "vl": {},
        "sr": {},
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading..."),
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracking_domain": g.tracking_domain,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "static_root": static(''),
    }
    return config
def js_config(extra_config=None): logged = c.user_is_loggedin and c.user.name gold = bool(logged and c.user.gold) config = { # is the user logged in? "logged": logged, # the subreddit's name (for posts) "post_site": c.site.name if not c.default_sr else "", # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False), "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, # debugging? "debug": g.debug, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "fetch_trackers_url": g.fetch_trackers_url, "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "static_root": static(''), "over_18": bool(c.over18), "vote_hash": c.vote_hash, } if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def OPTIONS_report_cache_poisoning(self):
    """Send CORS headers for cache poisoning reports."""
    if "Origin" not in request.headers:
        return

    origin = request.headers["Origin"]
    parsed_origin = UrlParser(origin)
    if not is_subdomain(parsed_origin.hostname, g.domain):
        return

    response.headers["Access-Control-Allow-Origin"] = origin
    response.headers["Access-Control-Allow-Methods"] = "POST"
    response.headers["Access-Control-Allow-Headers"] = \
        "Authorization, X-Loggit, "
    response.headers["Access-Control-Allow-Credentials"] = "false"
    response.headers['Access-Control-Expose-Headers'] = \
        self.COMMON_REDDIT_HEADERS
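For context, a sketch of the browser preflight exchange this handler answers. The header names come from the code above; the origin value and status line are illustrative, and no CORS headers are emitted at all when the Origin is missing or not a subdomain of g.domain.

# OPTIONS /api/report_cache_poisoning HTTP/1.1
# Origin: https://www.reddit.com        <- only honoured if it is a subdomain of g.domain
#
# HTTP/1.1 200 OK                        <- status is whatever the framework defaults to
# Access-Control-Allow-Origin: https://www.reddit.com
# Access-Control-Allow-Methods: POST
# Access-Control-Allow-Headers: Authorization, X-Loggit,
# Access-Control-Allow-Credentials: false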
def on_crawlable_domain(self):
    # This ensures we don't have the port included.
    requested_domain = utils.domain(request.host)

    # If someone CNAMEs myspammysite.com to reddit.com or something, we
    # don't want search engines to index that.
    if not utils.is_subdomain(requested_domain, g.domain):
        return False

    # Only allow the canonical desktop site and mobile subdomains, since
    # we have canonicalization set up appropriately for them.
    # Note: in development, DomainMiddleware needs to be temporarily
    # modified to not skip assignment of reddit-domain-extension on
    # localhost for this to work properly.
    return (requested_domain == g.domain or
            request.environ.get('reddit-domain-extension') in
            ('mobile', 'compact'))
def _get_scrape_url(link):
    if not link.is_self:
        return link.url

    urls = extract_urls_from_markdown(link.selftext)
    second_choice = None
    for url in urls:
        p = UrlParser(url)
        if p.is_reddit_url():
            continue
        # If we don't find anything we like better, use the first image.
        if not second_choice:
            second_choice = url
        # This is an optimization for "proof images" in AMAs.
        if is_subdomain(p.netloc, 'imgur.com') or p.has_image_extension():
            return url

    return second_choice
def js_config(extra_config=None): config = { # is the user logged in? "logged": c.user_is_loggedin and c.user.name, # the subreddit's name (for posts) "post_site": c.site.name if not c.default_sr else "", # are we in an iframe? "cnameframe": bool(c.cname and not c.authorized_cname), # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # current domain "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False), "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, # debugging? "debug": g.debug, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "fetch_trackers_url": g.fetch_trackers_url, "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "static_root": static(''), "over_18": bool(c.over18), "vote_hash": c.vote_hash, } if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def js_config(extra_config=None): logged = c.user_is_loggedin and c.user.name user_id = c.user_is_loggedin and c.user._id gold = bool(logged and c.user.gold) controller_name = request.environ['pylons.routes_dict']['controller'] action_name = request.environ['pylons.routes_dict']['action'] mac = hmac.new(g.secrets["action_name"], controller_name + '.' + action_name, hashlib.sha1) verification = mac.hexdigest() cur_subreddit = "" if isinstance(c.site, Subreddit) and not c.default_sr: cur_subreddit = c.site.name config = { # is the user logged in? "logged": logged, # logged in user's id "user_id": user_id, # the subreddit's name (for posts) "post_site": cur_subreddit, # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False), "stats_domain": g.stats_domain or '', "stats_sample_rate": g.stats_sample_rate or 0, "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, # does the client only want to communicate over HTTPS? "https_forced": c.user.https_forced, # debugging? "debug": g.debug, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "eventtracker_url": g.eventtracker_url, "anon_eventtracker_url": g.anon_eventtracker_url, "static_root": static(''), "over_18": bool(c.over18), "new_window": bool(c.user.pref_newwindow), "vote_hash": c.vote_hash, "gold": gold, "has_subscribed": logged and c.user.has_subscribed, "is_sponsor": logged and c.user_is_sponsor, "pageInfo": { "verification": verification, "actionName": controller_name + '.' + action_name, }, } if g.uncompressedJS: config["uncompressedJS"] = True if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def _edit_promo(self, form, jquery, username, title, url, selftext, kind, disable_comments, sendreplies, media_url, media_autoplay, media_override, gifts_embed_url, media_url_type, domain_override, is_managed, l=None, thumbnail_file=None): should_ratelimit = False if not c.user_is_sponsor: should_ratelimit = True if not should_ratelimit: c.errors.remove((errors.RATELIMIT, 'ratelimit')) # check for user override if not l and c.user_is_sponsor and username: try: user = Account._by_name(username) except NotFound: c.errors.add(errors.USER_DOESNT_EXIST, field="username") form.set_error(errors.USER_DOESNT_EXIST, "username") return if not user.email: c.errors.add(errors.NO_EMAIL_FOR_USER, field="username") form.set_error(errors.NO_EMAIL_FOR_USER, "username") return if not user.email_verified: c.errors.add(errors.NO_VERIFIED_EMAIL, field="username") form.set_error(errors.NO_VERIFIED_EMAIL, "username") return else: user = c.user # check for shame banned domains if form.has_errors("url", errors.DOMAIN_BANNED): g.stats.simple_event('spam.shame.link') return # demangle URL in canonical way if url: if isinstance(url, (unicode, str)): form.set_inputs(url=url) elif isinstance(url, tuple) or isinstance(url[0], Link): # there's already one or more links with this URL, but # we're allowing mutliple submissions, so we really just # want the URL url = url[0].url if kind == 'link': if form.has_errors('url', errors.NO_URL, errors.BAD_URL): return # users can change the disable_comments on promoted links if ((not l or not promote.is_promoted(l)) and (form.has_errors('title', errors.NO_TEXT, errors.TOO_LONG) or jquery.has_errors('ratelimit', errors.RATELIMIT))): return if kind == 'self' and form.has_errors('text', errors.TOO_LONG): return if not l: # creating a new promoted link l = promote.new_promotion(title, url if kind == 'link' else 'self', selftext if kind == 'self' else '', user, request.ip) l.domain_override = domain_override or None if c.user_is_sponsor: l.managed_promo = is_managed l._commit() # only set the thumbnail when creating a link if thumbnail_file: try: force_thumbnail(l, thumbnail_file) l._commit() except IOError: pass form.redirect(promote.promo_edit_url(l)) elif not promote.is_promo(l): return # changing link type is not allowed if ((l.is_self and kind == 'link') or (not l.is_self and kind == 'self')): c.errors.add(errors.NO_CHANGE_KIND, field="kind") form.set_error(errors.NO_CHANGE_KIND, "kind") return changed = False # live items can only be changed by a sponsor, and also # pay the cost of de-approving the link if not promote.is_promoted(l) or c.user_is_sponsor: if title and title != l.title: l.title = title changed = not c.user_is_sponsor if kind == 'link' and url and url != l.url: l.url = url changed = not c.user_is_sponsor # only trips if the title and url are changed by a non-sponsor if changed: promote.unapprove_promotion(l) # selftext can be changed at any time if kind == 'self': l.selftext = selftext # comment disabling and sendreplies is free to be changed any time. 
l.disable_comments = disable_comments l.sendreplies = sendreplies if c.user_is_sponsor: if (form.has_errors("media_url", errors.BAD_URL) or form.has_errors("gifts_embed_url", errors.BAD_URL)): return scraper_embed = media_url_type == "scrape" media_url = media_url or None gifts_embed_url = gifts_embed_url or None if c.user_is_sponsor and scraper_embed and media_url != l.media_url: if media_url: media = _scrape_media( media_url, autoplay=media_autoplay, save_thumbnail=False, use_cache=True) if media: l.set_media_object(media.media_object) l.set_secure_media_object(media.secure_media_object) l.media_url = media_url l.gifts_embed_url = None l.media_autoplay = media_autoplay else: c.errors.add(errors.SCRAPER_ERROR, field="media_url") form.set_error(errors.SCRAPER_ERROR, "media_url") return else: l.set_media_object(None) l.set_secure_media_object(None) l.media_url = None l.gifts_embed_url = None l.media_autoplay = False if (c.user_is_sponsor and not scraper_embed and gifts_embed_url != l.gifts_embed_url): if gifts_embed_url: parsed = UrlParser(gifts_embed_url) if not is_subdomain(parsed.hostname, "redditgifts.com"): c.errors.add(errors.BAD_URL, field="gifts_embed_url") form.set_error(errors.BAD_URL, "gifts_embed_url") return iframe = """ <iframe class="redditgifts-embed" src="%(embed_url)s" width="710" height="500" scrolling="no" frameborder="0" allowfullscreen> </iframe> """ % {'embed_url': websafe(gifts_embed_url)} media_object = { 'oembed': { 'description': 'redditgifts embed', 'height': 500, 'html': iframe, 'provider_name': 'redditgifts', 'provider_url': 'http://www.redditgifts.com/', 'title': 'redditgifts secret santa 2014', 'type': 'rich', 'width': 710}, 'type': 'redditgifts' } l.set_media_object(media_object) l.set_secure_media_object(media_object) l.media_url = None l.gifts_embed_url = gifts_embed_url l.media_autoplay = False else: l.set_media_object(None) l.set_secure_media_object(None) l.media_url = None l.gifts_embed_url = None l.media_autoplay = False if c.user_is_sponsor: l.media_override = media_override l.domain_override = domain_override or None l.managed_promo = is_managed l._commit() form.redirect(promote.promo_edit_url(l))
def js_config(extra_config=None): logged = c.user_is_loggedin and c.user.name user_id = c.user_is_loggedin and c.user._id user_in_timeout = c.user_is_loggedin and c.user.in_timeout gold = bool(logged and c.user.gold) controller_name = request.environ['pylons.routes_dict']['controller'] action_name = request.environ['pylons.routes_dict']['action'] route_name = controller_name + '.' + action_name cache_policy = "loggedout_www" if c.user_is_loggedin: cache_policy = "loggedin_www_new" # Canary for detecting cache poisoning poisoning_canary = None poisoning_report_mac = None if logged: if "pc" in c.cookies and len(c.cookies["pc"].value) == 2: poisoning_canary = c.cookies["pc"].value poisoning_report_mac = make_poisoning_report_mac( poisoner_canary=poisoning_canary, poisoner_name=logged, poisoner_id=user_id, cache_policy=cache_policy, source="web", route_name=route_name, ) mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1) verification = mac.hexdigest() cur_subreddit = "" cur_sr_fullname = "" cur_listing = "" if isinstance(c.site, Subreddit) and not c.default_sr: cur_subreddit = c.site.name cur_sr_fullname = c.site._fullname cur_listing = cur_subreddit elif isinstance(c.site, DefaultSR): cur_listing = "frontpage" elif isinstance(c.site, FakeSubreddit): cur_listing = c.site.name if g.debug: events_collector_url = g.events_collector_test_url events_collector_key = g.secrets['events_collector_test_js_key'] events_collector_secret = g.secrets['events_collector_test_js_secret'] else: events_collector_url = g.events_collector_url events_collector_key = g.secrets['events_collector_js_key'] events_collector_secret = g.secrets['events_collector_js_secret'] config = { # is the user logged in? "logged": logged, # logged in user's id "user_id": user_id, # is user in timeout? "user_in_timeout": user_in_timeout, # the subreddit's name (for posts) "post_site": cur_subreddit, "cur_site": cur_sr_fullname, "cur_listing": cur_listing, # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(subreddit=False), "stats_domain": g.stats_domain or '', "stats_sample_rate": g.stats_sample_rate or 0, "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, "media_domain": g.media_domain, # does the client only want to communicate over HTTPS? "https_forced": feature.is_enabled("force_https"), # debugging? 
"debug": g.debug, "poisoning_canary": poisoning_canary, "poisoning_report_mac": poisoning_report_mac, "cache_policy": cache_policy, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "tracker_url": "", # overridden below if configured "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "eventtracker_url": g.eventtracker_url, "anon_eventtracker_url": g.anon_eventtracker_url, "events_collector_url": events_collector_url, "events_collector_key": events_collector_key, "events_collector_secret": events_collector_secret, "feature_screenview_events": feature.is_enabled('screenview_events'), "static_root": static(''), "over_18": bool(c.over18), "new_window": logged and bool(c.user.pref_newwindow), "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'], "gold": gold, "has_subscribed": logged and c.user.has_subscribed, "is_sponsor": logged and c.user_is_sponsor, "pageInfo": { "verification": verification, "actionName": route_name, }, "facebook_app_id": g.live_config["facebook_app_id"], "feature_new_report_dialog": feature.is_enabled('new_report_dialog'), "email_verified": logged and c.user.email and c.user.email_verified, } if g.tracker_url: config["tracker_url"] = tracking.get_pageview_pixel_url() if g.uncompressedJS: config["uncompressedJS"] = True if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def __init__(self, space_compress=None, nav_menus=None, loginbox=True, infotext='', infotext_class=None, content=None, short_description='', title='', robots=None, show_sidebar=True, show_chooser=False, footer=True, srbar=True, page_classes=None, short_title=None, show_wiki_actions=False, extra_js_config=None, show_locationbar=False, **context): Templated.__init__(self, **context) self.title = title self.short_title = short_title self.short_description = short_description self.robots = robots self.infotext = infotext self.extra_js_config = extra_js_config self.show_wiki_actions = show_wiki_actions self.loginbox = loginbox self.show_sidebar = show_sidebar self.space_compress = space_compress # instantiate a footer self.footer = RedditFooter() if footer else None self.debug_footer = DebugFooter() self.supplied_page_classes = page_classes or [] #put the sort menus at the top self.nav_menu = MenuArea(menus = nav_menus) if nav_menus else None #add the infobar self.welcomebar = None self.newsletterbar = None self.locationbar = None self.infobar = None self.mobilewebredirectbar = None # generate a canonical link for google self.canonical_link = request.fullpath if c.render_style != "html": u = UrlParser(request.fullpath) u.set_extension("") u.hostname = g.domain if g.domain_prefix: u.hostname = "%s.%s" % (g.domain_prefix, u.hostname) self.canonical_link = u.unparse() # Generate a mobile link for Google. u = UrlParser(request.fullpath) u.switch_subdomain_by_extension('mobile') u.scheme = 'https' self.mobile_link = u.unparse() if self.show_infobar: if not infotext: if g.heavy_load_mode: # heavy load mode message overrides read only infotext = strings.heavy_load_msg elif g.read_only_mode: infotext = strings.read_only_msg elif g.live_config.get("announcement_message"): infotext = g.live_config["announcement_message"] if infotext: self.infobar = InfoBar( message=infotext, extra_class=infotext_class) elif (isinstance(c.site, DomainSR) and is_subdomain(c.site.domain, "imgur.com")): self.infobar = InfoBar(message= _("imgur.com domain listings (including this one) are " "currently disabled to speed up vote processing.") ) elif isinstance(c.site, AllMinus) and not c.user.gold: self.infobar = InfoBar(message=strings.all_minus_gold_only, extra_class="gold") if not c.user_is_loggedin: self.welcomebar = WelcomeBar() if feature.is_enabled('newsletter') and getattr(self, "show_newsletterbar", True): self.newsletterbar = NewsletterBar() if c.render_style == "compact": self.mobilewebredirectbar = MobileWebRedirectBar() show_locationbar &= not c.user.pref_hide_locationbar if (show_locationbar and c.used_localized_defaults and (not c.user_is_loggedin or not c.user.has_subscribed)): self.locationbar = LocationBar() self.srtopbar = None if srbar and not c.cname and not is_api(): self.srtopbar = SubredditTopBar() panes = [content] if c.user_is_loggedin and not is_api() and not self.show_wiki_actions: # insert some form templates for js to use # TODO: move these to client side templates gold_link = GoldPayment("gift", "monthly", months=1, signed=False, recipient="", giftmessage=None, passthrough=None, thing=None, clone_template=True, thing_type="link", ) gold_comment = GoldPayment("gift", "monthly", months=1, signed=False, recipient="", giftmessage=None, passthrough=None, thing=None, clone_template=True, thing_type="comment", ) report_form = ReportForm() if not feature.is_enabled('improved_sharing'): panes.append(ShareLink()) panes.append(report_form) if self.show_sidebar: panes.extend([gold_comment, gold_link]) if 
c.user_is_sponsor: panes.append(FraudForm()) self._content = PaneStack(panes) self.show_chooser = ( show_chooser and c.render_style == "html" and c.user_is_loggedin and ( isinstance(c.site, (DefaultSR, AllSR, ModSR, LabeledMulti)) or c.site.name == g.live_config["listing_chooser_explore_sr"] ) ) self.toolbars = self.build_toolbars() has_style_override = (c.user.pref_default_theme_sr and feature.is_enabled('stylesheets_everywhere') and c.user.pref_enable_default_themes) # if there is no style or the style is disabled for this subreddit self.no_sr_styles = (isinstance(c.site, DefaultSR) or (not self.get_subreddit_stylesheet_url(c.site) and not c.site.header) or (c.user and not c.user.use_subreddit_style(c.site))) self.default_theme_sr = DefaultSR() # use override stylesheet if they have custom styles disabled or # this subreddit has no custom stylesheet (or is the front page) if self.no_sr_styles: self.subreddit_stylesheet_url = self.get_subreddit_stylesheet_url( self.default_theme_sr) else: self.subreddit_stylesheet_url = self.get_subreddit_stylesheet_url(c.site) if has_style_override and self.no_sr_styles: sr = Subreddit._by_name(c.user.pref_default_theme_sr) # make sure they can still view their override subreddit if sr.can_view(c.user) and sr.stylesheet_url: self.subreddit_stylesheet_url = self.get_subreddit_stylesheet_url(sr) if c.can_apply_styles and c.allow_styles and sr.header: self.default_theme_sr = sr
def js_config(extra_config=None): logged = c.user_is_loggedin and c.user.name user_id = c.user_is_loggedin and c.user._id gold = bool(logged and c.user.gold) controller_name = request.environ['pylons.routes_dict']['controller'] action_name = request.environ['pylons.routes_dict']['action'] mac = hmac.new(g.secrets["action_name"], controller_name + '.' + action_name, hashlib.sha1) verification = mac.hexdigest() cur_subreddit = "" if isinstance(c.site, Subreddit) and not c.default_sr: cur_subreddit = c.site.name config = { # is the user logged in? "logged": logged, # logged in user's id "user_id": user_id, # the subreddit's name (for posts) "post_site": cur_subreddit, # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False), "stats_domain": g.stats_domain or '', "stats_sample_rate": g.stats_sample_rate or 0, "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, # does the client only want to communicate over HTTPS? "https_forced": c.user.https_forced, # debugging? "debug": g.debug, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "tracker_url": tracking.get_pageview_pixel_url() or '', "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "eventtracker_url": g.eventtracker_url, "anon_eventtracker_url": g.anon_eventtracker_url, "static_root": static(''), "over_18": bool(c.over18), "new_window": bool(c.user.pref_newwindow), "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'], "vote_hash": c.vote_hash, "gold": gold, "has_subscribed": logged and c.user.has_subscribed, "is_sponsor": logged and c.user_is_sponsor, "pageInfo": { "verification": verification, "actionName": controller_name + '.' + action_name, }, "facebook_app_id": g.live_config["facebook_app_id"], } if g.uncompressedJS: config["uncompressedJS"] = True if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def __call__(self, environ, start_response): g = config['pylons.g'] http_host = environ.get('HTTP_HOST', 'localhost').lower() domain, s, port = http_host.partition(':') # remember the port try: environ['request_port'] = int(port) except ValueError: pass # localhost is exempt so paster run/shell will work # media_domain doesn't need special processing since it's just ads if domain == "localhost" or is_subdomain(domain, g.media_domain): return self.app(environ, start_response) # tell reddit_base to redirect to the appropriate subreddit for # a legacy CNAME if not is_subdomain(domain, g.domain): environ['legacy-cname'] = domain return self.app(environ, start_response) # How many characters to chop off the end of the hostname before # we start looking at subdomains ignored_suffix_len = len(g.domain) # figure out what subdomain we're on, if any subdomains = domain[:-ignored_suffix_len - 1].split('.') sr_redirect = None prefix_parts = [] for subdomain in subdomains[:]: extension = g.extension_subdomains.get(subdomain) # These subdomains are reserved, don't treat them as SR # or language subdomains. if subdomain in g.reserved_subdomains: # Some subdomains are reserved, but also can't be mixed into # the domain prefix for various reasons (permalinks will be # broken, etc.) if subdomain in g.ignored_subdomains: continue prefix_parts.append(subdomain) elif extension: environ['reddit-domain-extension'] = extension elif self.lang_re.match(subdomain): environ['reddit-prefer-lang'] = subdomain else: sr_redirect = subdomain subdomains.remove(subdomain) if 'reddit-prefer-lang' in environ: prefix_parts.insert(0, environ['reddit-prefer-lang']) if prefix_parts: environ['reddit-domain-prefix'] = '.'.join(prefix_parts) # if there was a subreddit subdomain, redirect if sr_redirect and environ.get("FULLPATH"): if not subdomains and g.domain_prefix: subdomains.append(g.domain_prefix) subdomains.append(g.domain) redir = "%s/r/%s/%s" % ('.'.join(subdomains), sr_redirect, environ['FULLPATH']) redir = "http://" + redir.replace('//', '/') start_response("301 Moved Permanently", [("Location", redir)]) return [""] return self.app(environ, start_response)
def js_config(extra_config=None): logged = c.user_is_loggedin and c.user.name user_id = c.user_is_loggedin and c.user._id gold = bool(logged and c.user.gold) controller_name = request.environ['pylons.routes_dict']['controller'] action_name = request.environ['pylons.routes_dict']['action'] route_name = controller_name + '.' + action_name cache_policy = "loggedout_www" if c.user_is_loggedin: cache_policy = "loggedin_www_new" # Canary for detecting cache poisoning poisoning_canary = None poisoning_report_mac = None if logged: if "pc" in c.cookies and len(c.cookies["pc"].value) == 2: poisoning_canary = c.cookies["pc"].value poisoning_report_mac = make_poisoning_report_mac( poisoner_canary=poisoning_canary, poisoner_name=logged, poisoner_id=user_id, cache_policy=cache_policy, source="web", route_name=route_name, ) mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1) verification = mac.hexdigest() cur_subreddit = "" if isinstance(c.site, Subreddit) and not c.default_sr: cur_subreddit = c.site.name config = { # is the user logged in? "logged": logged, # logged in user's id "user_id": user_id, # the subreddit's name (for posts) "post_site": cur_subreddit, # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False), "stats_domain": g.stats_domain or '', "stats_sample_rate": g.stats_sample_rate or 0, "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, # does the client only want to communicate over HTTPS? "https_forced": c.user.https_forced, # debugging? "debug": g.debug, "poisoning_canary": poisoning_canary, "poisoning_report_mac": poisoning_report_mac, "cache_policy": cache_policy, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "tracker_url": "", # overridden below if configured "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "eventtracker_url": g.eventtracker_url, "anon_eventtracker_url": g.anon_eventtracker_url, "static_root": static(''), "over_18": bool(c.over18), "new_window": bool(c.user.pref_newwindow), "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'], "vote_hash": c.vote_hash, "gold": gold, "has_subscribed": logged and c.user.has_subscribed, "is_sponsor": logged and c.user_is_sponsor, "pageInfo": { "verification": verification, "actionName": route_name, }, "facebook_app_id": g.live_config["facebook_app_id"], } if g.tracker_url: config["tracker_url"] = tracking.get_pageview_pixel_url() if g.uncompressedJS: config["uncompressedJS"] = True if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def require_domain(required_domain):
    if not is_subdomain(request.host, required_domain):
        abort(ForbiddenError(errors.WRONG_DOMAIN))
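A hedged usage sketch: a controller action that only accepts requests arriving on the expected domain. The action name below is hypothetical; only require_domain and the 403 behaviour come from the code above.

# def POST_sensitive_action(self):
#     require_domain(g.domain)   # aborts with 403 WRONG_DOMAIN on any other host
#     ...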
def js_config(extra_config=None): logged = c.user_is_loggedin and c.user.name user_id = c.user_is_loggedin and c.user._id user_in_timeout = c.user_is_loggedin and c.user.in_timeout gold = bool(logged and c.user.gold) controller_name = request.environ['pylons.routes_dict']['controller'] action_name = request.environ['pylons.routes_dict']['action'] route_name = controller_name + '.' + action_name cache_policy = "loggedout_www" if c.user_is_loggedin: cache_policy = "loggedin_www_new" # Canary for detecting cache poisoning poisoning_canary = None poisoning_report_mac = None if logged: if "pc" in c.cookies and len(c.cookies["pc"].value) == 2: poisoning_canary = c.cookies["pc"].value poisoning_report_mac = make_poisoning_report_mac( poisoner_canary=poisoning_canary, poisoner_name=logged, poisoner_id=user_id, cache_policy=cache_policy, source="web", route_name=route_name, ) mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1) verification = mac.hexdigest() cur_subreddit = "" cur_sr_fullname = "" cur_listing = "" listing_over_18 = False pref_no_profanity = not logged or c.user.pref_no_profanity pref_media_preview = c.user.pref_media_preview if not feature.is_enabled("autoexpand_media_previews"): expando_preference = None elif pref_media_preview == "subreddit": expando_preference = "subreddit_default" elif pref_media_preview == "on": expando_preference = "auto_expand" else: expando_preference = "do_not_expand" pref_beta = c.user.pref_beta nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged if isinstance(c.site, Subreddit) and not c.default_sr: cur_subreddit = c.site.name cur_sr_fullname = c.site._fullname cur_listing = cur_subreddit listing_over_18 = c.site.over_18 elif isinstance(c.site, DefaultSR): cur_listing = "frontpage" elif isinstance(c.site, FakeSubreddit): cur_listing = c.site.name if g.debug: events_collector_url = g.events_collector_test_url events_collector_key = g.secrets['events_collector_test_js_key'] events_collector_secret = g.secrets['events_collector_test_js_secret'] else: events_collector_url = g.events_collector_url events_collector_key = g.secrets['events_collector_js_key'] events_collector_secret = g.secrets['events_collector_js_secret'] bp_currencyrates = hooks.get_hook("fetchcurrencyrates").call()[0] config = { # is the user logged in? "logged": logged, # logged in user's id "user_id": user_id, # is user in timeout? "user_in_timeout": user_in_timeout, # the subreddit's name (for posts) "post_site": cur_subreddit, "cur_site": cur_sr_fullname, "cur_listing": cur_listing, # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(subreddit=False), "stats_domain": g.stats_domain or '', "stats_sample_rate": g.stats_sample_rate or 0, "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, "media_domain": g.media_domain, # does the client only want to communicate over HTTPS? 
"https_forced": feature.is_enabled("force_https"), "debug": g.debug, "donation_address": g.live_config["blockpath_donation_addr"] if 'blockpath_donation_addr' in g.live_config else '', "bpcritical_notification": g.live_config["bpcritical_notification"] if 'bpcritical_notification' in g.live_config else '', "bp_qb_wallets": [i for i in c.user.bp_qb_wallets.split(',') if i], "pref_bp_gravity": c.user.pref_bp_gravity, "pref_bp_linkdistance": c.user.pref_bp_linkdistance, "pref_bp_chargedistance": c.user.pref_bp_chargedistance, "pref_bp_charge": c.user.pref_bp_charge, "pref_bp_theta": c.user.pref_bp_theta, "pref_bp_friction": c.user.pref_bp_friction, "pref_bp_nodesoftlimit": c.user.pref_bp_nodesoftlimit, "pref_bp_linksoftlimit": c.user.pref_bp_linksoftlimit, "pref_bp_currency": c.user.pref_bp_currency, "pref_bp_currencyatot": c.user.pref_bp_currencyatot, "pref_bp_linklabel": c.user.pref_bp_linklabel, "pref_bp_linklabeldir": c.user.pref_bp_linklabeldir, "bp_currencyrates": bp_currencyrates, "poisoning_canary": poisoning_canary, "poisoning_report_mac": poisoning_report_mac, "cache_policy": cache_policy, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "tracker_url": "", # overridden below if configured "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "eventtracker_url": g.eventtracker_url, "anon_eventtracker_url": g.anon_eventtracker_url, "events_collector_url": events_collector_url, "events_collector_key": events_collector_key, "events_collector_secret": events_collector_secret, "feature_screenview_events": feature.is_enabled('screenview_events'), "static_root": static(''), "over_18": bool(c.over18), "listing_over_18": listing_over_18, "expando_preference": expando_preference, "pref_no_profanity": pref_no_profanity, "pref_beta": pref_beta, "nsfw_media_acknowledged": nsfw_media_acknowledged, "new_window": logged and bool(c.user.pref_newwindow), "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'], "gold": gold, "has_subscribed": logged and c.user.has_subscribed, "is_sponsor": logged and c.user_is_sponsor, "pageInfo": { "verification": verification, "actionName": route_name, }, "facebook_app_id": g.live_config["facebook_app_id"], "feature_new_report_dialog": feature.is_enabled('new_report_dialog'), "email_verified": logged and c.user.email and c.user.email_verified, "email": c.user.email } if g.tracker_url: config["tracker_url"] = tracking.get_pageview_pixel_url() if g.uncompressedJS: config["uncompressedJS"] = True if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def js_config(extra_config=None): from r2.lib import promote logged = c.user_is_loggedin and c.user.name user_id = c.user_is_loggedin and c.user._id user_in_timeout = c.user_is_loggedin and c.user.in_timeout gold = bool(logged and c.user.gold) controller_name = request.environ['pylons.routes_dict']['controller'] action_name = request.environ['pylons.routes_dict']['action'] route_name = controller_name + '.' + action_name stats_name = route_name + ('' if promote.ads_enabled() else '.no_ads') banners_enabled = promote.banners_enabled(c.site, c.user) cache_policy = "loggedout_www" if c.user_is_loggedin: cache_policy = "loggedin_www_new" # Canary for detecting cache poisoning poisoning_canary = None poisoning_report_mac = None if logged: if "pc" in c.cookies and len(c.cookies["pc"].value) == 2: poisoning_canary = c.cookies["pc"].value poisoning_report_mac = make_poisoning_report_mac( poisoner_canary=poisoning_canary, poisoner_name=logged, poisoner_id=user_id, cache_policy=cache_policy, source="web", route_name=route_name, ) share_ts = int(time.time() * 1000) share_tracking_hmac = None # Only enable for comments pages on desktop if (feature.is_enabled("url_share_tracking") and c.render_style == "html" and action_name == "GET_comments"): share_hash_msg = "%s" % share_ts if user_id: share_hash_msg = "%s|%s" % (user_id, share_ts) elif c.loid.loid: share_hash_msg = "%s|%s" % (c.loid.loid, share_ts) share_tracking_hmac = hmac.new(g.secrets["share_tracking"], share_hash_msg, hashlib.sha1).hexdigest() mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1) verification = mac.hexdigest() stats_mac = hmac.new(g.secrets["action_name"], stats_name, hashlib.sha1) stats_verification = stats_mac.hexdigest() cur_subreddit = "" cur_sr_fullname = "" cur_listing = "" listing_over_18 = False pref_no_profanity = not logged or c.user.pref_no_profanity pref_media_preview = c.user.pref_media_preview if logged else "subreddit" if pref_media_preview == "subreddit": expando_preference = "subreddit_default" elif pref_media_preview == "on": expando_preference = "auto_expand" else: expando_preference = "do_not_expand" pref_beta = logged and c.user.pref_beta lazy_load_listings = (c.user.pref_numsites == 25 and controller_name in ['hot', 'new'] and feature.is_enabled("lazy_load_listings")) feature_frontpage_tagline = feature.is_enabled("frontpage_tagline") nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged if isinstance(c.site, Subreddit) and not c.default_sr: cur_subreddit = c.site.name cur_sr_fullname = c.site._fullname cur_listing = cur_subreddit listing_over_18 = c.site.over_18 elif isinstance(c.site, DefaultSR): cur_listing = "frontpage" elif isinstance(c.site, FakeSubreddit): cur_listing = c.site.name if g.debug: events_collector_url = g.events_collector_test_url events_collector_key = g.secrets['events_collector_test_js_key'] events_collector_secret = g.secrets['events_collector_test_js_secret'] else: events_collector_url = g.events_collector_url events_collector_key = g.secrets['events_collector_js_key'] events_collector_secret = g.secrets['events_collector_js_secret'] if feature.is_enabled("live_orangereds") and logged: user_websocket_url = websockets.make_url("/user/%s" % c.user._id36, max_age=24 * 60 * 60) else: user_websocket_url = None config = { # is the user logged in? "logged": logged, # logged in user's id "user_id": user_id, # is user in timeout? 
"user_in_timeout": user_in_timeout, # the subreddit's name (for posts) "post_site": cur_subreddit, "cur_site": cur_sr_fullname, "cur_listing": cur_listing, # the user's voting hash "modhash": c.modhash or False, # the current rendering style "renderstyle": c.render_style, # they're welcome to try to override this in the DOM because we just # disable the features server-side if applicable 'store_visits': gold and c.user.pref_store_visits, # current domain "cur_domain": get_domain(subreddit=False, no_www=True), # where do ajax requests go? "ajax_domain": get_domain(subreddit=False), "stats_domain": g.stats_domain or '', "stats_sample_rate": g.stats_sample_rate or 0, "extension": c.extension, "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint, "media_domain": g.media_domain, # does the client only want to communicate over HTTPS? "https_forced": feature.is_enabled("force_https"), # debugging? "debug": g.debug, "poisoning_canary": poisoning_canary, "poisoning_report_mac": poisoning_report_mac, "cache_policy": cache_policy, "send_logs": g.live_config["frontend_logging"], "server_time": math.floor(time.time()), "status_msg": { "fetching": _("fetching title..."), "submitting": _("submitting..."), "loading": _("loading...") }, "is_fake": isinstance(c.site, FakeSubreddit), "tracker_url": "", # overridden below if configured "adtracker_url": g.adtracker_url, "clicktracker_url": g.clicktracker_url, "uitracker_url": g.uitracker_url, "eventtracker_url": g.eventtracker_url, "anon_eventtracker_url": g.anon_eventtracker_url, "events_collector_url": events_collector_url, "events_collector_key": events_collector_key, "events_collector_secret": events_collector_secret, "feature_screenview_events": feature.is_enabled('screenview_events'), "feature_outbound_beacons": feature.is_enabled('outbound_beacons'), "feature_scroll_events": feature.is_enabled('scroll_events'), "static_root": static(''), "over_18": bool(c.over18), "listing_over_18": listing_over_18, "expando_preference": expando_preference, "pref_no_profanity": pref_no_profanity, "pref_beta": pref_beta, "nsfw_media_acknowledged": nsfw_media_acknowledged, "new_window": logged and bool(c.user.pref_newwindow), "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'], "gold": gold, "has_subscribed": logged and c.user.has_subscribed, "is_sponsor": logged and c.user_is_sponsor, "pageInfo": { "verification": verification, "actionName": route_name, "statsName": stats_name, "statsVerification": stats_verification, }, "facebook_app_id": g.live_config["facebook_app_id"], "feature_expando_events": feature.is_enabled('expando_events'), "feature_new_report_dialog": feature.is_enabled('new_report_dialog'), "feature_ads_user_matching": feature.is_enabled('ads_user_matching'), "feature_flatlist_events": feature.is_enabled('flatlist_events'), "feature_mobile_native_banner": feature.is_enabled('mobile_native_banner'), "email_verified": logged and c.user.email and c.user.email_verified, "feature_post_embed": feature.is_enabled('post_embed'), "ad_serving_events_sample_rate": g.live_config.get("events_collector_ad_serving_sample_rate", 0), "share_tracking_hmac": share_tracking_hmac, "share_tracking_ts": share_ts, "user_websocket_url": user_websocket_url, "live_orangereds_pref": c.user.pref_live_orangereds, "pref_email_messages": logged and c.user.pref_email_messages, "feature_double_sidebar": banners_enabled and not isinstance(c.site, FakeSubreddit), # noqa "feature_lazy_load_listings": lazy_load_listings, "ads_loading_timeout_ms": 
g.live_config.get("ads_loading_timeout_ms", 1000), "feature_frontpage_tagline": feature_frontpage_tagline, } if feature.is_enabled("eu_cookie_policy"): config.update({ "requires_eu_cookie_policy": geoip.requires_eu_cookie_policy(request, c), "eu_cookie": g.eu_cookie, "eu_cookie_max_attempts": g.eu_cookie_max_attempts, }) if g.tracker_url: config["tracker_url"] = tracking.get_pageview_pixel_url() if g.uncompressedJS: config["uncompressedJS"] = True if extra_config: config.update(extra_config) hooks.get_hook("js_config").call(config=config) return config
def __call__(self, environ, start_response): g = config['pylons.g'] http_host = environ.get('HTTP_HOST', 'localhost').lower() domain, s, port = http_host.partition(':') # remember the port try: environ['request_port'] = int(port) except ValueError: pass # localhost is exempt so paster run/shell will work # media_domain doesn't need special processing since it's just ads if domain == "localhost" or is_subdomain(domain, g.media_domain): return self.app(environ, start_response) # tell reddit_base to redirect to the appropriate subreddit for # a legacy CNAME if not is_subdomain(domain, g.domain): environ['legacy-cname'] = domain return self.app(environ, start_response) # How many characters to chop off the end of the hostname before # we start looking at subdomains ignored_suffix_len = len(g.domain) # figure out what subdomain we're on, if any subdomains = domain[:-ignored_suffix_len - 1].split('.') extension_subdomains = dict(m="mobile", i="compact", api="api", rss="rss", xml="xml", json="json") sr_redirect = None prefix_parts = [] for subdomain in subdomains[:]: extension = extension_subdomains.get(subdomain) # These subdomains are reserved, don't treat them as SR # or language subdomains. if subdomain in g.reserved_subdomains: # Some subdomains are reserved, but also can't be mixed into # the domain prefix for various reasons (permalinks will be # broken, etc.) if subdomain in g.ignored_subdomains: continue prefix_parts.append(subdomain) elif extension: environ['reddit-domain-extension'] = extension elif self.lang_re.match(subdomain): environ['reddit-prefer-lang'] = subdomain else: sr_redirect = subdomain subdomains.remove(subdomain) if 'reddit-prefer-lang' in environ: prefix_parts.insert(0, environ['reddit-prefer-lang']) if prefix_parts: environ['reddit-domain-prefix'] = '.'.join(prefix_parts) # if there was a subreddit subdomain, redirect if sr_redirect and environ.get("FULLPATH"): if not subdomains and g.domain_prefix: subdomains.append(g.domain_prefix) subdomains.append(g.domain) redir = "%s/r/%s/%s" % ('.'.join(subdomains), sr_redirect, environ['FULLPATH']) redir = "http://" + redir.replace('//', '/') start_response("301 Moved Permanently", [("Location", redir)]) return [""] return self.app(environ, start_response)
def js_config(extra_config=None):
    from r2.lib import promote

    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name
    stats_name = route_name + ('' if promote.ads_enabled() else '.no_ads')
    banners_enabled = promote.banners_enabled(c.site, c.user)

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    share_ts = int(time.time() * 1000)
    share_tracking_hmac = None
    # Only enable for comments pages on desktop
    if (feature.is_enabled("url_share_tracking") and
            c.render_style == "html" and
            action_name == "GET_comments"):
        share_hash_msg = "%s" % share_ts
        if user_id:
            share_hash_msg = "%s|%s" % (user_id, share_ts)
        elif c.loid.loid:
            share_hash_msg = "%s|%s" % (c.loid.loid, share_ts)

        share_tracking_hmac = hmac.new(g.secrets["share_tracking"],
                                       share_hash_msg,
                                       hashlib.sha1).hexdigest()

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()
    stats_mac = hmac.new(g.secrets["action_name"], stats_name, hashlib.sha1)
    stats_verification = stats_mac.hexdigest()

    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity

    pref_media_preview = c.user.pref_media_preview if logged else "subreddit"
    if pref_media_preview == "subreddit":
        expando_preference = "subreddit_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = logged and c.user.pref_beta
    lazy_load_listings = (
        c.user.pref_numsites == 25 and
        controller_name in ['hot', 'new'] and
        feature.is_enabled("lazy_load_listings")
    )
    feature_frontpage_tagline = feature.is_enabled("frontpage_tagline")
    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    if feature.is_enabled("live_orangereds") and logged:
        user_websocket_url = websockets.make_url("/user/%s" % c.user._id36,
                                                 max_age=24 * 60 * 60)
    else:
        user_websocket_url = None

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading..."),
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "feature_outbound_beacons": feature.is_enabled('outbound_beacons'),
        "feature_scroll_events": feature.is_enabled('scroll_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
            "statsName": stats_name,
            "statsVerification": stats_verification,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_expando_events": feature.is_enabled('expando_events'),
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "feature_ads_user_matching": feature.is_enabled('ads_user_matching'),
        "feature_flatlist_events": feature.is_enabled('flatlist_events'),
        "feature_mobile_native_banner": feature.is_enabled(
            'mobile_native_banner'),
        "email_verified": logged and c.user.email and c.user.email_verified,
        "feature_post_embed": feature.is_enabled('post_embed'),
        "ad_serving_events_sample_rate": g.live_config.get(
            "events_collector_ad_serving_sample_rate", 0),
        "share_tracking_hmac": share_tracking_hmac,
        "share_tracking_ts": share_ts,
        "user_websocket_url": user_websocket_url,
        "live_orangereds_pref": c.user.pref_live_orangereds,
        "pref_email_messages": logged and c.user.pref_email_messages,
        "feature_double_sidebar": banners_enabled and
            not isinstance(c.site, FakeSubreddit),  # noqa
        "feature_lazy_load_listings": lazy_load_listings,
        "ads_loading_timeout_ms": g.live_config.get(
            "ads_loading_timeout_ms", 1000),
        "feature_frontpage_tagline": feature_frontpage_tagline,
    }

    if feature.is_enabled("eu_cookie_policy"):
        config.update({
            "requires_eu_cookie_policy": geoip.requires_eu_cookie_policy(request, c),
            "eu_cookie": g.eu_cookie,
            "eu_cookie_max_attempts": g.eu_cookie_max_attempts,
        })

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
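The share-tracking values above are meant to be echoed back by the client; any receiver holding the same secret can recompute the digest over the same "<id>|<timestamp>" message. A minimal sketch of such a check follows -- the function name and the use of hmac.compare_digest are assumptions, not taken from the reddit source.

import hashlib
import hmac

def check_share_tracking_mac(secret, user_or_loid, share_ts, supplied_mac):
    # Rebuild the message exactly as js_config does: "<id>|<ts>", or just
    # the timestamp when there is neither a user id nor a loid.
    msg = "%s|%s" % (user_or_loid, share_ts) if user_or_loid else "%s" % share_ts
    expected = hmac.new(secret, msg, hashlib.sha1).hexdigest()
    # compare_digest (Python 2.7.7+) avoids leaking timing information.
    return hmac.compare_digest(expected, supplied_mac)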
def POST_edit_promo(self, form, jquery, username, l, title, url,
                    selftext, kind, disable_comments, sendreplies,
                    media_url, media_autoplay, media_override,
                    gifts_embed_url, media_url_type, domain_override,
                    is_managed):
    should_ratelimit = False
    if not c.user_is_sponsor:
        should_ratelimit = True

    if not should_ratelimit:
        c.errors.remove((errors.RATELIMIT, 'ratelimit'))

    # check for user override
    if not l and c.user_is_sponsor and username:
        try:
            user = Account._by_name(username)
        except NotFound:
            c.errors.add(errors.USER_DOESNT_EXIST, field="username")
            form.set_error(errors.USER_DOESNT_EXIST, "username")
            return

        if not user.email:
            c.errors.add(errors.NO_EMAIL_FOR_USER, field="username")
            form.set_error(errors.NO_EMAIL_FOR_USER, "username")
            return

        if not user.email_verified:
            c.errors.add(errors.NO_VERIFIED_EMAIL, field="username")
            form.set_error(errors.NO_VERIFIED_EMAIL, "username")
            return
    else:
        user = c.user

    # check for shame banned domains
    if form.has_errors("url", errors.DOMAIN_BANNED):
        g.stats.simple_event('spam.shame.link')
        return

    # demangle URL in canonical way
    if url:
        if isinstance(url, (unicode, str)):
            form.set_inputs(url=url)
        elif isinstance(url, tuple) or isinstance(url[0], Link):
            # there's already one or more links with this URL, but
            # we're allowing multiple submissions, so we really just
            # want the URL
            url = url[0].url

    if kind == 'link':
        if form.has_errors('url', errors.NO_URL, errors.BAD_URL):
            return

    # users can change the disable_comments on promoted links
    if ((not l or not promote.is_promoted(l)) and
            (form.has_errors('title', errors.NO_TEXT, errors.TOO_LONG) or
             jquery.has_errors('ratelimit', errors.RATELIMIT))):
        return

    if kind == 'self' and form.has_errors('text', errors.TOO_LONG):
        return

    if not l:
        # creating a new promoted link
        l = promote.new_promotion(title,
                                  url if kind == 'link' else 'self',
                                  selftext if kind == 'self' else '',
                                  user,
                                  request.ip)
        l.domain_override = domain_override or None
        if c.user_is_sponsor:
            l.managed_promo = is_managed
        l._commit()
        form.redirect(promote.promo_edit_url(l))
    elif not promote.is_promo(l):
        return

    # changing link type is not allowed
    if ((l.is_self and kind == 'link') or
            (not l.is_self and kind == 'self')):
        c.errors.add(errors.NO_CHANGE_KIND, field="kind")
        form.set_error(errors.NO_CHANGE_KIND, "kind")
        return

    changed = False
    # live items can only be changed by a sponsor, and also
    # pay the cost of de-approving the link
    if not promote.is_promoted(l) or c.user_is_sponsor:
        if title and title != l.title:
            l.title = title
            changed = not c.user_is_sponsor

        if kind == 'link' and url and url != l.url:
            l.url = url
            changed = not c.user_is_sponsor

    # only trips if the title and url are changed by a non-sponsor
    if changed:
        promote.unapprove_promotion(l)

    # selftext can be changed at any time
    if kind == 'self':
        l.selftext = selftext

    # comment disabling and sendreplies are free to be changed at any time
    l.disable_comments = disable_comments
    l.sendreplies = sendreplies

    if c.user_is_sponsor:
        if (form.has_errors("media_url", errors.BAD_URL) or
                form.has_errors("gifts_embed_url", errors.BAD_URL)):
            return

    scraper_embed = media_url_type == "scrape"
    media_url = media_url or None
    gifts_embed_url = gifts_embed_url or None

    if c.user_is_sponsor and scraper_embed and media_url != l.media_url:
        if media_url:
            media = _scrape_media(media_url, autoplay=media_autoplay,
                                  save_thumbnail=False, use_cache=True)
            if media:
                l.set_media_object(media.media_object)
                l.set_secure_media_object(media.secure_media_object)
                l.media_url = media_url
                l.gifts_embed_url = None
                l.media_autoplay = media_autoplay
            else:
                c.errors.add(errors.SCRAPER_ERROR, field="media_url")
                form.set_error(errors.SCRAPER_ERROR, "media_url")
                return
        else:
            l.set_media_object(None)
            l.set_secure_media_object(None)
            l.media_url = None
            l.gifts_embed_url = None
            l.media_autoplay = False

    if (c.user_is_sponsor and not scraper_embed and
            gifts_embed_url != l.gifts_embed_url):
        if gifts_embed_url:
            parsed = UrlParser(gifts_embed_url)
            if not is_subdomain(parsed.hostname, "redditgifts.com"):
                c.errors.add(errors.BAD_URL, field="gifts_embed_url")
                form.set_error(errors.BAD_URL, "gifts_embed_url")
                return

            iframe = """
                <iframe class="redditgifts-embed"
                        src="%(embed_url)s"
                        width="710" height="500" scrolling="no"
                        frameborder="0" allowfullscreen>
                </iframe>
            """ % {'embed_url': websafe(gifts_embed_url)}

            media_object = {
                'oembed': {
                    'description': 'redditgifts embed',
                    'height': 500,
                    'html': iframe,
                    'provider_name': 'redditgifts',
                    'provider_url': 'http://www.redditgifts.com/',
                    'title': 'redditgifts secret santa 2014',
                    'type': 'rich',
                    'width': 710,
                },
                'type': 'redditgifts',
            }
            l.set_media_object(media_object)
            l.set_secure_media_object(media_object)
            l.media_url = None
            l.gifts_embed_url = gifts_embed_url
            l.media_autoplay = False
        else:
            l.set_media_object(None)
            l.set_secure_media_object(None)
            l.media_url = None
            l.gifts_embed_url = None
            l.media_autoplay = False

    if c.user_is_sponsor:
        l.media_override = media_override
        l.domain_override = domain_override or None
        l.managed_promo = is_managed

    l._commit()
    form.redirect(promote.promo_edit_url(l))
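The redditgifts branch above leans on is_subdomain to make sure the embed URL really points at redditgifts.com rather than a lookalike host. The real helper lives elsewhere in the codebase; the sketch below (name and body are illustrative, not the actual implementation) only shows the containment check being relied on.

def is_subdomain_sketch(hostname, base):
    # True for "redditgifts.com" itself and any host ending in ".redditgifts.com";
    # a plain substring test would wrongly accept "evilredditgifts.com".
    if not hostname or not base:
        return False
    return hostname == base or hostname.endswith('.' + base)

assert is_subdomain_sketch("www.redditgifts.com", "redditgifts.com")
assert not is_subdomain_sketch("evilredditgifts.com", "redditgifts.com")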
def __call__(self, environ, start_response):
    g = config['pylons.g']
    http_host = environ.get('HTTP_HOST', 'localhost').lower()
    domain, s, port = http_host.partition(':')

    # remember the port
    try:
        environ['request_port'] = int(port)
    except ValueError:
        pass

    # localhost is exempt so paster run/shell will work
    # media_domain doesn't need special processing since it's just ads
    if domain == "localhost" or is_subdomain(domain, g.media_domain):
        return self.app(environ, start_response)

    # tell reddit_base to redirect to the appropriate subreddit for
    # a legacy CNAME
    if not is_subdomain(domain, g.domain):
        environ['legacy-cname'] = domain
        return self.app(environ, start_response)

    # How many characters to chop off the end of the hostname before
    # we start looking at subdomains
    ignored_suffix_len = len(g.domain)

    # OAuth is a bit of a special case. `foo.oauth.domain.com` should be
    # treated like `foo.domain.com` when generating links
    if g.oauth_domain and is_subdomain(domain, g.oauth_domain):
        ignored_suffix_len = len(g.oauth_domain)

    # figure out what subdomain we're on, if any
    subdomains = domain[:-ignored_suffix_len - 1].split('.')
    extension_subdomains = dict(m="mobile",
                                i="compact",
                                api="api",
                                rss="rss",
                                xml="xml",
                                json="json")

    sr_redirect = None
    prefix_parts = []
    for subdomain in subdomains[:]:
        extension = extension_subdomains.get(subdomain)

        # These subdomains have special meanings; don't treat them as SR
        # or language subdomains.
        if subdomain in g.reserved_subdomains:
            if subdomain == g.domain_prefix:
                continue
            prefix_parts.append(subdomain)
        elif extension:
            environ['reddit-domain-extension'] = extension
        elif self.lang_re.match(subdomain):
            environ['reddit-prefer-lang'] = subdomain
        else:
            sr_redirect = subdomain
            subdomains.remove(subdomain)

    if 'reddit-prefer-lang' in environ:
        prefix_parts.insert(0, environ['reddit-prefer-lang'])

    if prefix_parts:
        environ['reddit-domain-prefix'] = '.'.join(prefix_parts)

    # if there was a subreddit subdomain, redirect
    if sr_redirect and environ.get("FULLPATH"):
        if not subdomains and g.domain_prefix:
            subdomains.append(g.domain_prefix)
        subdomains.append(g.domain)
        redir = "%s/r/%s/%s" % ('.'.join(subdomains),
                                sr_redirect, environ['FULLPATH'])
        redir = "http://" + redir.replace('//', '/')

        start_response("301 Moved Permanently", [("Location", redir)])
        return [""]

    return self.app(environ, start_response)
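To make the final redirect branch concrete, here is the URL it would assemble for a request to pics.reddit.com, assuming g.domain = "reddit.com", g.domain_prefix = "www", and FULLPATH = "/top?t=all" (all example values, not taken from a real configuration):

subdomains = []             # 'pics' was pulled out as sr_redirect
subdomains.append("www")    # g.domain_prefix, since no other subdomains remain
subdomains.append("reddit.com")
redir = "%s/r/%s/%s" % ('.'.join(subdomains), "pics", "/top?t=all")
# the replace collapses the double slash before FULLPATH; the scheme is
# prepended afterwards so its own "//" is left untouched
redir = "http://" + redir.replace('//', '/')
assert redir == "http://www.reddit.com/r/pics/top?t=all"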