def url(self, absolute=False, mangle_name=True):
    from r2.lib.template_helpers import static
    if g.uncompressedJS:
        return [source.url(absolute=absolute, mangle_name=mangle_name)
                for source in self.sources]
    else:
        return static(self.name, absolute=absolute, mangle_name=mangle_name)
def use(self):
    from r2.lib.template_helpers import static
    if c.secure:
        return script_tag.format(src=static("jquery.js"))
    else:
        ext = ".js" if g.uncompressedJS else ".min.js"
        return script_tag.format(src=self.cdn_src + ext)
def use(self):
    from r2.lib.template_helpers import static
    if g.uncompressedJS:
        return "".join(source.use() for source in self.sources)
    else:
        url = os.path.join(g.static_path, self.name)
        return script_tag.format(src=static(url))
def stylesheet_url(self):
    from r2.lib.template_helpers import static, get_domain
    if self.stylesheet_is_static:
        return static(self.static_stylesheet_name)
    else:
        return "http://%s/stylesheet.css?v=%s" % (
            get_domain(cname=False, subreddit=True),
            self.stylesheet_hash)
def url(self, absolute=False, mangle_name=False):
    from r2.lib.template_helpers import static
    path = [g.static_path, self.name]
    if g.uncompressedJS:
        path.insert(1, "js")
    return static(os.path.join(*path), absolute, mangle_name)
def js_config():
    config = {
        # is the user logged in?
        "logged": c.user_is_loggedin and c.user.name,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # are we in an iframe?
        "cnameframe": bool(c.cname and not c.authorized_cname),
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "vl": {},
        "sr": {},
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracking_domain": g.tracking_domain,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "static_root": static(''),
    }
    return config
def use(self):
    from r2.lib.template_helpers import static
    if c.secure or c.user.pref_local_js:
        return script_tag.format(src=static(self.name))
    else:
        ext = ".js" if g.uncompressedJS else ".min.js"
        return script_tag.format(src=self.cdn_src + ext)
def header_url(url, absolute=False):
    if url == g.default_header_url:
        return static(url, absolute=absolute)
    elif absolute:
        return make_url_https(url)
    else:
        return make_url_protocol_relative(url)
def use(self, **kwargs):
    from pylons.i18n import get_lang
    from r2.lib.template_helpers import static
    from r2.lib.filters import SC_OFF, SC_ON

    if g.uncompressedJS:
        if c.lang == "en" or c.lang not in g.all_languages:
            # in this case, the msgids *are* the translated strings and we
            # can save ourselves the pricey step of lexing the js source
            return Module.use(self, **kwargs)

        msgids = extract_javascript_msgids(Module.get_source(self))
        localized_appendices = self.localized_appendices + [
            StringsSource(msgids)]

        lines = [Module.use(self, **kwargs)]
        for appendix in localized_appendices:
            line = SC_OFF + inline_script_tag.format(
                content=appendix.get_localized_source(c.lang)) + SC_ON
            lines.append(line)
        return "\n".join(lines)
    else:
        langs = get_lang() or [g.lang]
        url = LocalizedModule.languagize_path(self.name, langs[0])
        return script_tag.format(src=static(url), **kwargs)
def use(self):
    from r2.lib.template_helpers import static
    path = [g.static_path, self.name]
    if g.uncompressedJS:
        path.insert(1, "js")
    return script_tag.format(src=static(os.path.join(*path)))
def set_cup(self, cup_info):
    from r2.lib.template_helpers import static

    if cup_info is None:
        return

    if cup_info.get("expiration", None) is None:
        return

    cup_info.setdefault("label_template",
                        "%(user)s recently won a trophy! click here to see it.")
    cup_info.setdefault("img_url", static('award.png'))

    existing_info = self.cup_info()
    if (existing_info and
        existing_info["expiration"] > cup_info["expiration"]):
        # The existing award has a later expiration,
        # so it trumps the new one as far as cups go
        return

    td = cup_info["expiration"] - timefromnow("0 seconds")
    cache_lifetime = td.seconds
    if cache_lifetime <= 0:
        g.log.error("Adding a cup that's already expired?")
    else:
        g.hardcache.set("cup_info-%d" % self._id, cup_info, cache_lifetime)
def js_config(extra_config=None):
    config = {
        # is the user logged in?
        "logged": c.user_is_loggedin and c.user.name,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # are we in an iframe?
        "cnameframe": bool(c.cname and not c.authorized_cname),
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "fetch_trackers_url": g.fetch_trackers_url,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "vote_hash": c.vote_hash,
    }

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def stylesheet_url(self):
    from r2.lib.template_helpers import static, get_domain
    if self.stylesheet_is_static:
        return static(self.static_stylesheet_name)
    else:
        return "http://%s/stylesheet.css?v=%s" % (
            get_domain(cname=False, subreddit=True),
            self.stylesheet_hash)
def use(self):
    from pylons.i18n import get_lang
    from r2.lib.template_helpers import static
    embed = Module.use(self)
    if g.uncompressedJS:
        return embed + StringsSource().use()
    else:
        url = LocalizedModule.languagize_path(self.name, get_lang()[0])
        return script_tag.format(src=static(url))
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    gold = bool(logged and c.user.gold)

    config = {
        # is the user logged in?
        "logged": logged,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # does the client only want to communicate over HTTPS?
        "https_forced": c.user.https_forced,
        # debugging?
        "debug": g.debug,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "fetch_trackers_url": g.fetch_trackers_url,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "new_window": bool(c.user.pref_newwindow),
        "vote_hash": c.vote_hash,
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
    }

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def send429(self):
    retry_after = request.environ.get("retry_after")
    if retry_after:
        response.headers["Retry-After"] = str(retry_after)
        template_name = '/ratelimit_toofast.html'
    else:
        template_name = '/ratelimit_throttled.html'

    template = g.mako_lookup.get_template(template_name)
    return template.render(logo_url=static(g.default_header_url))
def use(self):
    from pylons.i18n import get_lang
    from r2.lib.template_helpers import static
    embed = Module.use(self)
    if g.uncompressedJS:
        return embed + StringsSource().use()
    else:
        name, ext = os.path.splitext(self.name)
        url = os.path.join(g.static_path, name + "." + get_lang()[0] + ext)
        return script_tag.format(src=static(url))
def send429(self):
    retry_after = request.environ.get("retry_after")
    if retry_after:
        response.headers["Retry-After"] = str(retry_after)
        template_name = "/ratelimit_toofast.html"
    else:
        template_name = "/ratelimit_throttled.html"

    template = g.mako_lookup.get_template(template_name)
    return template.render(logo_url=static(g.default_header_url))
def url(self, absolute=False, mangle_name=True):
    from r2.lib.template_helpers import static
    if g.uncompressedJS:
        return [
            source.url(absolute=absolute, mangle_name=mangle_name)
            for source in self.sources
        ]
    else:
        return static(self.name, absolute=absolute, mangle_name=mangle_name)
def send429(self):
    retry_after = request.environ.get("retry_after")
    if retry_after:
        response.headers["Retry-After"] = str(retry_after)
        template_name = '/ratelimit_toofast.html'
    else:
        template_name = '/ratelimit_throttled.html'

    loader = pylons.buffet.engines['mako']['engine']
    template = loader.load_template(template_name)
    return template.render(logo_url=static(g.default_header_url))
def send429(self):
    c.response.status_code = 429
    if 'retry_after' in request.environ:
        c.response.headers['Retry-After'] = str(request.environ['retry_after'])
        template_name = '/ratelimit_toofast.html'
    else:
        template_name = '/ratelimit_throttled.html'

    loader = pylons.buffet.engines['mako']['engine']
    template = loader.load_template(template_name)
    return template.render(logo_url=static(g.default_header_url))
def GET_document(self):
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            if code not in (204, 304):
                c.response.content = str(code)
            c.response.status_code = code
            return c.response
        elif c.render_style == "api":
            data = request.environ.get('extra_error_data', {'error': code})
            c.response.content = json.dumps(data)
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return redditbroke % (failien_url, rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        return handle_awful_failure("something really bad just happened.")
def js_config():
    config = {
        # is the user logged in?
        "logged": c.user_is_loggedin and c.user.name,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # are we in an iframe?
        "cnameframe": bool(c.cname and not c.authorized_cname),
        # this page's referer
        "referer": _force_unicode(request.referer) or "",
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "vl": {},
        "sr": {},
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracking_domain": g.tracking_domain,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "static_root": static(''),
    }
    return config
def GET_document(self):
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            if code not in (204, 304):
                c.response.content = str(code)
            c.response.status_code = code
            return c.response
        elif c.render_style in extensions.API_TYPES:
            data = request.environ.get('extra_error_data', {'error': code})
            c.response.content = websafe_json(json.dumps(data))
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return redditbroke % (failien_url, rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        return handle_awful_failure("something really bad just happened.")
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    gold = bool(logged and c.user.gold)

    config = {
        # is the user logged in?
        "logged": logged,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "fetch_trackers_url": g.fetch_trackers_url,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "vote_hash": c.vote_hash,
    }

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def __init__(self, focused_update, **kwargs):
    og_data = {
        "type": "article",
        "url": make_event_url(c.liveupdate_event._id),
        "description": trunc_string(
            focused_update.body.strip(), MAX_DESCRIPTION_LENGTH),
        "image": static("liveupdate-logo.png"),
        "image:width": "300",
        "image:height": "300",
        "site_name": "reddit",
    }

    LiveUpdateEventPage.__init__(
        self,
        og_data=og_data,
        **kwargs
    )
def GET_document(self):
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subsciteit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            if code not in (204, 304):
                c.response.content = str(code)
            return c.response
        elif c.render_style == "api":
            c.response.content = "{error: %s}" % code
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 500:
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return sciteitbroke % (failien_url, rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        return handle_awful_failure("something really bad just happened.")
def __init__(self, **kwargs):
    description = c.liveupdate_event.description or _(
        "real-time updates on %(short_description)s") % dict(
            short_description=g.short_description)

    og_data = {
        "type": "article",
        "url": make_event_url(c.liveupdate_event._id),
        "description": description,
        "image": static("liveupdate-logo.png"),
        "image:width": "300",
        "image:height": "300",
        "site_name": g.short_description,
        "ttl": "600",  # have this stuff re-fetched frequently
    }

    LiveUpdateEventPage.__init__(self,
                                 og_data=og_data,
                                 short_description=description,
                                 **kwargs)
def add_attr(attrs, code, label=None, link=None):
    from r2.lib.template_helpers import static

    img = None

    if code == 'F':
        priority = 1
        cssclass = 'friend'
        if not label:
            label = _('friend')
        if not link:
            link = '/prefs/friends'
    elif code == 'S':
        priority = 2
        cssclass = 'submitter'
        if not label:
            label = _('submitter')
        if not link:
            raise ValueError("Need a link")
    elif code == 'M':
        priority = 3
        cssclass = 'moderator'
        if not label:
            raise ValueError("Need a label")
        if not link:
            raise ValueError("Need a link")
    elif code == 'A':
        priority = 4
        cssclass = 'admin'
        if not label:
            label = _('reddit admin, speaking officially')
        if not link:
            link = '/help/faq#Whomadereddit'
    elif code == 'trophy':
        img = (static('award.png'), '!', 11, 8)
        priority = 99
        cssclass = 'recent-trophywinner'
        if not label:
            raise ValueError("Need a label")
        if not link:
            raise ValueError("Need a link")
    else:
        raise ValueError("Got weird code [%s]" % code)

    attrs.append((priority, code, cssclass, label, link, img))
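# Illustrative usage sketch (not from the original source): how add_attr()
# might be called to build up the attribute list rendered next to a username.
# The attrs list and the '/r/pics' link below are hypothetical example values.
attrs = []
add_attr(attrs, 'F')                                      # friend: default label and link
add_attr(attrs, 'M', label='moderator', link='/r/pics')   # moderator: label and link required
attrs.sort()  # tuples sort by their first element, the priority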
def __init__(self, **kwargs):
    description = (c.liveupdate_event.description or
                   _("real-time updates on %(short_description)s") %
                   dict(short_description=g.short_description))

    og_data = {
        "type": "article",
        "url": make_event_url(c.liveupdate_event._id),
        "description": description,
        "image": static("icon.png"),
        "site_name": g.short_description,
        "ttl": "600",  # have this stuff re-fetched frequently
    }

    LiveUpdateEventPage.__init__(self,
                                 og_data=og_data,
                                 short_description=description,
                                 **kwargs)
def _get_images(self):
    sr = Subreddit._by_name(g.about_sr_images)
    image_links = self._get_hot_posts(sr, count=g.about_images_count,
        filter=lambda x: self.image_title_re.match(x.title)
                         and x.score >= g.about_images_min_score)
    images = []
    for image_link in image_links:
        image = self.image_title_re.match(image_link.title).groupdict()
        image['url'] = image_link.url
        image['src'] = getattr(image_link, 'slideshow_src',
                               static('about/slideshow/%s.jpg' % image_link._id36))
        image['author_url'] = getattr(image_link, 'author_url', image['url'])
        image['via'] = image['via'] or image_link.author.name
        image['via_url'] = '/user/' + image['via']
        image['comment_label'], image['comment_class'] = \
            comment_label(image_link.num_comments)
        image['permalink'] = image_link.permalink
        images.append(image)
    return images
def js_config(extra_config=None):
    config = {
        # is the user logged in?
        "logged": c.user_is_loggedin and c.user.name,
        # the subreddit's name (for posts)
        "post_site": c.site.name if not c.default_sr else "",
        # are we in an iframe?
        "cnameframe": bool(c.cname and not c.authorized_cname),
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # debugging?
        "debug": g.debug,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "fetch_trackers_url": g.fetch_trackers_url,
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "vote_hash": c.vote_hash,
    }

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def use(self):
    from pylons.i18n import get_lang
    from r2.lib.template_helpers import static

    if g.uncompressedJS:
        if c.lang == "en":
            # in this case, the msgids *are* the translated strings and we
            # can save ourselves the pricey step of lexing the js source
            return Module.use(self)

        msgids = extract_javascript_msgids(Module.get_source(self))
        strings = StringsSource(c.lang, msgids)
        return "\n".join(
            (Module.use(self),
             inline_script_tag.format(content=strings.get_plural_forms()),
             strings.use())
        )
    else:
        langs = get_lang() or [g.lang]
        url = LocalizedModule.languagize_path(self.name, langs[0])
        return script_tag.format(src=static(url))
def _get_images(self):
    sr = Subreddit._by_name(g.about_sr_images)
    image_links = self._get_hot_posts(sr, count=g.about_images_count,
        filter=lambda x: self.image_title_re.match(x.title)
                         and x.score >= g.about_images_min_score)
    images = []
    for image_link in image_links:
        image = self.image_title_re.match(image_link.title).groupdict()
        image['url'] = image_link.url
        default_src = static('about/slideshow/%s.jpg' % image_link._id36)
        image['src'] = getattr(image_link, 'slideshow_src', default_src)
        image['author_url'] = getattr(image_link, 'author_url', image['url'])
        image['via'] = image['via'] or image_link.author.name
        image['via_url'] = '/user/' + image['via']
        image['comment_label'], image['comment_class'] = \
            comment_label(image_link.num_comments)
        image['permalink'] = image_link.permalink
        images.append(image)
    return images
def _get_images(self):
    sr = Subreddit._by_name(g.about_sr_images)
    image_links = self._get_hot_posts(
        sr,
        count=g.about_images_count,
        filter=lambda x: self.image_title_re.match(x.title)
                         and x.score >= g.about_images_min_score,
    )
    images = []
    for image_link in image_links:
        image = self.image_title_re.match(image_link.title).groupdict()
        image["url"] = image_link.url
        image["src"] = getattr(image_link, "slideshow_src",
                               static("about/slideshow/%s.jpg" % image_link._id36))
        image["author_url"] = getattr(image_link, "author_url", image["url"])
        image["via"] = image["via"] or image_link.author.name
        image["via_url"] = "/user/" + image["via"]
        image["comment_label"], image["comment_class"] = \
            comment_label(image_link.num_comments)
        image["permalink"] = image_link.permalink
        images.append(image)
    return images
def use(self):
    from pylons.i18n import get_lang
    from r2.lib.template_helpers import static

    if g.uncompressedJS:
        if c.lang == "en":
            # in this case, the msgids *are* the translated strings and we
            # can save ourselves the pricey step of lexing the js source
            return Module.use(self)

        msgids = extract_javascript_msgids(Module.get_source(self))
        strings = StringsSource(c.lang, msgids)
        return "\n".join((
            Module.use(self),
            inline_script_tag.format(content=strings.get_plural_forms()),
            strings.use(),
        ))
    else:
        langs = get_lang() or [g.lang]
        url = LocalizedModule.languagize_path(self.name, langs[0])
        return script_tag.format(src=static(url))
def use(self):
    from pylons.i18n import get_lang
    from r2.lib.template_helpers import static
    from r2.lib.filters import SC_OFF, SC_ON

    if g.uncompressedJS:
        if c.lang == "en" or c.lang not in g.all_languages:
            # in this case, the msgids *are* the translated strings and we
            # can save ourselves the pricey step of lexing the js source
            return Module.use(self)

        msgids = extract_javascript_msgids(Module.get_source(self))
        localized_appendices = self.localized_appendices + [StringsSource(msgids)]

        lines = [Module.use(self)]
        for appendix in localized_appendices:
            line = SC_OFF + inline_script_tag.format(
                content=appendix.get_localized_source(c.lang)) + SC_ON
            lines.append(line)
        return "\n".join(lines)
    else:
        langs = get_lang() or [g.lang]
        url = LocalizedModule.languagize_path(self.name, langs[0])
        return script_tag.format(src=static(url))
def make_failien_url():
    #failien_number = random.randint(1, FAILIEN_COUNT)
    #failien_name = "youbrokeit%d.png" % failien_number
    return static("youbrokeblockpath.png")
def make_failien_url():
    failien_number = random.randint(1, FAILIEN_COUNT)
    failien_name = "youbrokeit%d.png" % failien_number
    return static(failien_name)
    # don't mangle paths
    if dirname:
        return path + query

    if g.uncompressedJS:
        extension = path.split(".")[1:]
        if extension and extension[-1] in ("js", "css"):
            return os.path.join(c.site.static_path, extension[-1], path) + query

    return os.path.join(c.site.static_path, path) + query


external_resources = {
    "jquery.js": {
        True: lambda: static("jquery.js"),
        False: lambda: "http://ajax.googleapis.com/ajax/libs/jquery/1.6.1/jquery.min.js"
    }
}


def external(name):
    """
    Look up a named external URL from a static mapping.

    Helper for making frequently used URLs consistent across templates.
    """
    return external_resources[name][c.secure]()


def s3_https_if_secure(url):
    return url if not c.secure else url.replace("http://", "https://s3.amazonaws.com/")
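# Illustrative usage sketch (not from the original source): picking the jquery
# source via the external() helper above. Assuming c.secure reflects whether
# the current request is served over HTTPS, secure pages get the locally
# served copy and insecure pages get the CDN URL from external_resources.
jquery_src = external("jquery.js")
jquery_tag = '<script type="text/javascript" src="%s"></script>' % jquery_src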
def header_url(url):
    if url == g.default_header_url:
        return static(url)
    else:
        return make_url_protocol_relative(url)
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()

    cur_subreddit = ""
    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # does the client only want to communicate over HTTPS?
        "https_forced": c.user.https_forced,
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "new_window": bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "vote_hash": c.vote_hash,
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
    }

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def js_config(extra_config=None):
    from r2.lib import promote

    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name
    stats_name = route_name + ('' if promote.ads_enabled() else '.no_ads')
    banners_enabled = promote.banners_enabled(c.site, c.user)

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    share_ts = int(time.time() * 1000)
    share_tracking_hmac = None
    # Only enable for comments pages on desktop
    if (feature.is_enabled("url_share_tracking") and
            c.render_style == "html" and
            action_name == "GET_comments"):
        share_hash_msg = "%s" % share_ts
        if user_id:
            share_hash_msg = "%s|%s" % (user_id, share_ts)
        elif c.loid.loid:
            share_hash_msg = "%s|%s" % (c.loid.loid, share_ts)

        share_tracking_hmac = hmac.new(g.secrets["share_tracking"],
                                       share_hash_msg,
                                       hashlib.sha1).hexdigest()

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()
    stats_mac = hmac.new(g.secrets["action_name"], stats_name, hashlib.sha1)
    stats_verification = stats_mac.hexdigest()

    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity

    pref_media_preview = c.user.pref_media_preview if logged else "subreddit"
    if pref_media_preview == "subreddit":
        expando_preference = "subreddit_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = logged and c.user.pref_beta
    lazy_load_listings = (c.user.pref_numsites == 25 and
                          controller_name in ['hot', 'new'] and
                          feature.is_enabled("lazy_load_listings"))
    feature_frontpage_tagline = feature.is_enabled("frontpage_tagline")

    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    if feature.is_enabled("live_orangereds") and logged:
        user_websocket_url = websockets.make_url("/user/%s" % c.user._id36,
                                                 max_age=24 * 60 * 60)
    else:
        user_websocket_url = None

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "feature_outbound_beacons": feature.is_enabled('outbound_beacons'),
        "feature_scroll_events": feature.is_enabled('scroll_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
            "statsName": stats_name,
            "statsVerification": stats_verification,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_expando_events": feature.is_enabled('expando_events'),
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "feature_ads_user_matching": feature.is_enabled('ads_user_matching'),
        "feature_flatlist_events": feature.is_enabled('flatlist_events'),
        "feature_mobile_native_banner": feature.is_enabled('mobile_native_banner'),
        "email_verified": logged and c.user.email and c.user.email_verified,
        "feature_post_embed": feature.is_enabled('post_embed'),
        "ad_serving_events_sample_rate":
            g.live_config.get("events_collector_ad_serving_sample_rate", 0),
        "share_tracking_hmac": share_tracking_hmac,
        "share_tracking_ts": share_ts,
        "user_websocket_url": user_websocket_url,
        "live_orangereds_pref": c.user.pref_live_orangereds,
        "pref_email_messages": logged and c.user.pref_email_messages,
        "feature_double_sidebar": banners_enabled and
            not isinstance(c.site, FakeSubreddit),  # noqa
        "feature_lazy_load_listings": lazy_load_listings,
        "ads_loading_timeout_ms":
            g.live_config.get("ads_loading_timeout_ms", 1000),
        "feature_frontpage_tagline": feature_frontpage_tagline,
    }

    if feature.is_enabled("eu_cookie_policy"):
        config.update({
            "requires_eu_cookie_policy": geoip.requires_eu_cookie_policy(request, c),
            "eu_cookie": g.eu_cookie,
            "eu_cookie_max_attempts": g.eu_cookie_max_attempts,
        })

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
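# Illustrative sketch (not from the original source) of how the dict returned
# by js_config() could be serialized into a page, assuming a json import and
# an inline_script_tag template like the one used by the Module classes above.
# The "r.setup_sketch" client-side entry point is hypothetical.
import json

def embed_js_config_sketch(extra_config=None):
    # Serialize the config and wrap it in an inline <script> tag so the
    # client-side code can read it at page load time.
    payload = json.dumps(js_config(extra_config))
    return inline_script_tag.format(content="r.setup_sketch(%s)" % payload)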
def header_url(url):
    if url == g.default_header_url:
        return static(url)
    else:
        return media_https_if_secure(url)
def use(self):
    from r2.lib.template_helpers import static
    if g.uncompressedJS:
        return "".join(source.use() for source in self.sources)
    else:
        return script_tag.format(src=static(self.name))
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    mac = hmac.new(g.secrets["action_name"],
                   controller_name + '.' + action_name,
                   hashlib.sha1)
    verification = mac.hexdigest()

    cur_subreddit = ""
    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False,
                                 no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # does the client only want to communicate over HTTPS?
        "https_forced": c.user.https_forced,
        # debugging?
        "debug": g.debug,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": tracking.get_pageview_pixel_url() or '',
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "new_window": bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "vote_hash": c.vote_hash,
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": controller_name + '.' + action_name,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
    }

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()

    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "email_verified": logged and c.user.email and c.user.email_verified,
    }

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config