def thing_attr(self, thing, attr):
    """Resolve JSON template attributes for a live update event.

    Falls through to ThingJsonTemplate.thing_attr for anything not
    handled explicitly here.
    """
    if attr == "_fullname":
        return "LiveUpdateEvent_" + thing._id
    elif attr == "viewer_count":
        # viewer counts are only meaningful while the event is live
        if thing.state == "live":
            return thing.active_visitors
        else:
            return None
    elif attr == "viewer_count_fuzzed":
        if thing.state == "live":
            return thing.active_visitors_fuzzed
        else:
            return None
    elif attr == "description_html":
        return filters.spaceCompress(
            filters.safemarkdown(thing.description, nofollow=True) or "")
    elif attr == "resources_html":
        return filters.spaceCompress(
            filters.safemarkdown(thing.resources, nofollow=True) or "")
    elif attr == "websocket_url":
        if thing.state == "live":
            # use the thing being rendered, not c.liveupdate_event, so the
            # template stays correct when rendering an event other than the
            # one the request context points at (matches the other
            # thing_attr variants in this codebase)
            return websockets.make_url(
                "/live/" + thing._id,
                max_age=24 * 60 * 60)
        else:
            return None
    else:
        return ThingJsonTemplate.thing_attr(self, thing, attr)
def GET_listing(self, num, after, before, count):
    """Render the paginated listing of updates for the current event."""
    event = c.liveupdate_event

    # paging backwards means walking the stream in reverse from `before`
    is_reversed = bool(before)
    if before:
        after = before

    stream_query = LiveUpdateStream.query(
        [event._id], count=num, reverse=is_reversed)
    if after:
        stream_query.column_start = after

    update_builder = LiveUpdateBuilder(
        query=stream_query, skip=True,
        reverse=is_reversed, num=num, count=count)
    update_listing = pages.LiveUpdateListing(update_builder)
    content = pages.LiveUpdateEvent(
        event=event,
        listing=update_listing.listing(),
    )

    # don't generate a url unless this is the main page of an event
    if event.state == "live" and not (after or before):
        websocket_url = websockets.make_url(
            "/live/" + event._id, max_age=24 * 60 * 60)
    else:
        websocket_url = None

    return pages.LiveUpdatePage(
        content=content,
        websocket_url=websocket_url,
    ).render()
def thing_attr(self, thing, attr):
    """Resolve JSON template attributes for a live update event."""
    if attr == "_fullname":
        return "LiveUpdateEvent_" + thing._id
    if attr == "viewer_count":
        return thing.active_visitors if thing.state == "live" else None
    if attr == "viewer_count_fuzzed":
        return thing.active_visitors_fuzzed if thing.state == "live" else None
    if attr == "total_views":
        # this requires an extra query, so we'll only show it in places
        # where we're just getting one event.
        return getattr(thing, "total_views", None)
    if attr == "description_html":
        rendered = filters.safemarkdown(thing.description, nofollow=True)
        return filters.spaceCompress(rendered or "")
    if attr == "resources_html":
        rendered = filters.safemarkdown(thing.resources, nofollow=True)
        return filters.spaceCompress(rendered or "")
    if attr == "websocket_url":
        if thing.state != "live":
            return None
        return websockets.make_url("/live/" + thing._id,
                                   max_age=24 * 60 * 60)
    return ThingJsonTemplate.thing_attr(self, thing, attr)
def thing_attr(self, thing, attr):
    """Look up the value backing ``attr`` for a live update event."""
    if attr == "_fullname":
        return "LiveUpdateEvent_" + thing._id
    elif attr in ("viewer_count", "viewer_count_fuzzed"):
        # visitor counters are only tracked while the event is live
        if thing.state != "live":
            return None
        if attr == "viewer_count":
            return thing.active_visitors
        return thing.active_visitors_fuzzed
    elif attr == "total_views":
        # this requires an extra query, so we'll only show it in places
        # where we're just getting one event.
        if hasattr(thing, "total_views"):
            return thing.total_views
        return None
    elif attr == "description_html":
        markdown = filters.safemarkdown(thing.description, nofollow=True)
        return filters.spaceCompress(markdown or "")
    elif attr == "resources_html":
        markdown = filters.safemarkdown(thing.resources, nofollow=True)
        return filters.spaceCompress(markdown or "")
    elif attr == "websocket_url":
        if thing.state == "live":
            return websockets.make_url(
                "/live/" + thing._id, max_age=24 * 60 * 60)
        return None
    else:
        return ThingJsonTemplate.thing_attr(self, thing, attr)
def thing_attr(self, thing, attr):
    """Resolve template attributes for rendering an event as JSON."""
    if attr == "_fullname":
        return "LiveUpdateEvent_" + thing._id

    if attr in ("viewer_count", "viewer_count_fuzzed", "websocket_url"):
        # these only make sense while the event is in the "live" state
        if thing.state != "live":
            return None
        if attr == "viewer_count":
            return thing.active_visitors
        if attr == "viewer_count_fuzzed":
            return thing.active_visitors_fuzzed
        return websockets.make_url("/live/" + thing._id,
                                   max_age=24 * 60 * 60)

    if attr == "description_html":
        return filters.spaceCompress(
            filters.safemarkdown(thing.description, nofollow=True) or "")
    if attr == "resources_html":
        return filters.spaceCompress(
            filters.safemarkdown(thing.resources, nofollow=True) or "")

    return ThingJsonTemplate.thing_attr(self, thing, attr)
def GET_canvasse(self, is_embed, is_webview, is_palette_hidden):
    """Render the place canvas page (full page, embed, or webview)."""
    # oauth will try to force the response into json
    # undo that here by hacking extension, content_type, and render_style
    # (pop with a default instead of the previous bare `except:` so we
    # don't silently swallow unrelated errors)
    request.environ.pop('extension', None)
    request.environ['content_type'] = "text/html; charset=UTF-8"
    request.environ['render_style'] = "html"
    set_content_type()

    websocket_url = websockets.make_url("/place", max_age=3600)

    content = PlaceCanvasse()

    js_config = {
        "place_websocket_url": websocket_url,
        "place_canvas_width": CANVAS_WIDTH,
        "place_canvas_height": CANVAS_HEIGHT,
        # admins are exempt from the pixel-placement cooldown
        "place_cooldown": 0 if c.user_is_admin else PIXEL_COOLDOWN_SECONDS,
        "place_fullscreen": is_embed or is_webview,
        "place_hide_ui": is_palette_hidden,
    }

    if c.user_is_loggedin and not c.user_is_admin:
        js_config["place_wait_seconds"] = get_wait_seconds(c.user)

    # this is a sad duplication of the same from reddit_base :(
    if c.user_is_loggedin:
        PLACE_SUBREDDIT.record_visitor_activity(
            "logged_in", c.user._fullname)
    elif c.loid.serializable:
        PLACE_SUBREDDIT.record_visitor_activity("logged_out", c.loid.loid)

    # activity counts are best-effort; don't fail the page render
    try:
        js_config["place_active_visitors"] = get_activity_count()
    except ActivityError:
        pass

    if is_embed:
        # ensure we're off the cookie domain before allowing embedding
        if request.host != g.media_domain:
            abort(404)
        c.allow_framing = True

    if is_embed or is_webview:
        return PlaceEmbedPage(
            title="place",
            content=content,
            extra_js_config=js_config,
        ).render()
    else:
        return PlacePage(
            title="place",
            content=content,
            extra_js_config=js_config,
        ).render()
def GET_listing(self, num, after, before, count, is_embed):
    """Render an event's update listing, as a full page or an embed."""
    event = c.liveupdate_event

    # paging backwards walks the update stream in reverse from `before`
    is_reversed = bool(before)
    if before:
        after = before

    stream_query = LiveUpdateStream.query(
        [event._id], count=num, reverse=is_reversed)
    if after:
        stream_query.column_start = after

    update_builder = LiveUpdateBuilder(
        query=stream_query, skip=True,
        reverse=is_reversed, num=num, count=count)
    update_listing = pages.LiveUpdateListing(update_builder)
    wrapped_listing = update_listing.listing()

    content = pages.LiveUpdateEventPage(
        event=event,
        listing=wrapped_listing,
        show_sidebar=not is_embed,
    )

    # preload the JSON representations so the client needn't refetch them
    c.js_preload.set_wrapped(
        "/live/" + event._id + "/about.json",
        Wrapped(event),
    )
    c.js_preload.set_wrapped(
        "/live/" + event._id + ".json",
        wrapped_listing,
    )

    # don't generate a url unless this is the main page of an event
    if event.state == "live" and not (after or before):
        websocket_url = websockets.make_url(
            "/live/" + event._id, max_age=24 * 60 * 60)
    else:
        websocket_url = None

    if is_embed:
        # ensure we're off the cookie domain before allowing embedding
        if request.host != g.media_domain:
            abort(404)
        c.allow_framing = True
        page_cls = pages.LiveUpdateEmbed
    else:
        page_cls = pages.LiveUpdatePage

    return page_cls(
        content=content,
        websocket_url=websocket_url,
        page_classes=['liveupdate-event'],
    ).render()
def add_place_config(config):
    """Inject place client settings when viewing the place subreddit."""
    if c.site.name != PLACE_SUBREDDIT.name:
        return

    config["place_websocket_url"] = websockets.make_url(
        "/place", max_age=3600)
    config["place_canvas_width"] = CANVAS_WIDTH
    config["place_canvas_height"] = CANVAS_HEIGHT
    # admins may place pixels without waiting
    config["place_cooldown"] = (
        0 if c.user_is_admin else PIXEL_COOLDOWN_SECONDS)

    if c.user_is_loggedin and not c.user_is_admin:
        config["place_wait_seconds"] = get_wait_seconds(c.user)

    # activity counts are best-effort only
    try:
        config["place_active_visitors"] = get_activity_count()
    except ActivityError:
        pass
def GET_listing(self, num, after, before, count, is_embed):
    """Render the event's update listing as a page or a frameable embed."""
    event = c.liveupdate_event

    # paging backwards walks the update stream in reverse from `before`
    is_reversed = bool(before)
    if before:
        after = before

    stream_query = LiveUpdateStream.query(
        [event._id], count=num, reverse=is_reversed)
    if after:
        stream_query.column_start = after

    update_builder = LiveUpdateBuilder(
        query=stream_query, skip=True,
        reverse=is_reversed, num=num, count=count)
    update_listing = pages.LiveUpdateListing(update_builder)

    content = pages.LiveUpdateEvent(
        event=event,
        listing=update_listing.listing(),
        show_sidebar=not is_embed,
    )

    # don't generate a url unless this is the main page of an event
    if event.state == "live" and not (after or before):
        websocket_url = websockets.make_url(
            "/live/" + event._id, max_age=24 * 60 * 60)
    else:
        websocket_url = None

    if is_embed:
        # embeds are always logged out and therefore safe for frames.
        c.liveupdate_can_manage = False
        c.liveupdate_can_edit = False
        c.allow_framing = True

        return pages.LiveUpdateEmbed(
            content=content,
            websocket_url=websocket_url,
        ).render()

    return pages.LiveUpdatePage(
        content=content,
        websocket_url=websocket_url,
    ).render()
def _get_chat_page(self, room):
    """Build and render the chat page for ``room`` for the current user."""
    path = posixpath.join("/robin", room.id, c.user._id36)
    websocket_url = websockets.make_url(path, max_age=3600)

    all_user_ids = room.get_all_participants()
    all_present_ids = room.get_present_participants()
    all_votes = room.get_all_votes()

    users = Account._byID(all_user_ids, data=True, stale=True)

    user_list = []
    for user in users.itervalues():
        # dict.get returns None for users who haven't voted, so the
        # previous membership check was a redundant second lookup
        user_list.append({
            "name": user.name,
            "present": user._id in all_present_ids,
            "vote": all_votes.get(user._id),
        })

    return RobinChatPage(
        title="chat in %s" % room.name,
        content=RobinChat(room=room),
        extra_js_config={
            "robin_room_is_continued": room.is_continued,
            "robin_room_name": room.name,
            "robin_room_id": room.id,
            "robin_websocket_url": websocket_url,
            "robin_user_list": user_list,
            "robin_room_date": js_timestamp(room.date),
            "robin_room_reap_time": js_timestamp(get_reap_time(room)),
        },
    ).render()
def _get_chat_page(self, room):
    """Render the chat interface for ``room``."""
    path = posixpath.join("/robin", room.id, c.user._id36)
    websocket_url = websockets.make_url(path, max_age=3600)

    all_user_ids = room.get_all_participants()
    all_present_ids = room.get_present_participants()
    all_votes = room.get_all_votes()

    users = Account._byID(all_user_ids, data=True, stale=True)

    # .get() yields None for non-voters; no need for a membership test
    # before the lookup (the old code did two dict lookups per user)
    user_list = [{
        "name": user.name,
        "present": user._id in all_present_ids,
        "vote": all_votes.get(user._id),
    } for user in users.itervalues()]

    return RobinChatPage(
        title="chat in %s" % room.name,
        content=RobinChat(room=room),
        extra_js_config={
            "robin_room_is_continued": room.is_continued,
            "robin_room_name": room.name,
            "robin_room_id": room.id,
            "robin_websocket_url": websocket_url,
            "robin_user_list": user_list,
            "robin_room_date": js_timestamp(room.date),
            "robin_room_reap_time": js_timestamp(get_reap_time(room)),
        },
    ).render()
def add_js_config(config):
    """Expose the button websocket url when on the button's subreddit."""
    site_id = getattr(c.site, '_id', None)
    if site_id != g.live_config["thebutton_srid"]:
        return
    config['thebutton_websocket'] = websockets.make_url(
        "/thebutton", max_age=24 * 60 * 60)
def __init__(self, content):
    """Build the page, wiring the button's websocket url into js config."""
    ws_url = websockets.make_url("/thebutton", max_age=24 * 60 * 60)
    Reddit.__init__(self, content=content,
                    extra_js_config={"thebutton_websocket": ws_url})
def js_config(extra_config=None):
    """Assemble the client-side r.config dictionary for page rendering.

    Pulls user, site, feature-flag, and tracking state from the request
    context and returns it as one dict; ``extra_config`` entries override
    anything computed here. The "js_config" hook gets a final chance to
    mutate the dict before it is returned.
    """
    from r2.lib import promote

    # `logged` is the username (truthy) when logged in, else False
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name
    stats_name = route_name + ('' if promote.ads_enabled() else '.no_ads')
    banners_enabled = promote.banners_enabled(c.site, c.user)

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    share_ts = int(time.time() * 1000)
    share_tracking_hmac = None
    # Only enable for comments pages on desktop
    if (feature.is_enabled("url_share_tracking") and
            c.render_style == "html" and
            action_name == "GET_comments"):
        # message is "<id>|<ts>" when an identity exists, else just "<ts>"
        share_hash_msg = "%s" % share_ts
        if user_id:
            share_hash_msg = "%s|%s" % (user_id, share_ts)
        elif c.loid.loid:
            share_hash_msg = "%s|%s" % (c.loid.loid, share_ts)

        share_tracking_hmac = hmac.new(g.secrets["share_tracking"],
                                       share_hash_msg,
                                       hashlib.sha1).hexdigest()

    # sign the route/stats names so the client can report them verifiably
    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()

    stats_mac = hmac.new(g.secrets["action_name"], stats_name, hashlib.sha1)
    stats_verification = stats_mac.hexdigest()

    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity

    pref_media_preview = c.user.pref_media_preview if logged else "subreddit"

    if pref_media_preview == "subreddit":
        expando_preference = "subreddit_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = logged and c.user.pref_beta

    lazy_load_listings = (
        c.user.pref_numsites == 25 and
        controller_name in ['hot', 'new'] and
        feature.is_enabled("lazy_load_listings")
    )

    feature_frontpage_tagline = feature.is_enabled("frontpage_tagline")

    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    # debug builds point client event collection at the test endpoint
    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    if feature.is_enabled("live_orangereds") and logged:
        user_websocket_url = websockets.make_url(
            "/user/%s" % c.user._id36, max_age=24 * 60 * 60)
    else:
        user_websocket_url = None

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "feature_outbound_beacons": feature.is_enabled('outbound_beacons'),
        "feature_scroll_events": feature.is_enabled('scroll_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
            "statsName": stats_name,
            "statsVerification": stats_verification,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_expando_events": feature.is_enabled('expando_events'),
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "feature_ads_user_matching": feature.is_enabled('ads_user_matching'),
        "feature_flatlist_events": feature.is_enabled('flatlist_events'),
        "feature_mobile_native_banner": feature.is_enabled(
            'mobile_native_banner'),
        "email_verified": logged and c.user.email and c.user.email_verified,
        "feature_post_embed": feature.is_enabled('post_embed'),
        "ad_serving_events_sample_rate": g.live_config.get(
            "events_collector_ad_serving_sample_rate", 0),
        "share_tracking_hmac": share_tracking_hmac,
        "share_tracking_ts": share_ts,
        "user_websocket_url": user_websocket_url,
        "live_orangereds_pref": c.user.pref_live_orangereds,
        "pref_email_messages": logged and c.user.pref_email_messages,
        "feature_double_sidebar": banners_enabled and not isinstance(c.site, FakeSubreddit),  # noqa
        "feature_lazy_load_listings": lazy_load_listings,
        "ads_loading_timeout_ms": g.live_config.get(
            "ads_loading_timeout_ms", 1000),
        "feature_frontpage_tagline": feature_frontpage_tagline,
    }

    if feature.is_enabled("eu_cookie_policy"):
        config.update({
            "requires_eu_cookie_policy": geoip.requires_eu_cookie_policy(request, c),
            "eu_cookie": g.eu_cookie,
            "eu_cookie_max_attempts": g.eu_cookie_max_attempts,
        })

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    # caller-supplied values win over everything computed above
    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
def js_config(extra_config=None):
    """Assemble the client-side r.config dictionary for page rendering.

    Pulls user, site, feature-flag, and tracking state from the request
    context and returns it as one dict; ``extra_config`` entries override
    anything computed here. The "js_config" hook gets a final chance to
    mutate the dict before it is returned.
    """
    from r2.lib import promote

    # `logged` is the username (truthy) when logged in, else False
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name
    stats_name = route_name + ('' if promote.ads_enabled() else '.no_ads')
    banners_enabled = promote.banners_enabled(c.site, c.user)

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    share_ts = int(time.time() * 1000)
    share_tracking_hmac = None
    # Only enable for comments pages on desktop
    if (feature.is_enabled("url_share_tracking") and
            c.render_style == "html" and
            action_name == "GET_comments"):
        # message is "<id>|<ts>" when an identity exists, else just "<ts>"
        share_hash_msg = "%s" % share_ts
        if user_id:
            share_hash_msg = "%s|%s" % (user_id, share_ts)
        elif c.loid.loid:
            share_hash_msg = "%s|%s" % (c.loid.loid, share_ts)

        share_tracking_hmac = hmac.new(g.secrets["share_tracking"],
                                       share_hash_msg,
                                       hashlib.sha1).hexdigest()

    # sign the route/stats names so the client can report them verifiably
    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()

    stats_mac = hmac.new(g.secrets["action_name"], stats_name, hashlib.sha1)
    stats_verification = stats_mac.hexdigest()

    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity

    pref_media_preview = c.user.pref_media_preview if logged else "subreddit"

    if pref_media_preview == "subreddit":
        expando_preference = "subreddit_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = logged and c.user.pref_beta

    lazy_load_listings = (
        c.user.pref_numsites == 25 and
        controller_name in ['hot', 'new'] and
        feature.is_enabled("lazy_load_listings")
    )

    feature_frontpage_tagline = feature.is_enabled("frontpage_tagline")

    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    # debug builds point client event collection at the test endpoint
    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    if feature.is_enabled("live_orangereds") and logged:
        user_websocket_url = websockets.make_url(
            "/user/%s" % c.user._id36, max_age=24 * 60 * 60)
    else:
        user_websocket_url = None

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,
        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,
        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "feature_outbound_beacons": feature.is_enabled('outbound_beacons'),
        "feature_scroll_events": feature.is_enabled('scroll_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
            "statsName": stats_name,
            "statsVerification": stats_verification,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_expando_events": feature.is_enabled('expando_events'),
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "feature_ads_user_matching": feature.is_enabled('ads_user_matching'),
        "feature_flatlist_events": feature.is_enabled('flatlist_events'),
        "feature_mobile_native_banner": feature.is_enabled(
            'mobile_native_banner'),
        "email_verified": logged and c.user.email and c.user.email_verified,
        "feature_post_embed": feature.is_enabled('post_embed'),
        "ad_serving_events_sample_rate": g.live_config.get(
            "events_collector_ad_serving_sample_rate", 0),
        "share_tracking_hmac": share_tracking_hmac,
        "share_tracking_ts": share_ts,
        "user_websocket_url": user_websocket_url,
        "live_orangereds_pref": c.user.pref_live_orangereds,
        "pref_email_messages": logged and c.user.pref_email_messages,
        "feature_double_sidebar": banners_enabled and not isinstance(c.site, FakeSubreddit),  # noqa
        "feature_lazy_load_listings": lazy_load_listings,
        "ads_loading_timeout_ms": g.live_config.get(
            "ads_loading_timeout_ms", 1000),
        "feature_frontpage_tagline": feature_frontpage_tagline,
    }

    if feature.is_enabled("eu_cookie_policy"):
        config.update({
            "requires_eu_cookie_policy": geoip.requires_eu_cookie_policy(request, c),
            "eu_cookie": g.eu_cookie,
            "eu_cookie_max_attempts": g.eu_cookie_max_attempts,
        })

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    # caller-supplied values win over everything computed above
    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config