Example #1
    def POST_report_cache_poisoning(
        self,
        report_mac,
        poisoner_name,
        poisoner_id,
        poisoner_canary,
        victim_canary,
        render_time,
        route_name,
        url,
        source,
        cache_policy,
        resp_headers,
    ):
        """Report an instance of cache poisoning and its details"""

        self.OPTIONS_report_cache_poisoning()

        if c.errors:
            abort(400)

        # prevent simple CSRF by requiring a custom header
        if not request.headers.get('X-Loggit'):
            abort(403)

        # Eh? Why are you reporting this if the canaries are the same?
        if poisoner_canary == victim_canary:
            abort(400)

        expected_mac = make_poisoning_report_mac(
            poisoner_canary=poisoner_canary,
            poisoner_name=poisoner_name,
            poisoner_id=poisoner_id,
            cache_policy=cache_policy,
            source=source,
            route_name=route_name,
        )
        if not constant_time_compare(report_mac, expected_mac):
            abort(403)

        if resp_headers:
            try:
                resp_headers = json.loads(resp_headers)
                # Verify this is a JSON map of `header_name => [value, ...]`
                if not isinstance(resp_headers, dict):
                    abort(400)
                for hdr_name, hdr_vals in resp_headers.iteritems():
                    if not isinstance(hdr_name, basestring):
                        abort(400)
                    if not all(isinstance(h, basestring) for h in hdr_vals):
                        abort(400)
            except ValueError:
                abort(400)

        if not resp_headers:
            resp_headers = {}

        poison_info = dict(
            poisoner_name=poisoner_name,
            poisoner_id=str(poisoner_id),
            # Convert the JS timestamp to a standard one
            render_time=render_time * 1000,
            route_name=route_name,
            url=url,
            source=source,
            cache_policy=cache_policy,
            resp_headers=resp_headers,
        )

        # For immediate feedback when tracking the effects of caching changes
        g.stats.simple_event("cache.poisoning.%s.%s" % (source, cache_policy))
        # For longer-term diagnosing of caching issues
        g.events.cache_poisoning_event(poison_info, request=request, context=c)

        VRatelimit.ratelimit(rate_ip=True, prefix="rate_poison_", seconds=10)

        return self.api_wrapper({})
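Both POST_report_cache_poisoning and js_config lean on make_poisoning_report_mac, whose body is not part of these examples. A minimal sketch of how such a helper could work, assuming an HMAC-SHA1 over a delimited join of the report fields keyed with a dedicated server secret (the secret value, the delimiter, and the field order here are illustrative assumptions, not the project's actual choices):

import hashlib
import hmac

# Hypothetical stand-in for a key that would come from g.secrets.
POISONING_REPORT_SECRET = "not-the-real-secret"


def make_poisoning_report_mac(poisoner_canary, poisoner_name, poisoner_id,
                              cache_policy, source, route_name):
    # Bind every report field into one message so none of them can be
    # swapped out by the client without invalidating the MAC.
    message = "|".join([
        poisoner_canary,
        poisoner_name,
        str(poisoner_id),
        cache_policy,
        source,
        route_name,
    ])
    return hmac.new(POISONING_REPORT_SECRET, message, hashlib.sha1).hexdigest()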
Example #2
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()
    cur_subreddit = ""
    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,

        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,

        # current domain
        "cur_domain": get_domain(cname=c.frameless_cname, subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(cname=c.authorized_cname, subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        # does the client only want to communicate over HTTPS?
        "https_forced": c.user.https_forced,
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
          "fetching": _("fetching title..."),
          "submitting": _("submitting..."),
          "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "static_root": static(''),
        "over_18": bool(c.over18),
        "new_window": bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "vote_hash": c.vote_hash,
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
          "verification": verification,
          "actionName": route_name,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
    }

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
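js_config embeds poisoning_canary and poisoning_report_mac in the page; a client that later finds a different user's canary baked into a cached response can post those details to POST_report_cache_poisoning from Example #1. The real reporter is browser-side JavaScript, but a rough Python sketch of the request follows (the /api/report_cache_poisoning path, the literal values, and the use of the requests library are assumptions for illustration):

import json

import requests

# Placeholder values standing in for fields taken from the js_config payload
# of the poisoned page and from the viewer's own session.
report = {
    "report_mac": "0123456789abcdef0123456789abcdef01234567",
    "poisoner_name": "some_user",
    "poisoner_id": 1234,
    "poisoner_canary": "ab",   # canary baked into the cached page
    "victim_canary": "cd",     # canary from the viewer's own "pc" cookie
    "render_time": 1500000000,
    "route_name": "hot.GET_listing",
    "url": "https://www.reddit.com/",
    "source": "web",
    "cache_policy": "loggedin_www_new",
    "resp_headers": json.dumps({"X-Cache": ["HIT"]}),
}
# The custom X-Loggit header is what the handler requires to block simple CSRF.
requests.post("https://www.reddit.com/api/report_cache_poisoning",
              data=report, headers={"X-Loggit": "1"})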
Example #3
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()
    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,

        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,

        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain)
        and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions":
        g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "email_verified": logged and c.user.email and c.user.email_verified,
    }

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
Example #4
    def POST_report_cache_poisoning(
            self,
            report_mac,
            poisoner_name,
            poisoner_id,
            poisoner_canary,
            victim_canary,
            render_time,
            route_name,
            url,
            source,
            cache_policy,
            resp_headers,
    ):
        """Report an instance of cache poisoning and its details"""

        self.OPTIONS_report_cache_poisoning()

        if c.errors:
            abort(400)

        # prevent simple CSRF by requiring a custom header
        if not request.headers.get('X-Loggit'):
            abort(403)

        # Eh? Why are you reporting this if the canaries are the same?
        if poisoner_canary == victim_canary:
            abort(400)

        expected_mac = make_poisoning_report_mac(
            poisoner_canary=poisoner_canary,
            poisoner_name=poisoner_name,
            poisoner_id=poisoner_id,
            cache_policy=cache_policy,
            source=source,
            route_name=route_name,
        )
        if not constant_time_compare(report_mac, expected_mac):
            abort(403)

        if resp_headers:
            try:
                resp_headers = json.loads(resp_headers)
                # Verify this is a JSON map of `header_name => [value, ...]`
                if not isinstance(resp_headers, dict):
                    abort(400)
                for hdr_name, hdr_vals in resp_headers.iteritems():
                    if not isinstance(hdr_name, basestring):
                        abort(400)
                    if not all(isinstance(h, basestring) for h in hdr_vals):
                        abort(400)
            except ValueError:
                abort(400)

        if not resp_headers:
            resp_headers = {}

        poison_info = dict(
            poisoner_name=poisoner_name,
            poisoner_id=str(poisoner_id),
            # Convert the JS timestamp to a standard one
            render_time=render_time * 1000,
            route_name=route_name,
            url=url,
            source=source,
            cache_policy=cache_policy,
            resp_headers=resp_headers,
        )

        # For immediate feedback when tracking the effects of caching changes
        g.stats.simple_event("cache.poisoning.%s.%s" % (source, cache_policy))
        # For longer-term diagnosing of caching issues
        g.events.cache_poisoning_event(poison_info, request=request, context=c)

        VRatelimit.ratelimit(rate_ip=True, prefix="rate_poison_", seconds=10)

        return self.api_wrapper({})
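Both copies of the handler check the MAC with constant_time_compare instead of ==, so response timing does not reveal how much of a forged report_mac was correct. A minimal sketch of such a comparison (not necessarily the project's own implementation):

def constant_time_compare(actual, expected):
    # Examine every character even after a mismatch is found, accumulating
    # differences instead of returning early.
    if len(actual) != len(expected):
        return False
    result = 0
    for a, b in zip(actual, expected):
        result |= ord(a) ^ ord(b)
    return result == 0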
Example #5
def js_config(extra_config=None):
    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()
    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity
    pref_media_preview = c.user.pref_media_preview

    if not feature.is_enabled("autoexpand_media_previews"):
        expando_preference = None
    elif pref_media_preview == "subreddit":
        expando_preference = "subreddit_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = c.user.pref_beta
    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    bp_currencyrates = hooks.get_hook("fetchcurrencyrates").call()[0]
    
    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,

        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,

        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        "debug": g.debug,
        "donation_address": g.live_config["blockpath_donation_addr"] if 'blockpath_donation_addr' in g.live_config else '',
        "bpcritical_notification": g.live_config["bpcritical_notification"] if 'bpcritical_notification' in g.live_config else '',
        "bp_qb_wallets": [i for i in c.user.bp_qb_wallets.split(',') if i],
        "pref_bp_gravity": c.user.pref_bp_gravity,
        "pref_bp_linkdistance": c.user.pref_bp_linkdistance,
        "pref_bp_chargedistance": c.user.pref_bp_chargedistance,
        "pref_bp_charge": c.user.pref_bp_charge,
        "pref_bp_theta": c.user.pref_bp_theta,
        "pref_bp_friction": c.user.pref_bp_friction,
        "pref_bp_nodesoftlimit": c.user.pref_bp_nodesoftlimit,
        "pref_bp_linksoftlimit": c.user.pref_bp_linksoftlimit,
        "pref_bp_currency": c.user.pref_bp_currency,
        "pref_bp_currencyatot": c.user.pref_bp_currencyatot,
        "pref_bp_linklabel": c.user.pref_bp_linklabel,
        "pref_bp_linklabeldir": c.user.pref_bp_linklabeldir,
        "bp_currencyrates": bp_currencyrates,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
          "fetching": _("fetching title..."),
          "submitting": _("submitting..."),
          "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
          "verification": verification,
          "actionName": route_name,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "email_verified": logged and c.user.email and c.user.email_verified,
        "email": c.user.email
    }

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
Example #6
def js_config(extra_config=None):
    from r2.lib import promote

    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name
    stats_name = route_name + ('' if promote.ads_enabled() else '.no_ads')
    banners_enabled = promote.banners_enabled(c.site, c.user)

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    share_ts = int(time.time() * 1000)
    share_tracking_hmac = None
    # Only enable for comments pages on desktop
    if (feature.is_enabled("url_share_tracking") and c.render_style == "html"
            and action_name == "GET_comments"):
        share_hash_msg = "%s" % share_ts
        if user_id:
            share_hash_msg = "%s|%s" % (user_id, share_ts)
        elif c.loid.loid:
            share_hash_msg = "%s|%s" % (c.loid.loid, share_ts)
        share_tracking_hmac = hmac.new(g.secrets["share_tracking"],
                                       share_hash_msg,
                                       hashlib.sha1).hexdigest()

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()
    stats_mac = hmac.new(g.secrets["action_name"], stats_name, hashlib.sha1)
    stats_verification = stats_mac.hexdigest()
    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity
    pref_media_preview = c.user.pref_media_preview if logged else "subreddit"

    if pref_media_preview == "subreddit":
        expando_preference = "subreddit_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = logged and c.user.pref_beta

    lazy_load_listings = (c.user.pref_numsites == 25
                          and controller_name in ['hot', 'new']
                          and feature.is_enabled("lazy_load_listings"))

    feature_frontpage_tagline = feature.is_enabled("frontpage_tagline")
    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    if feature.is_enabled("live_orangereds") and logged:
        user_websocket_url = websockets.make_url("/user/%s" % c.user._id36,
                                                 max_age=24 * 60 * 60)
    else:
        user_websocket_url = None

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,

        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,

        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
            "fetching": _("fetching title..."),
            "submitting": _("submitting..."),
            "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "feature_outbound_beacons": feature.is_enabled('outbound_beacons'),
        "feature_scroll_events": feature.is_enabled('scroll_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
            "verification": verification,
            "actionName": route_name,
            "statsName": stats_name,
            "statsVerification": stats_verification,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_expando_events": feature.is_enabled('expando_events'),
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "feature_ads_user_matching": feature.is_enabled('ads_user_matching'),
        "feature_flatlist_events": feature.is_enabled('flatlist_events'),
        "feature_mobile_native_banner": feature.is_enabled(
            'mobile_native_banner'),
        "email_verified": logged and c.user.email and c.user.email_verified,
        "feature_post_embed": feature.is_enabled('post_embed'),
        "ad_serving_events_sample_rate": g.live_config.get(
            "events_collector_ad_serving_sample_rate", 0),
        "share_tracking_hmac": share_tracking_hmac,
        "share_tracking_ts": share_ts,
        "user_websocket_url": user_websocket_url,
        "live_orangereds_pref": c.user.pref_live_orangereds,
        "pref_email_messages": logged and c.user.pref_email_messages,
        "feature_double_sidebar": banners_enabled and not isinstance(c.site, FakeSubreddit),  # noqa
        "feature_lazy_load_listings": lazy_load_listings,
        "ads_loading_timeout_ms": g.live_config.get(
            "ads_loading_timeout_ms", 1000),
        "feature_frontpage_tagline": feature_frontpage_tagline,
    }

    if feature.is_enabled("eu_cookie_policy"):
        config.update({
            "requires_eu_cookie_policy":
            geoip.requires_eu_cookie_policy(request, c),
            "eu_cookie":
            g.eu_cookie,
            "eu_cookie_max_attempts":
            g.eu_cookie_max_attempts,
        })

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
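Examples #6 and #7 also sign a share-tracking timestamp: the message is the timestamp alone, or "user_id|ts" / "loid|ts" when an identity is available, keyed with g.secrets["share_tracking"]. The verifying side is not shown in these examples; a sketch that recomputes the MAC from the same message layout (the function name and its use of hmac.compare_digest are assumptions):

import hashlib
import hmac


def verify_share_tracking_hmac(secret, reported_mac, share_ts,
                               user_id=None, loid=None):
    # Rebuild the message exactly as js_config does before signing it.
    msg = "%s" % share_ts
    if user_id:
        msg = "%s|%s" % (user_id, share_ts)
    elif loid:
        msg = "%s|%s" % (loid, share_ts)
    expected = hmac.new(secret, msg, hashlib.sha1).hexdigest()
    return hmac.compare_digest(reported_mac, expected)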
Example #7
def js_config(extra_config=None):
    from r2.lib import promote

    logged = c.user_is_loggedin and c.user.name
    user_id = c.user_is_loggedin and c.user._id
    user_in_timeout = c.user_is_loggedin and c.user.in_timeout
    gold = bool(logged and c.user.gold)
    controller_name = request.environ['pylons.routes_dict']['controller']
    action_name = request.environ['pylons.routes_dict']['action']
    route_name = controller_name + '.' + action_name
    stats_name = route_name + ('' if promote.ads_enabled() else '.no_ads')
    banners_enabled = promote.banners_enabled(c.site, c.user)

    cache_policy = "loggedout_www"
    if c.user_is_loggedin:
        cache_policy = "loggedin_www_new"

    # Canary for detecting cache poisoning
    poisoning_canary = None
    poisoning_report_mac = None
    if logged:
        if "pc" in c.cookies and len(c.cookies["pc"].value) == 2:
            poisoning_canary = c.cookies["pc"].value
            poisoning_report_mac = make_poisoning_report_mac(
                poisoner_canary=poisoning_canary,
                poisoner_name=logged,
                poisoner_id=user_id,
                cache_policy=cache_policy,
                source="web",
                route_name=route_name,
            )

    share_ts = int(time.time() * 1000)
    share_tracking_hmac = None
    # Only enable for comments pages on desktop
    if (feature.is_enabled("url_share_tracking") and
            c.render_style == "html" and
            action_name == "GET_comments"):
        share_hash_msg = "%s" % share_ts
        if user_id:
            share_hash_msg = "%s|%s" % (user_id, share_ts)
        elif c.loid.loid:
            share_hash_msg = "%s|%s" % (c.loid.loid, share_ts)
        share_tracking_hmac = hmac.new(g.secrets["share_tracking"],
            share_hash_msg, hashlib.sha1).hexdigest()

    mac = hmac.new(g.secrets["action_name"], route_name, hashlib.sha1)
    verification = mac.hexdigest()
    stats_mac = hmac.new(g.secrets["action_name"], stats_name, hashlib.sha1)
    stats_verification = stats_mac.hexdigest()
    cur_subreddit = ""
    cur_sr_fullname = ""
    cur_listing = ""
    listing_over_18 = False
    pref_no_profanity = not logged or c.user.pref_no_profanity
    pref_media_preview = c.user.pref_media_preview if logged else "subreddit"

    if pref_media_preview == "subreddit":
        expando_preference = "subreddit_default"
    elif pref_media_preview == "on":
        expando_preference = "auto_expand"
    else:
        expando_preference = "do_not_expand"

    pref_beta = logged and c.user.pref_beta

    lazy_load_listings = (
        c.user.pref_numsites == 25 and
        controller_name in ['hot', 'new'] and
        feature.is_enabled("lazy_load_listings")
    )

    feature_frontpage_tagline = feature.is_enabled("frontpage_tagline")
    nsfw_media_acknowledged = logged and c.user.nsfw_media_acknowledged

    if isinstance(c.site, Subreddit) and not c.default_sr:
        cur_subreddit = c.site.name
        cur_sr_fullname = c.site._fullname
        cur_listing = cur_subreddit
        listing_over_18 = c.site.over_18
    elif isinstance(c.site, DefaultSR):
        cur_listing = "frontpage"
    elif isinstance(c.site, FakeSubreddit):
        cur_listing = c.site.name

    if g.debug:
        events_collector_url = g.events_collector_test_url
        events_collector_key = g.secrets['events_collector_test_js_key']
        events_collector_secret = g.secrets['events_collector_test_js_secret']
    else:
        events_collector_url = g.events_collector_url
        events_collector_key = g.secrets['events_collector_js_key']
        events_collector_secret = g.secrets['events_collector_js_secret']

    if feature.is_enabled("live_orangereds") and logged:
        user_websocket_url = websockets.make_url("/user/%s" % c.user._id36,
            max_age=24 * 60 * 60)
    else:
        user_websocket_url = None

    config = {
        # is the user logged in?
        "logged": logged,
        # logged in user's id
        "user_id": user_id,
        # is user in timeout?
        "user_in_timeout": user_in_timeout,
        # the subreddit's name (for posts)
        "post_site": cur_subreddit,
        "cur_site": cur_sr_fullname,
        "cur_listing": cur_listing,
        # the user's voting hash
        "modhash": c.modhash or False,
        # the current rendering style
        "renderstyle": c.render_style,

        # they're welcome to try to override this in the DOM because we just
        # disable the features server-side if applicable
        'store_visits': gold and c.user.pref_store_visits,

        # current domain
        "cur_domain": get_domain(subreddit=False, no_www=True),
        # where do ajax requests go?
        "ajax_domain": get_domain(subreddit=False),
        "stats_domain": g.stats_domain or '',
        "stats_sample_rate": g.stats_sample_rate or 0,
        "extension": c.extension,
        "https_endpoint": is_subdomain(request.host, g.domain) and g.https_endpoint,
        "media_domain": g.media_domain,
        # does the client only want to communicate over HTTPS?
        "https_forced": feature.is_enabled("force_https"),
        # debugging?
        "debug": g.debug,
        "poisoning_canary": poisoning_canary,
        "poisoning_report_mac": poisoning_report_mac,
        "cache_policy": cache_policy,
        "send_logs": g.live_config["frontend_logging"],
        "server_time": math.floor(time.time()),
        "status_msg": {
          "fetching": _("fetching title..."),
          "submitting": _("submitting..."),
          "loading": _("loading...")
        },
        "is_fake": isinstance(c.site, FakeSubreddit),
        "tracker_url": "",  # overridden below if configured
        "adtracker_url": g.adtracker_url,
        "clicktracker_url": g.clicktracker_url,
        "uitracker_url": g.uitracker_url,
        "eventtracker_url": g.eventtracker_url,
        "anon_eventtracker_url": g.anon_eventtracker_url,
        "events_collector_url": events_collector_url,
        "events_collector_key": events_collector_key,
        "events_collector_secret": events_collector_secret,
        "feature_screenview_events": feature.is_enabled('screenview_events'),
        "feature_outbound_beacons": feature.is_enabled('outbound_beacons'),
        "feature_scroll_events": feature.is_enabled('scroll_events'),
        "static_root": static(''),
        "over_18": bool(c.over18),
        "listing_over_18": listing_over_18,
        "expando_preference": expando_preference,
        "pref_no_profanity": pref_no_profanity,
        "pref_beta": pref_beta,
        "nsfw_media_acknowledged": nsfw_media_acknowledged,
        "new_window": logged and bool(c.user.pref_newwindow),
        "mweb_blacklist_expressions": g.live_config['mweb_blacklist_expressions'],
        "gold": gold,
        "has_subscribed": logged and c.user.has_subscribed,
        "is_sponsor": logged and c.user_is_sponsor,
        "pageInfo": {
          "verification": verification,
          "actionName": route_name,
          "statsName": stats_name,
          "statsVerification": stats_verification,
        },
        "facebook_app_id": g.live_config["facebook_app_id"],
        "feature_expando_events": feature.is_enabled('expando_events'),
        "feature_new_report_dialog": feature.is_enabled('new_report_dialog'),
        "feature_ads_user_matching": feature.is_enabled('ads_user_matching'),
        "feature_flatlist_events": feature.is_enabled('flatlist_events'),
        "feature_mobile_native_banner": feature.is_enabled(
            'mobile_native_banner'),
        "email_verified": logged and c.user.email and c.user.email_verified,
        "feature_post_embed": feature.is_enabled('post_embed'),
        "ad_serving_events_sample_rate": g.live_config.get(
            "events_collector_ad_serving_sample_rate", 0),
        "share_tracking_hmac": share_tracking_hmac,
        "share_tracking_ts": share_ts,
        "user_websocket_url": user_websocket_url,
        "live_orangereds_pref": c.user.pref_live_orangereds,
        "pref_email_messages": logged and c.user.pref_email_messages,
        "feature_double_sidebar": banners_enabled and not isinstance(c.site, FakeSubreddit),  # noqa
        "feature_lazy_load_listings": lazy_load_listings,
        "ads_loading_timeout_ms": g.live_config.get(
            "ads_loading_timeout_ms", 1000),
        "feature_frontpage_tagline": feature_frontpage_tagline,
    }

    if feature.is_enabled("eu_cookie_policy"):
        config.update({
            "requires_eu_cookie_policy": geoip.requires_eu_cookie_policy(request, c),
            "eu_cookie": g.eu_cookie,
            "eu_cookie_max_attempts": g.eu_cookie_max_attempts,
        })

    if g.tracker_url:
        config["tracker_url"] = tracking.get_pageview_pixel_url()

    if g.uncompressedJS:
        config["uncompressedJS"] = True

    if extra_config:
        config.update(extra_config)

    hooks.get_hook("js_config").call(config=config)

    return config
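Every variant of js_config finishes the same way: merge extra_config into the dict, let the js_config hook mutate it, and return it, so callers and plugins can extend the payload without editing the function itself. A hypothetical caller-side use of extra_config (the keys are made up for illustration):

page_config = js_config(extra_config={
    "comment_sort": "top",   # hypothetical page-specific keys layered on
    "max_comments": 200,     # top of the defaults before the hook runs
})
assert page_config["comment_sort"] == "top"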