def pre(self):
    c.start_time = datetime.now(g.tz)
    g.reset_caches()

    c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                          g.domain_prefix)
    c.secure = request.host in g.secure_domains

    #check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_throttled()
        ratelimit_agents()

    c.allow_loggedin_cache = False
    c.show_wiki_actions = False

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    c.errors = ErrorSet()
    c.cookies = Cookies()
    # if an rss feed, this will also log the user in if a feed=
    # GET param is included
    set_content_type()
def pre(self): action = request.environ["pylons.routes_dict"].get("action") if action: c.request_timer = g.stats.get_timer(request_timer_name(action)) else: c.request_timer = SimpleSillyStub() c.response_wrappers = [] c.start_time = datetime.now(g.tz) c.request_timer.start() g.reset_caches() c.domain_prefix = request.environ.get("reddit-domain-prefix", g.domain_prefix) c.secure = request.host in g.secure_domains #check if user-agent needs a dose of rate-limiting if not c.error_page: ratelimit_throttled() ratelimit_agents() c.allow_loggedin_cache = False c.show_wiki_actions = False # the domain has to be set before Cookies get initialized set_subreddit() c.errors = ErrorSet() c.cookies = Cookies() # if an rss feed, this will also log the user in if a feed= # GET param is included set_content_type() c.request_timer.intermediate("minimal-pre")
def pre(self):
    c.start_time = datetime.now(g.tz)
    g.reset_caches()

    c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                          g.domain_prefix)

    #check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_agents()
        ratelimit_throttled()

    c.allow_loggedin_cache = False

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    c.errors = ErrorSet()
    c.cookies = Cookies()
def pre(self):
    g.cache.caches = (LocalCache(),) + g.cache.caches[1:]

    #check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_agents()

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    set_cnameframe()

    # populate c.cookies
    c.cookies = Cookies()
    try:
        for k, v in request.cookies.iteritems():
            # we can unquote even if it's not quoted
            c.cookies[k] = Cookie(value=unquote(v), dirty=False)
    except CookieError:
        # pylons or one of the associated libraries can't
        # handle broken cookies
        request.environ['HTTP_COOKIE'] = ''

    c.response_wrappers = []
    c.errors = ErrorSet()
    c.firsttime = firsttime()
    (c.user, maybe_admin) = \
        valid_cookie(c.cookies[g.login_cookie].value
                     if g.login_cookie in c.cookies
                     else '')

    if c.user:
        c.user_is_loggedin = True
    else:
        c.user = UnloggedUser(get_browser_langs())
        c.user._load()

    if c.user_is_loggedin:
        if not c.user._loaded:
            c.user._load()
        c.modhash = c.user.modhash()
        if request.method.lower() == 'get':
            read_click_cookie()
            read_mod_cookie()
        if hasattr(c.user, 'msgtime') and c.user.msgtime:
            c.have_messages = c.user.msgtime
        c.user_is_admin = maybe_admin and c.user.name in g.admins
        c.user_is_sponsor = c.user_is_admin or c.user.name in g.sponsors

    c.over18 = over18()

    #set_browser_langs()
    set_host_lang()
    set_content_type()
    set_iface_lang()
    set_content_lang()
    set_colors()
    set_recent_reddits()

    # set some environmental variables in case we hit an abort
    if not isinstance(c.site, FakeSubreddit):
        request.environ['REDDIT_NAME'] = c.site.name

    # check if the user has access to this subreddit
    if not c.site.can_view(c.user) and not c.error_page:
        abort(403, "forbidden")

    #check over 18
    if (c.site.over_18 and not c.over18 and
        request.path not in ("/frame", "/over18") and
        c.render_style == 'html'):
        return self.intermediate_redirect("/over18")

    #check whether to allow custom styles
    c.allow_styles = True
    if g.css_killswitch:
        c.allow_styles = False
    #if the preference is set and we're not at a cname
    elif not c.user.pref_show_stylesheets and not c.cname:
        c.allow_styles = False
    #if the site has a cname, but we're not using it
    elif c.site.domain and not c.cname:
        c.allow_styles = False

    #check content cache
    if not c.user_is_loggedin:
        r = cache.get(self.request_key())
        if r and request.method == 'GET':
            response = c.response
            response.headers = r.headers
            response.content = r.content

            for x in r.cookies.keys():
                if x in cache_affecting_cookies:
                    cookie = r.cookies[x]
                    response.set_cookie(key=x,
                                        value=cookie.value,
                                        domain=cookie.get('domain', None),
                                        expires=cookie.get('expires', None),
                                        path=cookie.get('path', None))

            response.status_code = r.status_code
            request.environ['pylons.routes_dict']['action'] = 'cached_response'
            # make sure to carry over the content type
            c.response_content_type = r.headers['content-type']
            if r.headers.has_key('access-control'):
                c.response_access_control = r.headers['access-control']
            c.used_cache = True
            # response wrappers have already been applied before cache write
            c.response_wrappers = []