def post(self):
    """Finalize the response after the controller action has run.

    In order: record timing, wrap the rendered body, set cache/frame
    headers, write the page cache, flush dirty cookies, update the
    user's last-visit time, fire the request-end hook, and flush stats.
    Operates entirely on the pylons request-global stack objects
    (``c``, ``g``, ``request``, ``response``).
    """
    c.request_timer.intermediate("action")

    # if the action raised an HTTPException (i.e. it aborted) then pylons
    # will have replaced response with the exception itself.
    c.is_exception_response = getattr(response, "_exception", False)

    if c.response_wrapper and not c.is_exception_response:
        content = flatten_response(response.content)
        wrapped_content = c.response_wrapper(content)
        response.content = wrapped_content

    if c.user_is_loggedin and not c.allow_loggedin_cache:
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Pragma'] = 'no-cache'

    if c.deny_frames:
        response.headers["X-Frame-Options"] = "DENY"

    # set content cache. we mustn't cache responses that depend on
    # request state not captured by request_key(): a 304 depends on the
    # client's If-Modified-Since header, so caching it would serve an
    # empty 304 body to clients that never sent that header.
    # BUG FIX: previously only 429 was excluded; 304 is now excluded too.
    if (g.page_cache_time
            and request.method.upper() == 'GET'
            and (not c.user_is_loggedin or c.allow_loggedin_cache)
            and not c.used_cache
            and response.status_int not in (304, 429)
            and not response.status.startswith("5")
            and not c.is_exception_response):
        try:
            g.pagecache.set(self.request_key(),
                            (response._current_obj(), c.cookies),
                            g.page_cache_time)
        except MemcachedError as e:
            # this codepath will actually never be hit as long as
            # the pagecache memcached client is in no_reply mode.
            g.log.warning("Ignored exception (%r) on pagecache "
                          "write for %r", e, request.path)

    # send cookies
    for k, v in c.cookies.iteritems():
        if v.dirty:
            response.set_cookie(key=k,
                                value=quote(v.value),
                                domain=v.domain,
                                expires=v.expires,
                                secure=getattr(v, 'secure', False),
                                httponly=getattr(v, 'httponly', False))

    if self.should_update_last_visit():
        c.user.update_last_visit(c.start_time)

    hooks.get_hook("reddit.request.end").call()

    # this thread is probably going to be reused, but it could be
    # a while before it is. So we might as well dump the cache in
    # the mean time so that we don't have dead objects hanging
    # around taking up memory
    g.reset_caches()

    c.request_timer.intermediate("post")

    # push data to statsd
    c.request_timer.stop()
    g.stats.flush()
def post(self):
    """Finalize the response after the controller action has run.

    In order: record timing, wrap the rendered body, set cache/frame
    headers, write the page cache, emit the pagecache debug header,
    flush dirty cookies, update the user's last-visit time, fire the
    request-end hook, and flush timing stats. Operates entirely on the
    pylons request-global stack objects (``c``, ``g``, ``request``,
    ``response``).
    """
    c.request_timer.intermediate("action")

    # if the action raised an HTTPException (i.e. it aborted) then pylons
    # will have replaced response with the exception itself.
    c.is_exception_response = getattr(response, "_exception", False)

    if c.response_wrapper and not c.is_exception_response:
        content = flatten_response(response.content)
        wrapped_content = c.response_wrapper(content)
        response.content = wrapped_content

    if c.user_is_loggedin and not c.allow_loggedin_cache:
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Pragma'] = 'no-cache'

    if c.deny_frames:
        response.headers["X-Frame-Options"] = "DENY"

    # save the result of this page to the pagecache if possible. we
    # mustn't cache things that rely on state not tracked by request_key
    # such as If-Modified-Since headers for 304s or requesting IP for 429s.
    if (g.page_cache_time
            and request.method.upper() == 'GET'
            and c.can_use_pagecache
            and not c.used_cache
            and response.status_int not in (304, 429)
            and not response.status.startswith("5")
            and not c.is_exception_response):
        try:
            g.pagecache.set(self.request_key(),
                            (response._current_obj(), c.cookies),
                            g.page_cache_time)
        except MemcachedError as e:
            # this codepath will actually never be hit as long as
            # the pagecache memcached client is in no_reply mode.
            g.log.warning("Ignored exception (%r) on pagecache "
                          "write for %r", e, request.path)

    # BUG FIX: Pragma directive tokens are case-insensitive (RFC 7234),
    # so normalize to lowercase before comparing with the lowercase
    # literal below; previously "X-Reddit-Pagecache" was silently ignored.
    pragmas = [p.strip().lower()
               for p in request.headers.get("Pragma", "").split(",")]
    if g.debug or "x-reddit-pagecache" in pragmas:
        if c.can_use_pagecache:
            pagecache_state = "hit" if c.used_cache else "miss"
        else:
            pagecache_state = "disallowed"
        response.headers["X-Reddit-Pagecache"] = pagecache_state

    # send cookies
    for k, v in c.cookies.iteritems():
        if v.dirty:
            response.set_cookie(key=k,
                                value=quote(v.value),
                                domain=v.domain,
                                expires=v.expires,
                                secure=getattr(v, 'secure', False),
                                httponly=getattr(v, 'httponly', False))

    if self.should_update_last_visit():
        c.user.update_last_visit(c.start_time)

    hooks.get_hook("reddit.request.end").call()

    # this thread is probably going to be reused, but it could be
    # a while before it is. So we might as well dump the cache in
    # the mean time so that we don't have dead objects hanging
    # around taking up memory
    g.reset_caches()

    c.request_timer.intermediate("post")

    # push data to statsd
    c.request_timer.stop()
    g.stats.flush()
def post(self):
    """Post-action response finalization.

    Wraps the body, applies cache/frame headers, persists the page to
    the pagecache when allowed, reports pagecache status for debugging,
    flushes dirty cookies, records the user's last visit, and emits
    timing stats. Works on the pylons request-global stack objects.
    """
    c.request_timer.intermediate("action")

    # pylons swaps the response object for the HTTPException itself when
    # the action aborts; remember that so we skip wrapping/caching it.
    c.is_exception_response = getattr(response, "_exception", False)

    if c.response_wrapper and not c.is_exception_response:
        response.content = c.response_wrapper(
            flatten_response(response.content))

    if c.user_is_loggedin and not c.allow_loggedin_cache:
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Pragma'] = 'no-cache'

    if c.deny_frames:
        response.headers["X-Frame-Options"] = "DENY"

    # only GETs whose full state is captured by request_key() may be
    # cached: 304s depend on If-Modified-Since and 429s on the client
    # IP, neither of which is part of the key. 5xx and exception
    # responses are never cached.
    cacheable = (
        g.page_cache_time
        and request.method.upper() == 'GET'
        and c.can_use_pagecache
        and not c.used_cache
        and response.status_int not in (304, 429)
        and not response.status.startswith("5")
        and not c.is_exception_response
    )
    if cacheable:
        try:
            g.pagecache.set(self.request_key(),
                            (response._current_obj(), c.cookies),
                            g.page_cache_time)
        except MemcachedError as e:
            # with the pagecache memcached client in no_reply mode this
            # branch is effectively unreachable.
            g.log.warning("Ignored exception (%r) on pagecache "
                          "write for %r", e, request.path)

    # surface pagecache state when debugging or explicitly requested.
    raw_pragma = request.headers.get("Pragma", "")
    pragmas = [directive.strip() for directive in raw_pragma.split(",")]
    if g.debug or "x-reddit-pagecache" in pragmas:
        if not c.can_use_pagecache:
            state = "disallowed"
        else:
            state = "hit" if c.used_cache else "miss"
        response.headers["X-Reddit-Pagecache"] = state

    # flush any cookies this request dirtied
    for name, cookie in c.cookies.iteritems():
        if cookie.dirty:
            response.set_cookie(key=name,
                                value=quote(cookie.value),
                                domain=cookie.domain,
                                expires=cookie.expires,
                                secure=getattr(cookie, 'secure', False),
                                httponly=getattr(cookie, 'httponly', False))

    if self.should_update_last_visit():
        c.user.update_last_visit(c.start_time)

    hooks.get_hook("reddit.request.end").call()

    # this worker thread may idle for a while before being reused, so
    # drop cached objects now rather than let dead objects hold memory.
    g.reset_caches()

    c.request_timer.intermediate("post")

    # push data to statsd
    c.request_timer.stop()
    g.stats.flush()
def post(self):
    """Finalize the response after the controller action has run.

    In order: record timing, wrap the rendered body, set cache/frame
    headers, write the page cache, flush dirty cookies, update the
    user's last-visit time, and flush timing stats. Operates on the
    pylons request-global stack objects (``c``, ``g``, ``request``,
    ``response``).
    """
    c.request_timer.intermediate("action")

    # concatenate the (possibly chunked) body into one UTF-8 string and
    # run it through the wrapper installed earlier in the request.
    if c.response_wrapper:
        content = "".join(_force_utf8(x)
                          for x in tup(response.content) if x)
        wrapped_content = c.response_wrapper(content)
        response.content = wrapped_content

    # logged-in pages are per-user; forbid downstream caching unless
    # this page was explicitly marked cacheable for logged-in users.
    if c.user_is_loggedin and not c.allow_loggedin_cache:
        response.headers['Cache-Control'] = 'no-cache'
        response.headers['Pragma'] = 'no-cache'

    if c.deny_frames:
        response.headers["X-Frame-Options"] = "DENY"

    #set content cache
    # only anonymous (or explicitly allowed) GETs that weren't already
    # served from cache, excluding rate-limit (429) and unavailable
    # (503) responses.
    if (g.page_cache_time
            and request.method.upper() == 'GET'
            and (not c.user_is_loggedin or c.allow_loggedin_cache)
            and not c.used_cache
            and response.status_code not in (429, 503)):
        try:
            g.pagecache.set(self.request_key(),
                            (response._current_obj(), c.cookies),
                            g.page_cache_time)
        except MemcachedError as e:
            # this codepath will actually never be hit as long as
            # the pagecache memcached client is in no_reply mode.
            g.log.warning("Ignored exception (%r) on pagecache "
                          "write for %r", e, request.path)

    # send cookies
    for k, v in c.cookies.iteritems():
        if v.dirty:
            response.set_cookie(key=k,
                                value=quote(v.value),
                                domain=v.domain,
                                expires=v.expires,
                                secure=getattr(v, 'secure', False),
                                httponly=getattr(v, 'httponly', False))

    end_time = datetime.now(g.tz)

    # update last_visit
    # skipped for POSTs and for /validuser, and when DB writes are
    # disallowed or the request opted out.
    if (c.user_is_loggedin and not g.disallow_db_writes
        and request.method.upper() != "POST"
        and not c.dont_update_last_visit
        and request.path != '/validuser'):
        c.user.update_last_visit(c.start_time)

    # NOTE(review): check_request is defined elsewhere — presumably
    # per-request accounting keyed on the end time; confirm.
    check_request(end_time)

    # this thread is probably going to be reused, but it could be
    # a while before it is. So we might as well dump the cache in
    # the mean time so that we don't have dead objects hanging
    # around taking up memory
    g.reset_caches()

    # push data to statsd
    c.request_timer.stop()
    g.stats.flush()