def GET_document(self):
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', '')

        # StatusBasedRedirect will override this anyway, but we need this
        # here for pagecache to see.
        response.status_int = code

        if srname:
            c.site = Subreddit._by_name(srname)

        if request.GET.has_key('allow_framing'):
            c.allow_framing = bool(request.GET['allow_framing'] == '1')

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                if '\r\n' not in x_sup_id:
                    response.headers['x-sup-id'] = x_sup_id
            return ""
        elif c.render_style not in self.allowed_render_styles:
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            data = request.environ.get('extra_error_data', {'error': code})
            if request.environ.get("WANT_RAW_JSON"):
                return scriptsafe_dumps(data)
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {'admin': random.choice(self.admins)}
            failien_url = make_failien_url()
            sad_message = safemarkdown(rand_strings.sadmessages % randmin)
            return redditbroke % (failien_url, sad_message)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except Exception as e:
        return handle_awful_failure("ErrorController.GET_document: %r" % e)
def setUp(self):
    # Reset the validator state and errors before every test.
    self.validator = VSubmitParent(None)
    c.errors = ErrorSet()
    c.user_is_loggedin = True
    c.user_is_admin = False
    c.user = Account(id=100)
    Account.enemy_ids = MagicMock(return_value=[])
def setUp(self):
    super(TestVSubmitParent, self).setUp()
    # Reset the validator state and errors before every test.
    self.validator = VSubmitParent(None)
    c.errors = ErrorSet()
    c.user_is_loggedin = True
    c.user_is_admin = False
    c.user = Account(id=100)
    self.autopatch(Account, "enemy_ids", return_value=[])
    self.autopatch(Subreddit, "_byID", return_value=None)
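# Illustrative only: a test in the style this setUp supports. VSubmitParent's
# exact run() signature isn't shown here, so the argument and the expectation
# below are assumptions, not the real test suite.
def test_missing_parent_is_an_error(self):
    # Subreddit._byID is patched to return None above, so the parent
    # lookup behaves as a miss.
    self.validator.run(None)
    self.assertTrue(self.validator.has_errors)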
def pre(self):
    action = request.environ["pylons.routes_dict"].get("action")
    if action:
        if not self._get_action_handler():
            action = 'invalid'
        c.request_timer = g.stats.get_timer(request_timer_name(action))
    else:
        c.request_timer = SimpleSillyStub()

    c.response_wrapper = None
    c.start_time = datetime.now(g.tz)
    c.request_timer.start()
    g.reset_caches()

    c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                          g.domain_prefix)
    c.secure = request.host in g.secure_domains

    # wsgi.url_scheme is used in generating absolute urls, such as by webob
    # for translating some of our relative-url redirects to rfc compliant
    # absolute-url ones. TODO: consider using one of webob's methods of
    # setting wsgi.url_scheme based on incoming request headers added by
    # upstream things like stunnel/haproxy.
    if c.secure:
        request.environ["wsgi.url_scheme"] = "https"

    c.request_origin = request.host_url

    # check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_throttled()
        ratelimit_agents()

    c.allow_loggedin_cache = False

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    c.errors = ErrorSet()
    c.cookies = Cookies()
    # if an rss feed, this will also log the user in if a feed=
    # GET param is included
    set_content_type()

    c.request_timer.intermediate("minimal-pre")

    # True/False forces. None updates for most non-POST requests
    c.update_last_visit = None

    g.stats.count_string('user_agents', request.user_agent)

    hooks.get_hook("reddit.request.minimal_begin").call()
def pre(self):
    action = request.environ["pylons.routes_dict"].get("action")
    if action:
        if not self._get_action_handler():
            action = 'invalid'
        c.request_timer = g.stats.get_timer(request_timer_name(action))
    else:
        c.request_timer = SimpleSillyStub()

    c.response_wrapper = None
    c.start_time = datetime.now(g.tz)
    c.request_timer.start()
    g.reset_caches()

    c.domain_prefix = request.environ.get("reddit-domain-prefix",
                                          g.domain_prefix)
    c.secure = request.environ["wsgi.url_scheme"] == "https"
    c.request_origin = request.host_url

    # check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_throttled()
        ratelimit_agents()

    c.allow_loggedin_cache = False
    c.allow_framing = False

    c.cdn_cacheable = (request.via_cdn and
                       g.login_cookie not in request.cookies)

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    c.errors = ErrorSet()
    c.cookies = Cookies()
    # if an rss feed, this will also log the user in if a feed=
    # GET param is included
    set_content_type()

    c.request_timer.intermediate("minimal-pre")

    # True/False forces. None updates for most non-POST requests
    c.update_last_visit = None

    g.stats.count_string('user_agents', request.user_agent)

    if not self.defer_ratelimiting:
        self.run_sitewide_ratelimits()
        c.request_timer.intermediate("minimal-ratelimits")

    hooks.get_hook("reddit.request.minimal_begin").call()
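# A sketch of the deferral hook used above: a subclass can set
# defer_ratelimiting = True to skip the inline sitewide check in pre() and
# apply it later itself, e.g. once the client is identified. Only
# defer_ratelimiting and run_sitewide_ratelimits come from the pre() body
# above; the class names here are assumptions for illustration.
class DeferredRatelimitController(BaseController):  # hypothetical subclass
    defer_ratelimiting = True

    def pre(self):
        BaseController.pre(self)
        # ... authenticate/identify the client first ...
        self.run_sitewide_ratelimits()
        c.request_timer.intermediate("deferred-ratelimits")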
def _checkFails(self, password, fatal=False, error=errors.WRONG_PASSWORD):
    # So we don't have any stale errors laying around
    c.errors = ErrorSet()
    validator = VVerifyPassword('dummy', fatal=fatal)
    if fatal:
        try:
            validator.run(password)
        except HTTPException:
            return True
        return False
    else:
        validator.run(password)
        return validator.has_errors or c.errors.get((error, None))
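# Minimal usage sketch for the helper above; the test names and passwords are
# illustrative, but the fatal/non-fatal behavior follows _checkFails itself.
def test_wrong_password(self):
    # non-fatal: the error lands on c.errors / validator.has_errors
    self.assertTrue(self._checkFails('wrongpass'))

def test_wrong_password_fatal(self):
    # fatal=True: VVerifyPassword aborts with an HTTPException instead
    self.assertTrue(self._checkFails('wrongpass', fatal=True))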
def setUp(self):
    # Reset the validator state and errors before every test.
    self.validator = ValidEmail()
    c.errors = ErrorSet()
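# Illustrative only: assuming ValidEmail follows the same run()/has_errors
# protocol as the other validators in these tests, a minimal case might be:
def test_rejects_malformed_address(self):
    self.validator.run('not-an-email')
    self.assertTrue(self.validator.has_errors)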
def setUp(self):
    # Reset the validator state and errors before every test.
    self.validator = VSubredditName(None)
    c.errors = ErrorSet()
def GET_document(self):
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', '')
        error_name = request.GET.get('error_name', '')

        if isinstance(c.user, basestring):
            # somehow requests are getting here with c.user unset
            c.user_is_loggedin = False
            c.user = UnloggedUser(browser_langs=None)

        if srname:
            c.site = Subreddit._by_name(srname)

        if request.GET.has_key('allow_framing'):
            c.allow_framing = bool(request.GET['allow_framing'] == '1')

        if (error_name == 'IN_TIMEOUT' and
                'usable_error_content' not in request.environ):
            timeout_days_remaining = c.user.days_remaining_in_timeout
            errpage = pages.InterstitialPage(
                _("suspended"),
                content=pages.InTimeoutInterstitial(
                    timeout_days_remaining=timeout_days_remaining,
                ),
            )
            request.environ['usable_error_content'] = errpage.render()

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            return ""
        elif c.render_style not in self.allowed_render_styles:
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            data = request.environ.get('extra_error_data', {'error': code})
            message = request.GET.get('message', '')
            if message:
                data['message'] = message
            if request.environ.get("WANT_RAW_JSON"):
                return scriptsafe_dumps(data)
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 400:
            return self.send400()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            failien_url = make_failien_url()
            sad_message = get_funny_translated_string("500_page")
            sad_message %= {'admin': random.choice(self.admins)}
            sad_message = safemarkdown(sad_message)
            return redditbroke % (failien_url, sad_message)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except Exception as e:
        return handle_awful_failure("ErrorController.GET_document: %r" % e)
def pre(self):
    g.cache.caches = (LocalCache(),) + g.cache.caches[1:]

    # check if user-agent needs a dose of rate-limiting
    if not c.error_page:
        ratelimit_agents()

    # the domain has to be set before Cookies get initialized
    set_subreddit()
    set_cnameframe()

    # populate c.cookies
    c.cookies = Cookies()
    try:
        for k, v in request.cookies.iteritems():
            # we can unquote even if it's not quoted
            c.cookies[k] = Cookie(value=unquote(v), dirty=False)
    except CookieError:
        # pylons or one of its associated libraries can't handle
        # broken cookies
        request.environ['HTTP_COOKIE'] = ''

    c.response_wrappers = []
    c.errors = ErrorSet()
    c.firsttime = firsttime()
    (c.user, maybe_admin) = valid_cookie(current_login_cookie())
    if c.user:
        c.user_is_loggedin = True
    else:
        c.user = UnloggedUser(get_browser_langs())
        c.user._load()

    if c.user_is_loggedin:
        if not c.user._loaded:
            c.user._load()
        c.modhash = c.user.modhash()
        if request.method.lower() == 'get':
            read_click_cookie()
            read_mod_cookie()
        if hasattr(c.user, 'msgtime') and c.user.msgtime:
            c.have_messages = c.user.msgtime
        c.user_is_admin = maybe_admin and c.user.name in g.admins
        c.user_is_sponsor = c.user_is_admin or c.user.name in g.sponsors

    c.over18 = over18()

    #set_browser_langs()
    set_host_lang()
    set_content_type()
    set_iface_lang()
    set_content_lang()
    set_colors()
    set_recent_reddits()

    # set some environmental variables in case we hit an abort
    if not isinstance(c.site, FakeSubreddit):
        request.environ['REDDIT_NAME'] = c.site.name

    # check if the user has access to this subreddit
    if not c.site.can_view(c.user) and not c.error_page:
        abort(403, "forbidden")

    # check over 18
    if (c.site.over_18 and not c.over18 and
            request.path not in ("/frame", "/over18") and
            c.render_style == 'html'):
        return self.intermediate_redirect("/over18")

    # check whether to allow custom styles
    c.allow_styles = True
    if g.css_killswitch:
        c.allow_styles = False
    # if the preference is set and we're not at a cname
    elif not c.user.pref_show_stylesheets and not c.cname:
        c.allow_styles = False
    # if the site has a cname, but we're not using it
    elif c.site.domain and not c.cname:
        c.allow_styles = False

    # check content cache
    if not c.user_is_loggedin:
        r = cache.get(self.request_key())
        if r and request.method == 'GET':
            response = c.response
            response.headers = r.headers
            response.content = r.content

            for x in r.cookies.keys():
                if x in cache_affecting_cookies:
                    cookie = r.cookies[x]
                    response.set_cookie(key=x,
                                        value=cookie.value,
                                        domain=cookie.get('domain', None),
                                        expires=cookie.get('expires', None),
                                        path=cookie.get('path', None))

            response.status_code = r.status_code
            request.environ['pylons.routes_dict']['action'] = 'cached_response'
            # make sure to carry over the content type
            c.response_content_type = r.headers['content-type']
            if r.headers.has_key('access-control'):
                c.response_access_control = r.headers['access-control']
            c.used_cache = True
            # response wrappers have already been applied before cache write
            c.response_wrappers = []
def setUp(self):
    # Reset the validator state and errors before every test.
    #self.validator = VUname(("user",))
    self.validator = VUname(None)
    c.errors = ErrorSet()
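# Illustrative only: assuming VUname follows the same run()/has_errors
# protocol; both inputs and expectations are made up for the sketch.
def test_username_validation(self):
    # a name with whitespace and punctuation should be rejected
    self.validator.run('bad name!')
    self.assertTrue(self.validator.has_errors)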