def GET_refresh_token(self, *args, **kwargs): # pylint: disable=unused-argument
    """Generate a refresh token given a username.

    Looks up (or registers) the account for ?username=, ensures it has
    default subscriptions, then mints a full-access OAuth2 refresh/access
    token pair and returns the serialized token response.
    """
    username = request.GET['username']
    try:
        account = Account._by_name(username)
    except NotFound:
        # auto-register unknown users; password is a throwaway uuid
        account = register(username, uuid4().hex, '127.0.0.1')

    # subscribe the user now because reddit does not have consistency across
    # its APIs on what it considers the user to be subscribed to
    if not account.has_subscribed:
        Subreddit.subscribe_defaults(account)
        account.has_subscribed = True
        account._commit()

    client_id = g.secrets['generate_refresh_token_client_id']
    client = OAuth2Client.get_token(client_id)
    # FULL_ACCESS: token is granted every scope
    scope = OAuth2Scope(OAuth2Scope.FULL_ACCESS)
    user_id = account._id36

    refresh_token = OAuth2RefreshToken._new(
        client_id=client._id,
        user_id=user_id,
        scope=scope,
    )
    access_token = OAuth2AccessToken._new(
        client_id=client._id,
        user_id=user_id,
        scope=scope,
        device_id='device',
    )
    return json.dumps(OAuth2AccessController._make_new_token_response(access_token, refresh_token))
def setUp(self):
    """Create a committed account and subreddit, then relate them via SRMember."""
    account = Account()
    account._commit()
    subreddit = Subreddit(
        name='subreddit_name_%s' % (self.seconds_since_epoc(),),
        title='subreddit_title_%s' % (self.seconds_since_epoc(),),
    )
    subreddit._commit()
    self.rel = SRMember(subreddit, account, 'test')
def testExcludeNegativeLookups(self):
    """A cached negative entry raises NotFound without any DB or cache write."""
    self.cache.get_multi.return_value = {
        "doesnotexist": Subreddit.SRNAME_NOTFOUND,
    }
    with self.assertRaises(NotFound):
        Subreddit._by_name("doesnotexist")
    for untouched in (self.subreddit_query,
                      self.subreddit_byID,
                      self.cache.set_multi):
        self.assertEqual(untouched.call_count, 0)
def testSingleCached(self):
    """A name whose id is cached resolves without a by-name query."""
    cached_sr = Subreddit(id=1, name="exists")
    self.cache.get_multi.return_value = {"exists": cached_sr._id}
    self.subreddit_byID.return_value = [cached_sr]
    looked_up = Subreddit._by_name("exists")
    self.assertEqual(looked_up, cached_sr)
    self.assertEqual(self.subreddit_query.call_count, 0)
def testSingleFromDB(self):
    """A cache miss falls through to the DB and writes the id back."""
    db_sr = Subreddit(id=1, name="exists")
    self.cache.get_multi.return_value = {}
    self.subreddit_query.return_value = [db_sr]
    self.subreddit_byID.return_value = [db_sr]
    self.assertEqual(Subreddit._by_name("exists"), db_sr)
    self.assertEqual(self.cache.add_multi.call_count, 1)
def testCacheNegativeResults(self):
    """A total miss raises NotFound and caches a negative sentinel."""
    self.cache.get_multi.return_value = {}
    self.subreddit_query.return_value = []
    self.subreddit_byID.return_value = []
    with self.assertRaises(NotFound):
        Subreddit._by_name("doesnotexist")
    negative_entry = {"doesnotexist": Subreddit.SRNAME_NOTFOUND}
    self.cache.add_multi.assert_called_once_with(negative_entry,
                                                 prefix="srid:")
def testCacheNegativeResults(self):
    """A total miss raises NotFound and set_multi stores the sentinel."""
    self.cache.get_multi.return_value = {}
    self.subreddit_query.return_value = []
    self.subreddit_byID.return_value = []
    with self.assertRaises(NotFound):
        Subreddit._by_name("doesnotexist")
    negative_entry = {"doesnotexist": Subreddit.SRNAME_NOTFOUND}
    self.cache.set_multi.assert_called_once_with(negative_entry,
                                                 prefix="srid:")
def testExcludeNegativeLookups(self):
    """A cached negative entry short-circuits: no query, no cache add."""
    self.cache.get_multi.return_value = {
        "doesnotexist": Subreddit.SRNAME_NOTFOUND,
    }
    with self.assertRaises(NotFound):
        Subreddit._by_name("doesnotexist")
    for untouched in (self.subreddit_query,
                      self.subreddit_byID,
                      self.cache.add_multi):
        self.assertEqual(untouched.call_count, 0)
def get_promote_srid(name = 'promos'):
    """Return the id of the promoted-links subreddit, creating it on demand."""
    try:
        # stale read is fine: the subreddit is effectively immutable once made
        sr = Subreddit._by_name(name, stale=True)
    except NotFound:
        sr = Subreddit._new(name = name,
                            title = "promoted links",
                            # negative author_ids make this unlistable
                            author_id = -1,
                            type = "public",
                            ip = '0.0.0.0')
    return sr._id
def testMultiSomeDontExist(self):
    """Multi-name lookup returns only the names that resolve."""
    known = Subreddit(id=1, name="exists")
    self.cache.get_multi.return_value = {known.name: known._id}
    self.subreddit_query.return_value = []
    self.subreddit_byID.return_value = [known]
    result = Subreddit._by_name(["exists", "doesnt"])
    self.assertEqual(result, {known.name: known})
    self.assertEqual(self.cache.get_multi.call_count, 1)
    self.assertEqual(self.subreddit_query.call_count, 1)
def testForceUpdate(self):
    """_update=True bypasses the cached id and rewrites the cache entry."""
    fresh = Subreddit(id=1, name="exists")
    self.cache.get_multi.return_value = {fresh.name: fresh._id}
    self.subreddit_query.return_value = [fresh]
    self.subreddit_byID.return_value = [fresh]
    self.assertEqual(Subreddit._by_name("exists", _update=True), fresh)
    self.cache.add_multi.assert_called_once_with({fresh.name: fresh._id},
                                                 prefix="srid:")
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    # stale reads are acceptable only for idempotent methods
    can_stale = request.method.upper() in ('GET', 'HEAD')

    c.site = Frontpage
    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            # permanent redirect off the legacy cname onto the main domain
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        # '+'-joined names form an ad-hoc multireddit
        sr_names = sr_name.split('+')
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown but valid-looking names go to search; bad names 404
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def testMultiSomeInvalid(self):
    """Syntactically invalid names are dropped before any DB query."""
    valid = Subreddit(id=1, name="exists")
    self.cache.get_multi.return_value = {valid.name: valid._id}
    self.subreddit_query.return_value = []
    self.subreddit_byID.return_value = [valid]
    result = Subreddit._by_name(["exists", "_illegalunderscore"])
    self.assertEqual(result, {valid.name: valid})
    self.assertEqual(self.cache.get_multi.call_count, 1)
    self.assertEqual(self.subreddit_query.call_count, 0)
def testMultiCached(self):
    """When every name's id is cached, no by-name query happens."""
    cached = [
        Subreddit(id=1, name="exists"),
        Subreddit(id=2, name="also"),
    ]
    self.cache.get_multi.return_value = {sr.name: sr._id for sr in cached}
    self.subreddit_byID.return_value = cached
    result = Subreddit._by_name(["exists", "also"])
    self.assertEqual(result, {sr.name: sr for sr in cached})
    self.assertEqual(self.subreddit_query.call_count, 0)
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    # stale reads are acceptable only for idempotent methods
    can_stale = request.method.upper() in ('GET','HEAD')

    c.site = Frontpage
    if not sr_name:
        #check for cnames
        cname = request.environ.get('legacy-cname')
        if cname:
            sr = Subreddit._by_domain(cname) or Frontpage
            domain = g.domain
            if g.domain_prefix:
                domain = ".".join((g.domain_prefix, domain))
            # permanent redirect off the legacy cname onto the main domain
            redirect_to('http://%s%s' % (domain, sr.path), _code=301)
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    elif '+' in sr_name:
        # '+'-joined names form an ad-hoc multireddit
        sr_names = sr_name.split('+')
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown but valid-looking names go to search; bad names 404
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    # stale reads are acceptable only for idempotent methods
    can_stale = request.method.upper() in ('GET','HEAD')

    default_sr = DefaultSR()
    c.site = default_sr
    if not sr_name:
        #check for cnames
        sub_domain = request.environ.get('sub_domain')
        if sub_domain and not sub_domain.endswith(g.media_domain):
            c.site = Subreddit._by_domain(sub_domain) or default_sr
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    else:
        try:
            if '+' in sr_name:
                # '+'-joined names form an ad-hoc multireddit
                srs = set()
                sr_names = sr_name.split('+')
                real_path = sr_name
                srs = Subreddit._by_name(sr_names, stale=can_stale).values()
                if len(srs) != len(sr_names):
                    abort(404)
                elif any(isinstance(sr, FakeSubreddit) for sr in srs):
                    # only the 'all' and 'friends' specials may appear alone
                    if All in srs:
                        c.site = All
                    elif Friend in srs:
                        c.site = Friend
                    else:
                        abort(400)
                else:
                    sr_ids = [sr._id for sr in srs]
                    c.site = MultiReddit(sr_ids, real_path)
            else:
                c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown but valid-looking names go to search; bad names 404
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")
    # stale reads are acceptable only for idempotent methods
    can_stale = request.method.upper() in ('GET','HEAD')

    default_sr = DefaultSR()
    c.site = default_sr
    if not sr_name:
        #check for cnames
        sub_domain = request.environ.get('sub_domain')
        if sub_domain and not sub_domain.endswith(g.media_domain):
            c.site = Subreddit._by_domain(sub_domain) or default_sr
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    else:
        try:
            if '+' in sr_name:
                # '+'-joined names form an ad-hoc multireddit
                srs = set()
                sr_names = sr_name.split('+')
                real_path = sr_name
                srs = Subreddit._by_name(sr_names, stale=can_stale).values()
                if len(srs) != len(sr_names):
                    abort(404)
                elif any(isinstance(sr, FakeSubreddit) for sr in srs):
                    # only the 'all' and 'friends' specials may appear alone
                    if All in srs:
                        c.site = All
                    elif Friend in srs:
                        c.site = Friend
                    else:
                        abort(400)
                else:
                    sr_ids = [sr._id for sr in srs]
                    c.site = MultiReddit(sr_ids, real_path)
            else:
                c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown but valid-looking names go to search; bad names 404
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page:
                abort(404)

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    # the r parameter gets added by javascript for POST requests so we
    # can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get("r"))
    domain = request.environ.get("domain")
    # stale reads are acceptable only for idempotent methods
    can_stale = request.method.upper() in ("GET", "HEAD")

    c.site = Frontpage
    if not sr_name:
        # check for cnames
        sub_domain = request.environ.get("sub_domain")
        if sub_domain and not sub_domain.endswith(g.media_domain):
            c.site = Subreddit._by_domain(sub_domain) or default_sr
    elif sr_name == "r":
        # reddits
        c.site = Sub
    elif "+" in sr_name:
        # '+'-joined names form an ad-hoc multireddit
        sr_names = sr_name.split("+")
        srs = set(Subreddit._by_name(sr_names, stale=can_stale).values())
        if All in srs:
            c.site = All
        elif Friends in srs:
            c.site = Friends
        else:
            srs = [sr for sr in srs if not isinstance(sr, FakeSubreddit)]
            if len(srs) == 0:
                c.site = MultiReddit([], sr_name)
            elif len(srs) == 1:
                c.site = srs.pop()
            else:
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, sr_name)
    else:
        try:
            c.site = Subreddit._by_name(sr_name, stale=can_stale)
        except NotFound:
            # unknown but valid-looking names go to search; bad names 404
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page and not request.path.startswith("/api/login/"):
                abort(404)

    # if we didn't find a subreddit, check for a domain listing
    if not sr_name and isinstance(c.site, DefaultSR) and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def set_subreddit():
    """Resolve the request's subreddit into c.site and set the default-sr state."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")

    if not sr_name:
        #check for cnames
        sub_domain = request.environ.get('sub_domain')
        sr = Subreddit._by_domain(sub_domain) if sub_domain else None
        c.site = sr or Default
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    else:
        try:
            if '+' in sr_name:
                # '+'-joined names form an ad-hoc multireddit
                srs = set()
                sr_names = sr_name.split('+')
                real_path = sr_name
                # NOTE: one lookup per name (no batch _by_name here)
                for sr_name in sr_names:
                    srs.add(Subreddit._by_name(sr_name))
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, real_path)
            else:
                c.site = Subreddit._by_name(sr_name)
        except NotFound:
            c.site = Default
            if chksrname(sr_name):
                redirect_to("/categories/create?name=%s" % sr_name)
            elif not c.error_page:
                abort(404, "not found")

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and c.site == Default and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
        try:
            c.current_or_default_sr = Subreddit._by_name(g.default_sr)
        except NotFound:
            c.current_or_default_sr = None
    else:
        c.current_or_default_sr = c.site

    # check that the site is available:
    if c.site._spam and not c.user_is_admin and not c.error_page:
        abort(404, "not found")
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    # the r parameter gets added by javascript for POST requests so we
    # can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get("r"))
    domain = request.environ.get("domain")

    c.site = Default
    if not sr_name:
        # check for cnames
        sub_domain = request.environ.get("sub_domain")
        if sub_domain and not sub_domain.endswith(g.media_domain):
            c.site = Subreddit._by_domain(sub_domain) or Default
    elif sr_name == "r":
        # reddits
        c.site = Sub
    else:
        try:
            if "+" in sr_name:
                # '+'-joined names form an ad-hoc multireddit
                srs = set()
                sr_names = sr_name.split("+")
                real_path = sr_name
                srs = Subreddit._by_name(sr_names).values()
                if len(srs) != len(sr_names):
                    abort(404)
                elif any(isinstance(sr, FakeSubreddit) for sr in srs):
                    # only the 'all' and 'friends' specials may appear alone
                    if All in srs:
                        c.site = All
                    elif Friend in srs:
                        c.site = Friend
                    else:
                        abort(400)
                else:
                    sr_ids = [sr._id for sr in srs]
                    c.site = MultiReddit(sr_ids, real_path)
            else:
                c.site = Subreddit._by_name(sr_name)
        except NotFound:
            # unknown but valid-looking names go to search; bad names 404
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page:
                abort(404)

    # if we didn't find a subreddit, check for a domain listing
    if not sr_name and c.site == Default and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def _add_multi_srs(self, multi, sr_datas):
    """Validate and add subreddits to a multireddit.

    sr_datas is an iterable of dicts, each with a 'name' key plus the
    per-subreddit properties to store. Raises RedditError for special
    (fake) subreddits (400), unknown names (400), or too many
    subreddits (409). Returns the {Subreddit: properties} mapping that
    was added. Note: the 'name' key is deleted from each sr_data dict
    in place.
    """
    srs = Subreddit._by_name(sr_data['name'] for sr_data in sr_datas)

    # special subreddits (all, friends, ...) may not be in a multi
    for sr in srs.itervalues():
        if isinstance(sr, FakeSubreddit):
            raise RedditError('MULTI_SPECIAL_SUBREDDIT',
                              msg_params={'path': sr.path},
                              code=400)

    sr_props = {}
    for sr_data in sr_datas:
        try:
            sr = srs[sr_data['name']]
        except KeyError:
            raise RedditError('SUBREDDIT_NOEXIST', code=400)
        else:
            # name is passed in via the API data format, but should not be
            # stored on the model.
            del sr_data['name']
            sr_props[sr] = sr_data

    try:
        multi.add_srs(sr_props)
    except TooManySubredditsError:
        # exception instance is unused; translate to an API-level error
        raise RedditError('MULTI_TOO_MANY_SUBREDDITS', code=409)

    return sr_props
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        #no cookies on errors
        c.cookies.clear()
        code = request.GET.get('code', '')
        srname = request.GET.get('srname', '')
        if srname:
            c.site = Subreddit._by_name(srname)
        if c.render_style not in self.allowed_render_styles:
            return str(code)
        elif code == '403':
            return self.send403()
        elif code == '500':
            return redditbroke % rand_strings.sadmessages
        elif code == '503':
            c.response.status_code = 503
            c.response.headers['Retry-After'] = 1
            c.response.content = toofast
            return c.response
        elif code == '304':
            # NOTE(review): x-sup-id is echoed from the query string with no
            # CR/LF filtering here — confirm header-injection safety upstream.
            if request.GET.has_key('x-sup-id'):
                c.response.headers['x-sup-id'] = request.GET.get('x-sup-id')
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("something really bad just happened.")
def set_recent_reddits():
    """Populate c.recent_reddits from the 'recent_reddits' cookie of fullnames."""
    cookie_value = read_user_cookie('recent_reddits')
    c.recent_reddits = []
    if cookie_value:
        fullnames = [name for name in cookie_value.split(',') if name]
        c.recent_reddits = Subreddit._by_fullname(fullnames, data=True,
                                                  return_dict=False)
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # non-numeric codes are treated as a plain 404
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        # StatusBasedRedirect will override this anyway, but we need this
        # here for pagecache to see.
        response.status_int = code

        if srname:
            c.site = Subreddit._by_name(srname)

        if request.GET.has_key('allow_framing'):
            c.allow_framing = bool(request.GET['allow_framing'] == '1')

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # reject CR/LF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    response.headers['x-sup-id'] = x_sup_id
            return ""
        elif c.render_style not in self.allowed_render_styles:
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            data = request.environ.get('extra_error_data', {'error': code})
            if request.environ.get("WANT_RAW_JSON"):
                return scriptsafe_dumps(data)
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {'admin': random.choice(self.admins)}
            failien_url = make_failien_url()
            sad_message = safemarkdown(rand_strings.sadmessages % randmin)
            return redditbroke % (failien_url, sad_message)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except Exception as e:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("ErrorController.GET_document: %r" % e)
def subreddits_slow(self):
    """Resolve and memoize the Subreddit objects for this object's names."""
    resolved = self._subreddits
    if resolved is None:
        resolved = Subreddit._by_name(self.subreddit_names).values()
        self._subreddits = resolved
    return resolved
def set_recent_reddits():
    """Populate c.recent_reddits from the cookie, tolerating stale fullnames."""
    cookie_value = read_user_cookie("recent_reddits")
    c.recent_reddits = []
    if cookie_value:
        fullnames = [name
                     for name in cookie_value.strip("[]").split(",")
                     if name]
        try:
            c.recent_reddits = Subreddit._by_fullname(
                fullnames, data=True, return_dict=False)
        except NotFound:
            # cookie may reference deleted things; leave the list empty
            pass
def embeddable_sr(thing):
    """Return thing's subreddit when it may be embedded, otherwise False."""
    if isinstance(thing, Subreddit):
        sr = thing
    elif not thing.sr_id:
        sr = None
    else:
        try:
            sr = Subreddit._byID(thing.sr_id)
        except NotFound:
            sr = None
    if sr is None or sr.type in Subreddit.private_types:
        return False
    return sr
def testSingleFromDB(self):
    """A cache miss falls through to the DB and set_multi stores the id."""
    db_sr = Subreddit(id=1, name="exists")
    self.cache.get_multi.return_value = {}
    self.subreddit_query.return_value = [db_sr]
    self.subreddit_byID.return_value = [db_sr]
    self.assertEqual(Subreddit._by_name("exists"), db_sr)
    self.assertEqual(self.cache.set_multi.call_count, 1)
def set_recent_reddits():
    """Populate c.recent_reddits from the cookie, tolerating stale fullnames."""
    cookie_value = read_user_cookie('recent_reddits')
    c.recent_reddits = []
    if cookie_value:
        fullnames = [name
                     for name in cookie_value.strip('[]').split(',')
                     if name]
        try:
            c.recent_reddits = Subreddit._by_fullname(
                fullnames, data=True, return_dict=False)
        except NotFound:
            # cookie may reference deleted things; leave the list empty
            pass
def embeddable_sr(thing):
    """Return thing's subreddit when it may be embedded, otherwise False."""
    if isinstance(thing, Subreddit):
        sr = thing
    elif not thing.sr_id:
        sr = None
    else:
        try:
            sr = Subreddit._byID(thing.sr_id)
        except NotFound:
            sr = None
    if sr is None or sr.type in DISALLOWED_SR_TYPES:
        return False
    return sr
def get_roadblocks(cls):
    """Return (subreddit name, start, end) tuples for every stored roadblock."""
    rows = list(cls._cf.get_range())
    # batch-resolve all subreddits up front rather than one per row
    srs = Subreddit._byID36([id36 for id36, columns in rows], data=True)
    roadblocks = []
    for id36, columns in rows:
        sr_name = srs[id36].name
        for key in columns.iterkeys():
            start, end = cls._dates_from_key(key)
            roadblocks.append((sr_name, start, end))
    return roadblocks
def testForceUpdate(self):
    """_update=True bypasses the cached id and set_multi rewrites it."""
    fresh = Subreddit(id=1, name="exists")
    self.cache.get_multi.return_value = {fresh.name: fresh._id}
    self.subreddit_query.return_value = [fresh]
    self.subreddit_byID.return_value = [fresh]
    self.assertEqual(Subreddit._by_name("exists", _update=True), fresh)
    self.cache.set_multi.assert_called_once_with({fresh.name: fresh._id},
                                                 prefix="srid:")
def embeddable_sr(thing):
    """Return thing's subreddit when it may be embedded, otherwise False."""
    if isinstance(thing, Subreddit):
        sr = thing
    elif not thing.sr_id:
        sr = None
    else:
        try:
            sr = Subreddit._byID(thing.sr_id, data=True)
        except NotFound:
            sr = None
    if sr is None or sr.type in Subreddit.private_types:
        return False
    return sr
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get("code", "")
        try:
            code = int(code)
        except ValueError:
            # non-numeric codes are treated as a plain 404
            code = 404
        srname = request.GET.get("srname", "")
        takedown = request.GET.get("takedown", "")

        # StatusBasedRedirect will override this anyway, but we need this
        # here for pagecache to see.
        response.status_int = code

        if srname:
            c.site = Subreddit._by_name(srname)

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            if request.GET.has_key("x-sup-id"):
                x_sup_id = request.GET.get("x-sup-id")
                # reject CR/LF to prevent response-header injection
                if "\r\n" not in x_sup_id:
                    response.headers["x-sup-id"] = x_sup_id
            return ""
        elif c.render_style not in self.allowed_render_styles:
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            data = request.environ.get("extra_error_data", {"error": code})
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {"admin": random.choice(self.admins)}
            failien_url = make_failien_url()
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("something really bad just happened.")
def _get_multi_sr_owner(self, path_info):
    """Resolve the owner subreddit of a multi path, enforcing edit rights."""
    try:
        owner_sr = Subreddit._by_name(path_info['owner'])
    except NotFound:
        raise RedditError('SUBREDDIT_NOEXIST', code=404)
    can_configure = owner_sr.is_moderator_with_perms(c.user, 'config')
    if not can_configure and not c.user_is_admin:
        raise RedditError('MULTI_CANNOT_EDIT', code=403,
                          fields='multipath')
    return owner_sr
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # non-numeric codes are treated as a plain 404
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")
        if srname:
            c.site = Subreddit._by_name(srname)
        if c.render_style not in self.allowed_render_styles:
            # no body on 204/304 so caches don't get confused
            if code not in (204, 304):
                c.response.content = str(code)
            c.response.status_code = code
            return c.response
        elif c.render_style == "api":
            data = request.environ.get('extra_error_data', {'error': code})
            c.response.content = json.dumps(data)
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(
                1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # reject CR/LF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("something really bad just happened.")
def set_subreddit():
    """Resolve the request's subreddit (or multi/domain listing) into c.site."""
    #the r parameter gets added by javascript for POST requests so we
    #can reference c.site in api.py
    sr_name = request.environ.get("subreddit", request.POST.get('r'))
    domain = request.environ.get("domain")

    c.site = Default
    if not sr_name:
        #check for cnames
        sub_domain = request.environ.get('sub_domain')
        if sub_domain and not sub_domain.endswith(g.media_domain):
            c.site = Subreddit._by_domain(sub_domain) or Default
    elif sr_name == 'r':
        #reddits
        c.site = Sub
    else:
        try:
            if '+' in sr_name:
                # '+'-joined names form an ad-hoc multireddit
                srs = set()
                sr_names = sr_name.split('+')
                real_path = sr_name
                # NOTE: one lookup per name (no batch _by_name here)
                for sr_name in sr_names:
                    srs.add(Subreddit._by_name(sr_name))
                sr_ids = [sr._id for sr in srs]
                c.site = MultiReddit(sr_ids, real_path)
            else:
                c.site = Subreddit._by_name(sr_name)
        except NotFound:
            # unknown but valid-looking names go to search; bad names 404
            sr_name = chksrname(sr_name)
            if sr_name:
                redirect_to("/reddits/search?q=%s" % sr_name)
            elif not c.error_page:
                abort(404, "not found")

    #if we didn't find a subreddit, check for a domain listing
    if not sr_name and c.site == Default and domain:
        c.site = DomainSR(domain)

    if isinstance(c.site, FakeSubreddit):
        c.default_sr = True
def _check_sr_multi_path(self, path_info):
    """Resolve the owner subreddit of a multi path, enforcing edit rights."""
    try:
        owner_sr = Subreddit._by_name(path_info['owner'])
    except NotFound:
        raise RedditError('SUBREDDIT_NOEXIST', code=404)
    can_configure = owner_sr.is_moderator_with_perms(c.user, 'config')
    if not can_configure and not c.user_is_admin:
        raise RedditError('MULTI_CANNOT_EDIT', code=403,
                          fields='multipath')
    return owner_sr
def set_recent_reddits():
    """Populate c.recent_reddits from the 'recent_reddits' user cookie.

    The cookie holds a comma-separated list of fullnames. Lookups are
    best-effort: any failure (the cookie is untrusted client data)
    leaves c.recent_reddits empty.
    """
    names = read_user_cookie('recent_reddits')
    c.recent_reddits = []
    if names:
        try:
            names = filter(None, names.split(','))
            srs = Subreddit._by_fullname(names, data=True,
                                         return_dict=False)
            # Ensure all the objects returned are Subreddits. Due to the nature
            # of _by_fullname its possible to get any type back.
            # BUG FIX: previously this filtered `names` (plain strings), so
            # isinstance() was never true and the result was always empty;
            # filter the looked-up objects instead.
            c.recent_reddits = filter(lambda x: isinstance(x, Subreddit), srs)
        except:
            pass
def set_recent_reddits():
    """Populate c.recent_reddits from the 'recent_reddits' user cookie.

    The cookie holds a comma-separated list of fullnames. Lookups are
    best-effort: any failure (the cookie is untrusted client data)
    leaves c.recent_reddits empty.
    """
    names = read_user_cookie('recent_reddits')
    c.recent_reddits = []
    if names:
        try:
            names = filter(None, names.split(','))
            srs = Subreddit._by_fullname(names, data = True,
                                         return_dict = False)
            # Ensure all the objects returned are Subreddits. Due to the nature
            # of _by_fullname its possible to get any type back.
            # BUG FIX: previously this filtered `names` (plain strings), so
            # isinstance() was never true and the result was always empty;
            # filter the looked-up objects instead.
            c.recent_reddits = filter(lambda x: isinstance(x, Subreddit), srs)
        except:
            pass
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # non-numeric codes are treated as a plain 404
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")
        if srname:
            c.site = Subreddit._by_name(srname)
        if c.render_style not in self.allowed_render_styles:
            # no body on 204/304 so caches don't get confused
            if code not in (204, 304):
                c.response.content = str(code)
            c.response.status_code = code
            return c.response
        elif c.render_style in extensions.API_TYPES:
            data = request.environ.get('extra_error_data', {'error': code})
            c.response.content = websafe_json(json.dumps(data))
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # reject CR/LF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("something really bad just happened.")
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # non-numeric codes are treated as a plain 404
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")
        if srname:
            c.site = Subreddit._by_name(srname)
        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # reject CR/LF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    response.headers['x-sup-id'] = x_sup_id
            return ""
        elif c.render_style not in self.allowed_render_styles:
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            data = request.environ.get('extra_error_data', {'error': code})
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            randmin = {'admin': random.choice(self.admins)}
            failien_url = make_failien_url()
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("something really bad just happened.")
def embeddable_sr(thing):
    """Return thing's subreddit if it's public and not quarantined, else False."""
    if isinstance(thing, Subreddit):
        sr = thing
    elif not thing.sr_id:
        sr = None
    else:
        try:
            sr = Subreddit._byID(thing.sr_id, data=True)
        except NotFound:
            sr = None
    embeddable = (sr is not None and
                  sr.type not in Subreddit.private_types and
                  not sr.quarantine)
    return sr if embeddable else False
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # non-numeric codes are treated as a plain 404
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")
        if srname:
            c.site = Subreddit._by_name(srname)
        if c.render_style not in self.allowed_render_styles:
            # no body on 204/304 so caches don't get confused
            if code not in (204, 304):
                c.response.content = str(code)
            return c.response
        elif c.render_style == "api":
            # NOTE(review): this is not valid JSON (unquoted key) — later
            # revisions use json.dumps here; confirm clients tolerate it.
            c.response.content = "{error: %s}" % code
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 500:
            return redditbroke % (rand.randint(
                1, NUM_FAILIENS), rand_strings.sadmessages)
        elif code == 503:
            return self.send503()
        elif code == 304:
            # NOTE(review): x-sup-id is echoed without CR/LF filtering here —
            # confirm header-injection safety upstream.
            if request.GET.has_key('x-sup-id'):
                c.response.headers['x-sup-id'] = request.GET.get(
                    'x-sup-id')
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("something really bad just happened.")
def __init__(self):
    """Build a per-subreddit traffic report from a textarea of names.

    Splits the posted "subreddits" textarea into names, partitions them
    into resolvable (self.srs) and unknown (self.invalid_srs), and
    renders a traffic report plus a CSV download URL for the request.
    """
    self.srs, self.invalid_srs, self.report = [], [], []
    self.textarea = request.params.get("subreddits")
    if self.textarea:
        requested_srs = [srname.strip()
                         for srname in self.textarea.splitlines()]
        subreddits = Subreddit._by_name(requested_srs)
        for srname in requested_srs:
            if srname in subreddits:
                self.srs.append(srname)
            else:
                self.invalid_srs.append(srname)
        if subreddits:
            self.report = make_subreddit_traffic_report(subreddits.values())
        # preserve the raw textarea in the CSV link so the report matches
        param = urllib.quote(self.textarea)
        self.csv_url = "/traffic/subreddits/report.csv?subreddits=" + param
    Templated.__init__(self)
def GET_document(self):
    """Render the error page appropriate to the ?code= and render style."""
    try:
        code = request.GET.get('code', '')
        srname = request.GET.get('srname', '')
        if srname:
            c.site = Subreddit._by_name(srname)
        if c.render_style not in self.allowed_render_styles:
            return str(code)
        elif code == '403':
            return self.send403()
        elif code == '500':
            return redditbroke % rand_strings.sadmessages
        elif code == '503':
            c.response.status_code = 503
            c.response.headers['Retry-After'] = 1
            c.response.content = toofast
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # last-ditch handler: the error page must never itself error out
        return handle_awful_failure("something really bad just happened.")
# # All portions of the code written by reddit are Copyright (c) 2006-2015 reddit # Inc. All Rights Reserved. ############################################################################### import urllib2 from pylons import g from r2.lib.db.operators import desc from r2.lib.utils import fetch_things2 from r2.lib.media import upload_media from r2.models.subreddit import Subreddit from r2.models.wiki import WikiPage, ImagesByWikiPage all_subreddits = Subreddit._query(sort=desc("_date")) for sr in fetch_things2(all_subreddits): images = sr.images.copy() images.pop("/empties/", None) if not images: continue print 'Processing /r/%s (id36: %s)' % (sr.name, sr._id36) # upgrade old-style image ids to urls for name, image_url in images.items(): if not isinstance(image_url, int): continue print " upgrading image %r" % image_url
def GET_document(self):
    """Dispatch to the appropriate error response for the request.

    Reads ``code``, ``srname``, ``takedown``, ``error_name``, ``message``
    and ``allow_framing`` from the query string, normalizes the code to an
    int (defaulting to 404), then returns either an empty body (204/304),
    a bare code string, a JSON error payload for API render styles, or a
    rendered error page.  Any internal failure is routed through
    handle_awful_failure rather than raising.
    """
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # non-numeric codes are treated as a 404
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', '')
        error_name = request.GET.get('error_name', '')

        if isinstance(c.user, basestring):
            # somehow requests are getting here with c.user unset;
            # fall back to an anonymous user
            c.user_is_loggedin = False
            c.user = UnloggedUser(browser_langs=None)

        if srname:
            # render the error page in the given subreddit's context
            c.site = Subreddit._by_name(srname)

        if request.GET.has_key('allow_framing'):
            c.allow_framing = bool(request.GET['allow_framing'] == '1')

        if (error_name == 'IN_TIMEOUT' and
                not 'usable_error_content' in request.environ):
            # suspended-account interstitial, rendered once and stashed
            # in the environ for the downstream error handler
            timeout_days_remaining = c.user.days_remaining_in_timeout
            errpage = pages.InterstitialPage(
                _("suspended"),
                content=pages.InTimeoutInterstitial(
                    timeout_days_remaining=timeout_days_remaining,
                ),
            )
            request.environ['usable_error_content'] = errpage.render()

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            return ""
        elif c.render_style not in self.allowed_render_styles:
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            # JSON error payload for API clients
            data = request.environ.get('extra_error_data', {'error': code})
            message = request.GET.get('message', '')
            if message:
                data['message'] = message
            if request.environ.get("WANT_RAW_JSON"):
                return scriptsafe_dumps(data)
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            # legal-takedown page for a removed link
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 400:
            return self.send400()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            # humorous 500 page with a random admin name substituted in
            failien_url = make_failien_url()
            sad_message = get_funny_translated_string("500_page")
            sad_message %= {'admin': random.choice(self.admins)}
            sad_message = safemarkdown(sad_message)
            return redditbroke % (failien_url, sad_message)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except Exception as e:
        # last-ditch handler: the error controller itself must not raise
        return handle_awful_failure("ErrorController.GET_document: %r" % e)
def pre(self):
    """Per-request setup hook run before every action.

    Populates cookies, authenticates the user (session and admin
    cookies), sets language/site context, handles the Random /
    RandomNSFW pseudo-subreddit redirects, enforces subreddit bans,
    privacy and over-18 gates, and decides whether custom stylesheets
    may be applied.  Statement order matters throughout: e.g. the
    random-subreddit redirect must run after content language is set.
    """
    c.response_wrappers = []
    MinimalController.pre(self)

    set_cnameframe()

    # populate c.cookies unless we're on the unsafe media_domain
    if request.host != g.media_domain or g.media_domain == g.domain:
        try:
            for k, v in request.cookies.iteritems():
                # minimalcontroller can still set cookies
                if k not in c.cookies:
                    # we can unquote even if it's not quoted
                    c.cookies[k] = Cookie(value=unquote(v), dirty=False)
        except CookieError:
            # the cookie-parsing layer can't handle broken cookies;
            # blank them out so the request can proceed
            request.environ['HTTP_COOKIE'] = ''

    c.firsttime = firsttime()

    # the user could have been logged in via one of the feeds
    maybe_admin = False

    # no logins for RSS feed unless valid_feed has already been called
    if not c.user:
        if c.extension != "rss":
            session_cookie = c.cookies.get(g.login_cookie)
            if session_cookie:
                c.user = valid_cookie(session_cookie.value)
                if c.user:
                    c.user_is_loggedin = True

            # admin mode is a second cookie layered on top of the session
            admin_cookie = c.cookies.get(g.admin_cookie)
            if c.user_is_loggedin and admin_cookie:
                maybe_admin, first_login = valid_admin_cookie(
                    admin_cookie.value)

                if maybe_admin:
                    self.enable_admin_mode(c.user, first_login=first_login)
                else:
                    self.disable_admin_mode(c.user)

        if not c.user:
            c.user = UnloggedUser(get_browser_langs())
            # patch for fixing mangled language preferences
            if (not isinstance(c.user.pref_lang, basestring) or
                not all(isinstance(x, basestring)
                        for x in c.user.pref_content_langs)):
                c.user.pref_lang = g.lang
                c.user.pref_content_langs = [g.lang]
                c.user._commit()

    if c.user_is_loggedin:
        if not c.user._loaded:
            c.user._load()
        c.modhash = c.user.modhash()
        if request.method.upper() == 'GET':
            read_mod_cookie()
        if hasattr(c.user, 'msgtime') and c.user.msgtime:
            c.have_messages = c.user.msgtime
        c.show_mod_mail = Subreddit.reverse_moderator_ids(c.user)
        c.have_mod_messages = getattr(c.user, "modmsgtime", False)
        # admin status requires both a valid admin cookie and membership
        # in g.admins; sponsors include all admins
        c.user_is_admin = maybe_admin and c.user.name in g.admins
        c.user_special_distinguish = c.user.special_distinguish()
        c.user_is_sponsor = c.user_is_admin or c.user.name in g.sponsors

        if request.path != '/validuser' and not g.disallow_db_writes:
            c.user.update_last_visit(c.start_time)

    c.over18 = over18()
    #set_browser_langs()
    set_host_lang()
    set_iface_lang()
    set_content_lang()
    set_recent_clicks()
    # used for HTML-lite templates
    set_colors()

    # set some environmental variables in case we hit an abort
    if not isinstance(c.site, FakeSubreddit):
        request.environ['REDDIT_NAME'] = c.site.name

    # random reddit trickery -- have to do this after the content lang is set
    if c.site == Random:
        c.site = Subreddit.random_reddit()
        redirect_to("/" + c.site.path.strip('/') + request.path)
    elif c.site == RandomNSFW:
        c.site = Subreddit.random_reddit(over18=True)
        redirect_to("/" + c.site.path.strip('/') + request.path)

    if not request.path.startswith("/api/login/"):
        # is the subreddit banned?
        if c.site.spammy() and not c.user_is_admin and not c.error_page:
            ban_info = getattr(c.site, "ban_info", {})
            if "message" in ban_info:
                message = ban_info['message']
            else:
                # default ban message links to a pre-filled unban request
                sitelink = url_escape(add_sr("/"))
                subject = ("/r/%s has been incorrectly banned" %
                           c.site.name)
                link = ("/r/redditrequest/submit?url=%s&title=%s" %
                        (sitelink, subject))
                message = strings.banned_subreddit_message % dict(
                    link=link)
            errpage = pages.RedditError(strings.banned_subreddit_title,
                                        message,
                                        image="subreddit-banned.png")
            request.environ['usable_error_content'] = errpage.render()
            self.abort404()

        # check if the user has access to this subreddit
        if not c.site.can_view(c.user) and not c.error_page:
            errpage = pages.RedditError(strings.private_subreddit_title,
                                        strings.private_subreddit_message,
                                        image="subreddit-private.png")
            request.environ['usable_error_content'] = errpage.render()
            self.abort403()

        #check over 18
        if (c.site.over_18 and not c.over18 and
            request.path not in ("/frame", "/over18") and
            c.render_style == 'html'):
            return self.intermediate_redirect("/over18")

    #check whether to allow custom styles
    c.allow_styles = True
    c.can_apply_styles = self.allow_stylesheets
    if g.css_killswitch:
        c.can_apply_styles = False
    #if the preference is set and we're not at a cname
    elif not c.user.pref_show_stylesheets and not c.cname:
        c.can_apply_styles = False
    #if the site has a cname, but we're not using it
    elif c.site.domain and c.site.css_on_cname and not c.cname:
        c.can_apply_styles = False