def GET_document(self):
    """Render the error document for the status code in the query string.

    Query parameters:
        code     -- the HTTP status code, as a string (compared literally)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)

    Cookies are cleared so no cookie state leaks onto error responses.
    """
    try:
        # no cookies on errors
        c.cookies.clear()
        code = request.GET.get('code', '')
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")
        if srname:
            c.site = Subreddit._by_name(srname)
        if c.render_style not in self.allowed_render_styles:
            # non-page render styles get just the bare status code
            return str(code)
        elif takedown and code == '404':
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == '403':
            return self.send403()
        elif code == '500':
            return redditbroke % rand_strings.sadmessages
        elif code == '503':
            c.response.status_code = 503
            # header values must be strings, not ints
            c.response.headers['Retry-After'] = '1'
            c.response.content = toofast
            return c.response
        elif code == '304':
            if request.GET.has_key('x-sup-id'):
                # reject CRLF to prevent response-header injection
                x_sup_id = request.GET.get('x-sup-id')
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: this controller must never raise
        return handle_awful_failure("something really bad just happened.")
def calc_rising():
    """Return [(link_fullname, sr_id)] of "rising" links, best first.

    A link is a rising candidate when its view count is still below half
    the average of the top-10 counts; candidates are ranked by upvotes
    relative to views, decayed by age in hours.
    """
    sr_count = count.get_link_counts()
    link_count = dict((k, v[0]) for k, v in sr_count.iteritems())
    link_names = Link._by_fullname(sr_count.keys(), data=True)

    # max is half the average of the top 10 counts
    counts = link_count.values()
    counts.sort(reverse=True)
    maxcount = sum(counts[:10]) / 20

    # prune the list
    rising = [(n, link_names[n].sr_id)
              for n in link_names.keys()
              if link_count[n] < maxcount]

    cur_time = datetime.now(g.tz)

    def score(pair):
        name = pair[0]
        link = link_names[name]
        # use the full elapsed time: timedelta.seconds alone ignores the
        # .days component, which made day-old links look freshly posted
        age = cur_time - link._date
        hours = (age.days * 24) + age.seconds / 3600 + 1
        return float(link._ups) / (max(link_count[name], 1) * hours)

    def r(x):
        return 1 if x > 0 else -1 if x < 0 else 0

    # highest score first (Python 2 cmp-style sort)
    rising.sort(lambda x, y: r(score(y) - score(x)))
    return rising
def calc_rising():
    """Return [(link_fullname, sr_id)] of "rising" links, best first.

    The threshold is half the average of the top (up to) 10 counts; a
    link rises while its count is still below that threshold.
    """
    sr_count = count.get_link_counts()
    link_count = dict((k, v[0]) for k, v in sr_count.iteritems())
    link_names = Link._by_fullname(sr_count.keys(), data=True)

    # Half the average of the top (up to) 10 counts.  The divisor must be
    # parenthesized: "sum / 2. * min(...)" MULTIPLIED by min(...) instead
    # of dividing, which broke the threshold entirely (per the original
    # author's own comment).  Guard len(counts)==0 to avoid dividing by 0.
    counts = link_count.values()
    counts.sort(reverse=True)
    if counts:
        maxcount = sum(counts[:10]) / (2. * min(10, len(counts)))
    else:
        maxcount = 0

    # prune the list (debug prints and the no-op "False or" removed)
    rising = [(n, link_names[n].sr_id)
              for n in link_names.keys()
              if link_count[n] < maxcount]

    cur_time = datetime.now(g.tz)

    def score(pair):
        name = pair[0]
        link = link_names[name]
        # NOTE(review): timedelta.seconds ignores .days, so links older
        # than a day look new -- confirm before relying on this decay.
        hours = (cur_time - link._date).seconds / 3600 + 1
        return float(link._ups) / (max(link_count[name], 1) * hours)

    def r(x):
        return 1 if x > 0 else -1 if x < 0 else 0

    # highest score first (Python 2 cmp-style sort)
    rising.sort(lambda x, y: r(score(y) - score(x)))
    return rising
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code          -- numeric status code (non-numeric falls back to 404)
        srname        -- optional subreddit context for the rendered page
        takedown      -- link fullname for DMCA takedown pages (404 only)
        allow_framing -- '1' permits the page to be framed
    """
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        # StatusBasedRedirect will override this anyway, but we need this
        # here for pagecache to see.
        response.status_int = code

        if srname:
            c.site = Subreddit._by_name(srname)

        if request.GET.has_key('allow_framing'):
            c.allow_framing = bool(request.GET['allow_framing'] == '1')

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # refuse CRLF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    response.headers['x-sup-id'] = x_sup_id
            return ""
        elif c.render_style not in self.allowed_render_styles:
            # non-page render styles get just the bare status code
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            # API clients get a JSON error body
            data = request.environ.get('extra_error_data', {'error': code})
            if request.environ.get("WANT_RAW_JSON"):
                return scriptsafe_dumps(data)
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            # "you broke reddit" page with a random admin's sad message
            randmin = {'admin': random.choice(self.admins)}
            failien_url = make_failien_url()
            sad_message = safemarkdown(rand_strings.sadmessages % randmin)
            return redditbroke % (failien_url, sad_message)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except Exception as e:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("ErrorController.GET_document: %r" % e)
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get("code", "")
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get("srname", "")
        takedown = request.GET.get("takedown", "")

        # StatusBasedRedirect will override this anyway, but we need this
        # here for pagecache to see.
        response.status_int = code

        if srname:
            c.site = Subreddit._by_name(srname)

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            if request.GET.has_key("x-sup-id"):
                x_sup_id = request.GET.get("x-sup-id")
                # refuse CRLF to prevent response-header injection
                if "\r\n" not in x_sup_id:
                    response.headers["x-sup-id"] = x_sup_id
            return ""
        elif c.render_style not in self.allowed_render_styles:
            # non-page render styles get just the bare status code
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            # API clients get a JSON error body
            data = request.environ.get("extra_error_data", {"error": code})
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            # "you broke reddit" page with a random admin's sad message
            randmin = {"admin": random.choice(self.admins)}
            failien_url = make_failien_url()
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            # 204/304 must never carry a body; everything else gets the code
            if code not in (204, 304):
                c.response.content = str(code)
            c.response.status_code = code
            return c.response
        elif c.render_style == "api":
            # API clients get a JSON error body
            data = request.environ.get('extra_error_data', {'error': code})
            c.response.content = json.dumps(data)
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            # "you broke reddit" page with a random failien and admin message
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(
                1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # refuse CRLF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def process_link(msg): fname = msg.body link = Link._by_fullname(msg.body, data=True) try: TimeoutFunction(_set_media, 30)(embedly_services, link) except TimeoutFunctionException: print "Timed out on %s" % fname except KeyboardInterrupt: raise except: print "Error fetching %s" % fname print traceback.format_exc()
def process_link(msg): fname = msg.body link = Link._by_fullname(msg.body, data=True) try: TimeoutFunction(_set_media, 30)(link) except TimeoutFunctionException: print "Timed out on %s" % fname except KeyboardInterrupt: raise except: print "Error fetching %s" % fname print traceback.format_exc()
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            # 204/304 must never carry a body; everything else gets the code
            if code not in (204, 304):
                c.response.content = str(code)
            c.response.status_code = code
            return c.response
        elif c.render_style in extensions.API_TYPES:
            # API clients get a JSON error body
            data = request.environ.get('extra_error_data', {'error': code})
            c.response.content = websafe_json(json.dumps(data))
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            # "you broke reddit" page with a random failien and admin message
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # refuse CRLF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # refuse CRLF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    response.headers['x-sup-id'] = x_sup_id
            return ""
        elif c.render_style not in self.allowed_render_styles:
            # non-page render styles get just the bare status code
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            # API clients get a JSON error body
            data = request.environ.get('extra_error_data', {'error': code})
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            # "you broke reddit" page with a random admin's sad message
            randmin = {'admin': random.choice(self.admins)}
            failien_url = make_failien_url()
            return redditbroke % (failien_url,
                                  rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def calc_rising():
    """Return rising links as (fullname, score, sr_id) triples.

    Only links with more than one upvote are considered; the score is
    upvotes divided by the link's view count, and the result is ordered
    best-first.
    """
    link_counts = count.get_link_counts()
    links = Link._by_fullname(link_counts.keys(), data=True)

    def score(link):
        seen = link_counts[link._fullname][0]
        return float(link._ups) / max(seen, 1)

    # candidates: anything having more than one upvote
    rising = [(link._fullname, score(link), link.sr_id)
              for link in links.values()
              if link._ups > 1]

    # best score first
    rising.sort(key=lambda item: item[1], reverse=True)
    return rising
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional community context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subsciteit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            # 204/304 must never carry a body; everything else gets the code
            if code not in (204, 304):
                c.response.content = str(code)
            return c.response
        elif c.render_style == "api":
            # minimal JSON-ish error body for API clients
            c.response.content = "{error: %s}" % code
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 500:
            # "you broke it" page with a random failien and admin message
            randmin = {'admin': rand.choice(self.admins)}
            failien_name = 'youbrokeit%d.png' % rand.randint(1, NUM_FAILIENS)
            failien_url = static(failien_name)
            return sciteitbroke % (failien_url,
                                   rand_strings.sadmessages % randmin)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                x_sup_id = request.GET.get('x-sup-id')
                # refuse CRLF to prevent response-header injection
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            # 204/304 must never carry a body; everything else gets the code
            if code not in (204, 304):
                c.response.content = str(code)
            return c.response
        elif c.render_style == "api":
            # minimal JSON-ish error body for API clients
            c.response.content = "{error: %s}" % code
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 500:
            return redditbroke % (rand.randint(
                1, NUM_FAILIENS), rand_strings.sadmessages)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                # reject CRLF to prevent response-header injection
                # (the sibling variant of this handler already guards this)
                x_sup_id = request.GET.get('x-sup-id')
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get("code", "")
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get("srname", "")
        takedown = request.GET.get("takedown", "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            # 204/304 must never carry a body; everything else gets the code
            if code not in (204, 304):
                c.response.content = str(code)
            return c.response
        elif c.render_style == "api":
            # minimal JSON-ish error body for API clients
            c.response.content = "{error: %s}" % code
            return c.response
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 500:
            return redditbroke % (rand.randint(1, NUM_FAILIENS),
                                  rand_strings.sadmessages)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key("x-sup-id"):
                x_sup_id = request.GET.get("x-sup-id")
                # refuse CRLF to prevent response-header injection
                if "\r\n" not in x_sup_id:
                    c.response.headers["x-sup-id"] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code     -- numeric status code (non-numeric falls back to 404)
        srname   -- optional subreddit context for the rendered page
        takedown -- link fullname for DMCA takedown pages (404 only)
    """
    try:
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', "")

        if srname:
            c.site = Subreddit._by_name(srname)

        if c.render_style not in self.allowed_render_styles:
            # code is already an int here; the old str(int(code)) also
            # raised ValueError for an empty ?code= parameter
            c.response.content = str(code)
            return c.response
        elif c.render_style == "api":
            # minimal JSON-ish error body for API clients
            c.response.content = "{error: %s}" % code
            return c.response
        # code was converted to int above, but these branches compared it
        # against STRING literals ('404', '403', ...), so none of them
        # could ever match and every error fell through to the 404 page.
        # Compare against ints, as the sibling variants of this handler do.
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 403:
            return self.send403()
        elif code == 500:
            return redditbroke % (rand.randint(1, NUM_FAILIENS),
                                  rand_strings.sadmessages)
        elif code == 503:
            return self.send503()
        elif code == 304:
            if request.GET.has_key('x-sup-id'):
                # reject CRLF to prevent response-header injection
                x_sup_id = request.GET.get('x-sup-id')
                if '\r\n' not in x_sup_id:
                    c.response.headers['x-sup-id'] = x_sup_id
            return c.response
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("something really bad just happened.")
def GET_document(self):
    """Render the error page for the HTTP status named in the query string.

    Query parameters:
        code          -- numeric status code (non-numeric falls back to 404)
        srname        -- optional subreddit context for the rendered page
        takedown      -- link fullname for DMCA takedown pages (404 only)
        error_name    -- symbolic error name (IN_TIMEOUT gets an interstitial)
        allow_framing -- '1' permits the page to be framed
        message       -- extra message included in API error bodies
    """
    try:
        c.errors = c.errors or ErrorSet()
        # clear cookies the old fashioned way
        c.cookies = Cookies()

        code = request.GET.get('code', '')
        try:
            code = int(code)
        except ValueError:
            # anything non-numeric is treated as a plain not-found
            code = 404
        srname = request.GET.get('srname', '')
        takedown = request.GET.get('takedown', '')
        error_name = request.GET.get('error_name', '')

        if isinstance(c.user, basestring):
            # somehow requests are getting here with c.user unset
            c.user_is_loggedin = False
            c.user = UnloggedUser(browser_langs=None)

        if srname:
            c.site = Subreddit._by_name(srname)

        if request.GET.has_key('allow_framing'):
            c.allow_framing = bool(request.GET['allow_framing'] == '1')

        if (error_name == 'IN_TIMEOUT' and
            not 'usable_error_content' in request.environ):
            # pre-render the suspension interstitial so the generic error
            # machinery serves it instead of a bare error page
            timeout_days_remaining = c.user.days_remaining_in_timeout
            errpage = pages.InterstitialPage(
                _("suspended"),
                content=pages.InTimeoutInterstitial(
                    timeout_days_remaining=timeout_days_remaining,
                ),
            )
            request.environ['usable_error_content'] = errpage.render()

        if code in (204, 304):
            # NEVER return a content body on 204/304 or downstream
            # caches may become very confused.
            return ""
        elif c.render_style not in self.allowed_render_styles:
            # non-page render styles get just the bare status code
            return str(code)
        elif c.render_style in extensions.API_TYPES:
            # API clients get a JSON error body
            data = request.environ.get('extra_error_data', {'error': code})
            message = request.GET.get('message', '')
            if message:
                data['message'] = message
            if request.environ.get("WANT_RAW_JSON"):
                return scriptsafe_dumps(data)
            return websafe_json(json.dumps(data))
        elif takedown and code == 404:
            link = Link._by_fullname(takedown)
            return pages.TakedownPage(link).render()
        elif code == 400:
            return self.send400()
        elif code == 403:
            return self.send403()
        elif code == 429:
            return self.send429()
        elif code == 500:
            # "you broke reddit" page with a random admin's sad message
            failien_url = make_failien_url()
            sad_message = get_funny_translated_string("500_page")
            sad_message %= {'admin': random.choice(self.admins)}
            sad_message = safemarkdown(sad_message)
            return redditbroke % (failien_url, sad_message)
        elif code == 503:
            return self.send503()
        elif c.site:
            return self.send404()
        else:
            return "page not found"
    except Exception as e:
        # boundary handler: the error controller itself must never raise
        return handle_awful_failure("ErrorController.GET_document: %r" % e)
def _process_link(fname):
    """Resolve *fname* to its Link and populate the link's media info."""
    set_media(Link._by_fullname(fname, data=True))
def process_message(msg):
    """Extract keywords for the link whose fullname is in the message body."""
    extract_keywords(Link._by_fullname(msg.body, data=True))
def _process_link(fname): print "media: Processing %s" % fname link = Link._by_fullname(fname, data=True, return_dict=False) set_media(link)
def _process_link(fname):
    """Resolve *fname* to its Link and populate the link's media info."""
    set_media(Link._by_fullname(fname, data=True, return_dict=False))