def get(self):
    rules = RuleList.getList('gfwlist')
    if rules is None:
        self.error(500)
        return
    self.lastModified(rules.date)
    self.response.headers['Content-Type'] = 'application/x-javascript'
    self.response.out.write(generateJs(rules.toDict()))
def get(self):
    for name, url in (('gfwlist', 'http://autoproxy-gfwlist.googlecode.com/svn/trunk/gfwlist.txt'),):
        r = RuleList.getList(name)
        if r is None:
            r = RuleList(name=name, url=url)
        if r.update():
            logging.info('%s updated to %s', name, r.date)
            if MAIN_SERVER:
                if name == 'gfwlist':
                    memcache.delete('/gfwtest.js', namespace='response')
                memcache.delete('changelog/%s' % name)
                taskqueue.add(url='/tasks/feed_ping', params={'url': 'http://feeds.feedburner.com/%s' % name})
def get(self, urlpart):
    download = self.request.get('download', None) is not None

    # Redirect to usage page for visits from links (obviously not a browser PAC fetcher)
    if MAIN_SERVER and not download and 'Referer' in self.request.headers:
        self.redirect("/usage?u=" + urlpart, permanent=False)
        return

    if not self.parseRequest(urlpart):
        self.error(404)
        return

    rules = RuleList.getList('gfwlist')
    if rules is None:
        self.error(500)
        return

    pacTime = formatdate(timegm(max(self.settingTime, datetime(*parsedate(rules.date)[:6])).timetuple()), False, True)
    self.response.headers['ETag'] = '"' + pacTime.replace(',', '').replace(' ', '') + '"'
    self.lastModified(pacTime)

    # Load balance
    if MAIN_SERVER and len(self.customRules) <= MAX_CUSTOM_RULE_NUMBER_FOR_MIRROR:
        mirror = self.pickMirror()
        if mirror:
            query = ['e=' + urlsafe_b64encode(r) for r in self.customRules]
            if download:
                query.append('download')
            mirror = '%s/%s?%s' % (mirror, self.proxyDict['urlpart'], '&'.join(query))
            logging.debug('Redirect the PAC fetcher to %s', mirror)
            if not DEBUG:
                # A fixed server for a rate-limiting cycle
                self.response.headers['Cache-Control'] = 'public,max-age=%d' % (RATELIMIT_DURATION * 3600)
            self.redirect(mirror, permanent=False)
            return

    if RATELIMIT_ENABLED and self.isRateLimited():
        return

    customJs = autoproxy2pac.rule2js('\n'.join([''] + self.customRules))
    if self.proxyDict['name'] == 'privoxy':
        customJs = privoxyConfCode + customJs

    configs = {
        'proxyString': self.proxyString,
        'defaultString': 'DIRECT',
        'customCodePre': customJs,
    }
    pac = autoproxy2pac.generatePac(rules.toDict(), configs, autoproxy2pac.defaultPacTemplate)

    # Wrap the generated PAC body in a JS Base64 decoder and eval() it on the client
    import base64
    pac = '''function decode64(_1){var _2="ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=";var _3="";var _4,_5,_6;var _7,_8,_9,_a;var i=0;_1=_1.replace(/[^A-Za-z0-9\+\/\=]/g,"");do{_7=_2.indexOf(_1.charAt(i++));_8=_2.indexOf(_1.charAt(i++));_9=_2.indexOf(_1.charAt(i++));_a=_2.indexOf(_1.charAt(i++));_4=(_7<<2)|(_8>>4);_5=((_8&15)<<4)|(_9>>2);_6=((_9&3)<<6)|_a;_3=_3+String.fromCharCode(_4);if(_9!=64){_3=_3+String.fromCharCode(_5);}if(_a!=64){_3=_3+String.fromCharCode(_6);}}while(i<_1.length);return _3;}eval(decode64("%s"))''' % base64.b64encode(pac)

    self.response.headers['Content-Type'] = 'application/x-ns-proxy-autoconfig'
    if download:
        self.response.headers['Content-Disposition'] = 'attachment; filename="autoproxy.pac"'
    self.response.out.write(pac)
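# The pacTime expression above packs several steps into one line; the following is a
# minimal standalone sketch of the same computation, assuming parsedate/formatdate
# come from email.utils and timegm from calendar, as in the stdlib. The sample values
# below are hypothetical, not taken from the app's data.
from calendar import timegm
from datetime import datetime
from email.utils import formatdate, parsedate

settingTime = datetime(2011, 1, 1)               # hypothetical settings timestamp
rulesDate = 'Tue, 15 Feb 2011 06:00:00 GMT'      # hypothetical rules.date string
# Take the later of the two timestamps and re-serialize it as a GMT date string; the
# handler above uses it as Last-Modified and, minus commas and spaces, as the ETag.
latest = max(settingTime, datetime(*parsedate(rulesDate)[:6]))
pacTime = formatdate(timegm(latest.timetuple()), False, True)  # usegmt=True -> '... GMT'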
def get(self): for name, url in (("gfwlist", "http://autoproxy-gfwlist.googlecode.com/svn/trunk/gfwlist.txt"),): r = RuleList.getList(name) if r == None: r = RuleList(name=name, url=url) if r.update(): logging.info("%s updated to %s", name, r.date) if MAIN_SERVER: if name == "gfwlist": memcache.delete("/gfwtest.js", namespace="response") memcache.delete("changelog/%s" % name) taskqueue.add(url="/tasks/feed_ping", params={"url": "http://feeds.feedburner.com/%s" % name})
def get(self, urlpart):
    urlpart = urlpart.lower()
    download = self.request.get('download', None) is not None

    # Redirect to usage page for visits from links (obviously not a browser PAC fetcher)
    if MAIN_SERVER and not download and 'Referer' in self.request.headers:
        self.redirect("/usage?u=" + urlpart, permanent=False)
        return

    proxyString = self.parseProxyString(urlpart)
    if not proxyString:
        self.error(404)
        return

    rules = RuleList.getList('gfwlist')
    if rules is None:
        self.error(500)
        return

    self.response.headers['ETag'] = '"' + rules.date.replace(',', '').replace(' ', '') + '"'
    self.lastModified(rules.date)

    # Load balance
    if MAIN_SERVER:
        mirror = self.pickMirror()
        if mirror:
            mirror = '%s/%s?%s' % (mirror, urlpart, self.request.query_string)
            logging.debug('Redirect the PAC fetcher to %s', mirror)
            if not DEBUG:
                # A fixed server for a rate-limiting cycle
                self.response.headers['Cache-Control'] = 'public,max-age=%d' % (RATELIMIT_DURATION * 3600)
            self.redirect(mirror, permanent=False)
            return

    if RATELIMIT_ENABLED and self.isRateLimited():
        return

    customRules = self.request.get_all('c')
    customJs = autoproxy2pac.rule2js('\n'.join([''] + customRules))
    if urlpart == 'privoxy':
        customJs = privoxyConfCode + customJs

    configs = {
        'proxyString': proxyString,
        'defaultString': 'DIRECT',
        'customCodePre': customJs,
    }
    pac = autoproxy2pac.generatePac(rules.toDict(), configs, autoproxy2pac.defaultPacTemplate)

    self.response.headers['Content-Type'] = 'application/x-ns-proxy-autoconfig'
    if download:
        self.response.headers['Content-Disposition'] = 'attachment; filename="autoproxy.pac"'
    self.response.out.write(pac)
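# A minimal client-side sketch of how this handler is meant to be fetched. The host
# and the "proxy.example.com:8000" urlpart are hypothetical placeholders; the real
# urlpart format is whatever parseProxyString accepts. Custom rules arrive as repeated
# "c" query parameters (matching request.get_all('c') above), and a bare "download"
# parameter turns the response into an attachment.
import urllib
import urllib2

query = urllib.urlencode([('c', '||example.org'),          # hypothetical custom rule
                          ('c', '@@||intranet.example'),   # hypothetical whitelist rule
                          ('download', '')])
pacUrl = 'http://pac-server.example.com/proxy.example.com:8000?' + query
pacText = urllib2.urlopen(pacUrl).read()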
def get(self, name):
    name = name.lower()
    rules = RuleList.getList(name)
    if rules is None:
        self.error(404)
        return

    # Conditional redirect to FeedBurner
    # @see: http://www.google.com/support/feedburner/bin/answer.py?hl=en&answer=78464
    if (self.request.get('raw', None) is None and          # http://host/path/name.rss?raw
            'FeedBurner' not in self.request.user_agent):  # FeedBurner fetcher
        self.redirect('http://feeds.feedburner.com/%s' % name, permanent=False)
        return

    self.lastModified(rules.date)

    start = int(self.request.get('start', 0))
    fetchNum = start + int(self.request.get('num', 20))
    if fetchNum > 1000:
        self.error(412)
        return

    logs = memcache.get('changelog/%s' % name)
    if logs is None or len(logs) < fetchNum:
        diff = ChangeLog.gql("WHERE ruleList = :1 ORDER BY date DESC", rules).fetch(fetchNum)
        logs = map(generateLogFromDiff, diff)
        memcache.add('changelog/%s' % name, logs)

    self.response.headers['Content-Type'] = Feed.mime_type
    # Feed title: "<name> change log"
    f = Feed(title="%s 更新记录" % name,
             link=self.request.relative_url(name),
             description="beta",
             language="zh")
    for item in logs:
        # Item title: "<month>/<day> <name> update: <n> rules added, <n> removed"
        f.add_item(title="%d月%d日 %s 更新: 增加 %d 条, 删除 %d 条" % (item['timestamp'].month, item['timestamp'].day, name, len(item['block']), len(item['unblock'])),
                   link='',
                   description=template.render('changelogRssItem.html', **item),
                   author_name="gfwlist",
                   pubdate=item['timestamp'])
    f.write(self.response.out, 'utf-8')