def locateChild(self, ctx, segments):
    """Route the first URL segment to the matching resource.

    Implements the Nevow IResource.locateChild contract: returns a
    (resource, remaining_segments) tuple, rend.NotFound, or a Deferred
    that fires with one of those when a username lookup is required.
    """
    # Empty path or explicit index page -> render this resource itself.
    if segments[0] == "" or segments[0] == "index.html":
        return self, []
    # All routes (including usernames) are between 4 and 20 characters;
    # reject anything outside that range up front.
    if len(segments[0]) < 4 or len(segments[0]) > 20:
        return rend.NotFound
    elif segments[0] == "site":
        return dyn_site_root(), []
    elif segments[0] == "signup":
        return signup(), []
    elif segments[0] == "upgrade":
        return user_upgrade(), []
    elif segments[0] == "free_account":
        return free_account(), []
    elif segments[0] == "downloads":
        return downloads(), []
    elif segments[0] == "qoop":
        return qoop(), segments[1:]
    if "reset_password" in segments[0]:
        request = inevow.IRequest(ctx)
        # BUGFIX: dict.has_key() is deprecated (removed in Python 3);
        # the `in` operator is equivalent and works on both 2.x and 3.x.
        if 'username' in request.args and 'hash' in request.args:
            return reset_password(), []
        else:
            return rend.NotFound
    if "quick_start_guide" in segments[0]:
        return quick_start_guide(), []
    if "features" in segments[0]:
        return features(), []
    if "developers" in segments[0]:
        return developers(), []
    if "publish" in segments[0]:
        return user_publish("unknown"), segments[1:]
    # BUGFIX: guard len(segments) before touching segments[1] — a bare
    # /community request used to raise IndexError here.
    if "community" in segments[0] and len(segments) > 1 and "feeds" in segments[1]:
        obj = user_homepage("")
        # have to hack this because the user_homepage ctor lowercases it
        obj.username = "******"
        return obj, segments[1:]

    def act_check(count):
        # Callback for the asynchronous username-existence lookup.
        if count:
            return user_homepage(segments[0]), segments[1:]
        # BUGFIX: check the segment count before indexing segments[1];
        # a one-segment path for an unknown user used to IndexError.
        if len(segments) > 1 and segments[1] == "img":
            return dyn_image_handler("noserve", self.app, self.log), segments[2:]
        return rend.NotFound

    d = self.app.api.users.check_exists('username', segments[0])
    d.addCallback(act_check)
    return d
def locateChild(self, ctx, segments):
    """Route the first URL segment to the matching resource.

    Implements the Nevow IResource.locateChild contract: returns a
    (resource, remaining_segments) tuple, rend.NotFound, or a Deferred
    that fires with one of those when a username lookup is required.
    """
    # Empty path or explicit index page -> render this resource itself.
    if segments[0] == "" or segments[0] == "index.html":
        return self, []
    # All routes (including usernames) are between 4 and 20 characters;
    # reject anything outside that range up front.
    if len(segments[0]) < 4 or len(segments[0]) > 20:
        return rend.NotFound
    elif segments[0] == "site":
        return dyn_site_root(), []
    elif segments[0] == "signup":
        return signup(), []
    elif segments[0] == "upgrade":
        return user_upgrade(), []
    elif segments[0] == "free_account":
        return free_account(), []
    elif segments[0] == "downloads":
        return downloads(), []
    elif segments[0] == "qoop":
        return qoop(), segments[1:]
    if "reset_password" in segments[0]:
        request = inevow.IRequest(ctx)
        # BUGFIX: dict.has_key() is deprecated (removed in Python 3);
        # the `in` operator is equivalent and works on both 2.x and 3.x.
        if 'username' in request.args and 'hash' in request.args:
            return reset_password(), []
        else:
            return rend.NotFound
    if "quick_start_guide" in segments[0]:
        return quick_start_guide(), []
    if "features" in segments[0]:
        return features(), []
    if "developers" in segments[0]:
        return developers(), []
    if "publish" in segments[0]:
        return user_publish("unknown"), segments[1:]
    # BUGFIX: guard len(segments) before touching segments[1] — a bare
    # /community request used to raise IndexError here.
    if "community" in segments[0] and len(segments) > 1 and "feeds" in segments[1]:
        obj = user_homepage("")
        # have to hack this because the user_homepage ctor lowercases it
        obj.username = "******"
        return obj, segments[1:]

    def act_check(count):
        # Callback for the asynchronous username-existence lookup.
        if count:
            return user_homepage(segments[0]), segments[1:]
        # BUGFIX: check the segment count before indexing segments[1];
        # a one-segment path for an unknown user used to IndexError.
        if len(segments) > 1 and segments[1] == "img":
            return dyn_image_handler("noserve", self.app, self.log), segments[2:]
        return rend.NotFound

    d = self.app.api.users.check_exists('username', segments[0])
    d.addCallback(act_check)
    return d
# -*- coding: utf-8 -*- # 下载课程信息到本地 # 程序入口 import downloads if __name__ == "__main__": downloads.downloads()
def main(query):
    """Search for papers matching *query*, rank them by social activity,
    and return the ranked list serialized as a JSON string.

    Results are memoized in the module-level ``cache`` dict, which is
    re-persisted to ``PICKLE_FILE`` after every cache miss.
    """
    if query in cache:
        return cache[query]

    article_list = []
    # Each csv() line is pipe-separated; observed field layout:
    #   0=title, 1=url, 3=version count, 5=versions url,
    #   7=comma-separated authors, 8=venue
    # (assumed from the indexing below — TODO confirm against csv()).
    csv_data = csv(query, author='', count=3)
    for line in csv_data:
        fields = line.split('|')
        title = fields[0]
        authors = fields[7].split(', ')
        venue = fields[8].strip()
        version_urls = [fields[1]]
        # Fetch alternate versions only when the result advertises some.
        if len(fields) > 3 and int(fields[3]) > 0:
            version_data = url_get(fields[5], author='', count=20)
            # Randomized delay to avoid hammering the remote service.
            time.sleep(random.uniform(2, 5))
            for subline in version_data:
                version_urls.append(subline.split('|')[1])
        article_list.append((authors, title, version_urls, venue))

    to_sort = []
    for authors, title, urls, venue in article_list:
        tw = social_trends.SocialTrends(authors, title, urls)
        (score, tweets) = tw.paper_tweets()
        recent_downloads = downloads.downloads(title)
        # Keep only papers with some measurable social activity.
        if score > 0 or recent_downloads[0] > 0:
            json_paper = {
                'title': title,
                'authors': authors,
                'urls': urls,
                'venue': venue,
                'tweets': [],
            }
            for (tid, tweet) in tweets:
                screen_name = tweet['user']['screen_name']
                json_paper['tweets'].append({
                    'author': screen_name,
                    'content': tweet['text'],
                    'url': "http://twitter.com/" + screen_name
                           + "/status/" + tweet['id_str'],
                })
            to_sort.append((score, recent_downloads[0], json_paper))
        # Throttle between papers to stay under API rate limits.
        time.sleep(2)

    # Rank by combined score: tweet score dominates, downloads break ties
    # (ascending, matching the original ordering).
    to_json = [entry[2]
               for entry in sorted(to_sort, key=lambda x: x[0] * 10000 + x[1])]
    to_return = json.dumps(to_json)
    cache[query] = to_return
    # BUGFIX: close the pickle file deterministically — the original
    # pickle.dump(cache, open(PICKLE_FILE, 'wb')) leaked the handle.
    with open(PICKLE_FILE, 'wb') as fh:
        pickle.dump(cache, fh)
    return to_return