def crawl_comment_by_page(mid, page_num):
    # Fetch one page of comments for the weibo identified by mid,
    # parse it, and persist the results.
    cur_url = BASE_URL.format(mid, page_num)
    html = get_page(cur_url, auth_level=1, is_ajax=True)
    comment_datas = comment.get_comment_list(html, mid)
    CommentOper.add_all(comment_datas)
    if page_num == 1:
        # Reaching the first page marks the whole weibo as comment-crawled.
        WbDataOper.set_weibo_comment_crawled(mid)
    return html, comment_datas
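
# A minimal, hypothetical driver for the function above: crawl every comment
# page of one weibo. get_comment_page_count() is an assumption -- it is not
# defined in this file and stands in for whatever parses the pager out of the
# first page's HTML.
def crawl_comments_for_weibo(mid):
    html, _ = crawl_comment_by_page(mid, 1)
    total_pages = get_comment_page_count(html)  # assumed helper, not in source
    for page_num in range(2, total_pages + 1):
        crawl_comment_by_page(mid, page_num)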
def test_weibodata_oper(self):
    # Seed one row so exactly one weibo is pending comment/repost crawling.
    db_session.execute("insert into {} (weibo_id) values ('{}')".format(
        weibo_data.name, FAKE_ID))
    assert WbDataOper.get_wb_by_mid(FAKE_ID) is not None
    assert len(WbDataOper.get_weibo_comment_not_crawled()) == 1
    assert len(WbDataOper.get_weibo_repost_not_crawled()) == 1
    WbDataOper.set_weibo_comment_crawled(FAKE_ID)
    WbDataOper.set_weibo_repost_crawled(FAKE_ID)
    assert len(WbDataOper.get_weibo_comment_not_crawled()) == 0
    assert len(WbDataOper.get_weibo_repost_not_crawled()) == 0
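
# Sketch of the same insert done with bound parameters instead of string
# concatenation, assuming db_session is a SQLAlchemy session (text() and
# parameter binding are standard SQLAlchemy); this avoids quoting bugs if
# FAKE_ID ever contains a quote character.
from sqlalchemy import text

def insert_fake_weibo_row():
    db_session.execute(
        text("insert into {} (weibo_id) values (:mid)".format(weibo_data.name)),
        {"mid": FAKE_ID})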
def crawl_comment_by_page(mid, page_num, session):
    try:
        cur_url = BASE_URL.format(mid, page_num)
        html = get_page(cur_url, auth_level=1, is_ajax=True)
        comment_datas, seed_ids = comment.get_comment_list(html, mid)
    except SoftTimeLimitExceeded:
        crawler.error("comment SoftTimeLimitExceeded mid={mid} page_num={page_num}".format(
            mid=mid, page_num=page_num))
        # Retry the same page with the session passed through, and return so
        # the unbound comment_datas below is never referenced on this path.
        return crawl_comment_by_page(mid, page_num, session)
    CommentOper.add_all(comment_datas, session)
    SeedidsOper.insert_seeds(seed_ids, session)
    if page_num == 1:
        WbDataOper.set_weibo_comment_crawled(mid, session)
    return html, comment_datas
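
# Hypothetical caller for the session-taking variant above, assuming a
# SQLAlchemy session factory (e.g. a sessionmaker); make_session is an
# assumed name, not part of the project.
from contextlib import closing

def crawl_comment_page_in_session(mid, page_num, make_session):
    with closing(make_session()) as session:
        result = crawl_comment_by_page(mid, page_num, session)
        session.commit()  # persist comments, seeds and the crawled flag together
        return result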
def crawl_comment_by_page(mid, page_num):
    try:
        cur_url = BASE_URL.format(mid, page_num)
        html = get_page(cur_url, auth_level=1, is_ajax=True)
        comment_datas = comment.get_comment_list(html, mid)
    except SoftTimeLimitExceeded:
        crawler.error("comment SoftTimeLimitExceeded mid={mid} page_num={page_num}".format(
            mid=mid, page_num=page_num))
        # Re-enqueue this page for a fresh attempt and bail out:
        # comment_datas is unbound on this path.
        app.send_task('tasks.comment.crawl_comment_by_page',
                      args=(mid, page_num),
                      queue='comment_page_crawler',
                      routing_key='comment_page_info')
        return None, None
    CommentOper.add_all(comment_datas)
    if page_num == 1:
        WbDataOper.set_weibo_comment_crawled(mid)
    return html, comment_datas
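
# Hypothetical producer that enqueues the first comment page for a batch of
# mids, reusing the exact task name, queue, and routing key the task above
# re-enqueues itself with; dispatch_comment_crawls is an assumed name.
def dispatch_comment_crawls(mids):
    for mid in mids:
        app.send_task('tasks.comment.crawl_comment_by_page',
                      args=(mid, 1),
                      queue='comment_page_crawler',
                      routing_key='comment_page_info')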