def crawl_comment_by_page(mid, page_num):
    cur_url = BASE_URL.format(mid, page_num)
    html = get_page(cur_url, auth_level=1, is_ajax=True)
    comment_datas = comment.get_comment_list(html, mid)
    CommentOper.add_all(comment_datas)
    if page_num == 1:
        WbDataOper.set_weibo_comment_crawled(mid)
    return html, comment_datas
def crawl_comment_by_page(mid, page_num, session):
    try:
        cur_url = BASE_URL.format(mid, page_num)
        html = get_page(cur_url, auth_level=1, is_ajax=True)
        comment_datas, seed_ids = comment.get_comment_list(html, mid)
    except SoftTimeLimitExceeded:
        crawler.error(
            "comment SoftTimeLimitExceeded mid={mid} page_num={page_num}".
            format(mid=mid, page_num=page_num))
        # Retry the same page after a soft timeout; return the retry's
        # result, since nothing was parsed on this attempt.
        return crawl_comment_by_page(mid, page_num, session)
    CommentOper.add_all(comment_datas, session)
    SeedidsOper.insert_seeds(seed_ids, session)
    if page_num == 1:
        WbDataOper.set_weibo_comment_crawled(mid, session)
    return html, comment_datas
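# A minimal sketch of driving the session-aware variant above; the engine
# URL and session setup are assumptions for illustration, not taken from
# this repo's db layer, and the mid value is illustrative.
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker

engine = create_engine('mysql+pymysql://user:password@localhost/weibo')
Session = sessionmaker(bind=engine)

session = Session()
try:
    crawl_comment_by_page(4244968959004196, 1, session)
finally:
    session.close()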
def crawl_comment_by_page(mid, page_num):
    try:
        cur_url = BASE_URL.format(mid, page_num)
        html = get_page(cur_url, auth_level=1, is_ajax=True)
        comment_datas = comment.get_comment_list(html, mid)
    except SoftTimeLimitExceeded:
        crawler.error(
            "comment SoftTimeLimitExceeded mid={mid} page_num={page_num}".
            format(mid=mid, page_num=page_num))
        # Re-enqueue this page as a fresh task and bail out, since no
        # data was parsed on this attempt.
        app.send_task(
            'tasks.comment.crawl_comment_by_page',
            args=(mid, page_num),
            queue='comment_page_crawler',
            routing_key='comment_page_info')
        return None, []
    CommentOper.add_all(comment_datas)
    if page_num == 1:
        WbDataOper.set_weibo_comment_crawled(mid)
    return html, comment_datas
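# A minimal sketch of the Celery wiring that app.send_task above presumes;
# the broker URL, queue declaration, and soft_time_limit value are
# assumptions for illustration, not values taken from this repo.
from celery import Celery
from celery.exceptions import SoftTimeLimitExceeded  # raised in the tasks above
from kombu import Queue

app = Celery('weibo_tasks', broker='redis://127.0.0.1:6379/5')
app.conf.task_queues = (
    Queue('comment_page_crawler', routing_key='comment_page_info'),
)

@app.task(ignore_result=True, soft_time_limit=300)
def crawl_comment_by_page(mid, page_num):
    ...  # body as in the task definitions above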
from aip import AipNlp
from db.dao import CommentOper
from config import get_baidu_args
import xlwt
import datetime
import time

if __name__ == '__main__':
    args = get_baidu_args()
    APP_ID = args['app_id']
    API_KEY = args['api_key']
    SECRET_KEY = args['secret_key']
    client = AipNlp(str(APP_ID), API_KEY, SECRET_KEY)

    book = xlwt.Workbook(encoding='utf-8', style_compression=0)
    sheet = book.add_sheet('test', cell_overwrite_ok=True)
    columns = ['positive_prob', 'confidence', 'negative_prob']
    for col, name in enumerate(columns):
        sheet.write(0, col, name)

    infos = CommentOper.get_all_comment_by_weibo_id(4244968959004196)
    now_time = datetime.datetime.now().strftime('%Y-%m-%d-%H-%M-%S')
    i = 1
    for info in infos:
        print(i)
        try:
            result = client.sentimentClassify(info.comment_cont)
            if result and 'items' in result:
                # The snippet was truncated here; writing one row per
                # comment is a reconstruction based on the header row above.
                item = result['items'][0]
                for col, name in enumerate(columns):
                    sheet.write(i, col, item[name])
                i += 1
            # Pause between calls; Baidu's free tier rate-limits QPS.
            time.sleep(0.5)
        except Exception as e:
            print(e)
    book.save('sentiment-{}.xls'.format(now_time))
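# For reference, a successful sentimentClassify call returns a dict shaped
# roughly like the following (values illustrative); the row-writing loop
# above reads positive_prob, confidence and negative_prob from items[0]:
# {'log_id': 1234567890,
#  'text': '...',
#  'items': [{'positive_prob': 0.97, 'confidence': 0.93,
#             'negative_prob': 0.03, 'sentiment': 2}]}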
def test_comment_oper(self):
    db_session.execute(
        "insert into {table} ({table}.comment_id) values ('{fake_id}')"
        .format(table=weibo_comment.name, fake_id=FAKE_ID))
    assert CommentOper.get_comment_by_id(FAKE_ID) is not None
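# An equivalent test using SQLAlchemy's statement API instead of string
# concatenation; a sketch assuming weibo_comment is the same Table object
# referenced in the test above.
from sqlalchemy import insert

def test_comment_oper(self):
    db_session.execute(insert(weibo_comment).values(comment_id=FAKE_ID))
    assert CommentOper.get_comment_by_id(FAKE_ID) is not None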
from jieba.analyse import tfidf
from db.dao import CommentOper

if __name__ == '__main__':
    infos = CommentOper.get_all_comment_by_weibo_id(4081978523493142)
    # Concatenate every comment into one document before keyword extraction.
    text = ""
    for info in infos:
        text += info.comment_cont
    key_words = tfidf(text)
    for key_word in key_words:
        print(key_word)
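# jieba's tfidf is an alias for jieba.analyse.extract_tags, so it also
# accepts topK and withWeight; a sketch with illustrative parameter values.
from jieba.analyse import tfidf

text = "..."  # the concatenated comment text from the script above
for word, weight in tfidf(text, topK=20, withWeight=True):
    print(word, weight)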