def query_meituan_hs():
    # The sentiment-highlighting logic that used to live inline here
    # (pagination via the request's offset/pageSize, SnowNLP sentence-level
    # scoring, red <span> markup for sentences scoring <= 0.15, plus some
    # abandoned jieba segmentation/TextRank keyword experiments) was factored
    # out into comment_process(); a reconstruction follows this handler.
    data = comment_process(modles.HsMeituanModel.query)
    return custom_response.SuccessResponse(data=data[0], total=data[1])
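
# comment_process() is imported from elsewhere in the project; the dead code
# removed above suggests its shape. The reconstruction below is a sketch
# under those assumptions, not the canonical helper: it pages through a
# SQLAlchemy query using the request's offset/pageSize, splits each comment
# into sentences with SnowNLP, scores each sentence, and wraps strongly
# negative ones (sentiment <= 0.15) in a red <span> before returning the
# rendered rows together with the total row count.
def _comment_process_sketch(query):
    from flask import request
    from snownlp import SnowNLP

    offset = request.values.get("offset", 0, type=int)
    pagesize = request.values.get("pageSize", 10, type=int)
    rows = query.offset(offset).limit(pagesize)
    total = query.count()  # total number of comments

    lst = []
    for page in rows:
        row = ""
        s = SnowNLP(page.comment)
        # Split one comment into sentences.
        for sentence in s.sentences:
            # Sentiment score of each sentence after splitting.
            single = SnowNLP(sentence)
            if single.sentiments <= 0.15:
                # Strongly negative sentence: render it in red.
                row += f"<span style='color:red'>{sentence}</span>"
            else:
                row += sentence
        lst.append(f"<p>{row}</p>")
    return lst, total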

def query_qunar_hs():
    data = comment_process(modles.HsQunarModel.query)
    return custom_response.SuccessResponse(data=data[0], total=data[1])


def query_meituan_xym():
    data = comment_process(modles.XymMeituanModel.query)
    return custom_response.SuccessResponse(data=data[0], total=data[1])

def analysis_xym(year):
    resp = analysis(XYMModel, year)
    return custom_response.SuccessResponse(data=resp)


def analysis_hs(year):
    resp = analysis(HuaShanModel, year)
    return custom_response.SuccessResponse(data=resp)

def resp_xym():
    resp = resp_process(XYMModel)
    return custom_response.SuccessResponse(data=resp[0], total=resp[1])


def resp_hs():
    resp = resp_process(HuaShanModel)
    return custom_response.SuccessResponse(data=resp[0], total=resp[1])

def query_xiec_xym():
    data = comment_process(modles.XymXieChengModel.query)
    return custom_response.SuccessResponse(data=data[0], total=data[1])


def query_trip_xym():
    data = comment_process(modles.XymTripModel.query)
    return custom_response.SuccessResponse(data=data[0], total=data[1])
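
# These handlers read pagination parameters from flask.request and return
# custom_response.SuccessResponse objects, so they are presumably registered
# as Flask routes elsewhere in the project. A hypothetical wiring is sketched
# below; the URL paths and the `app` object are assumptions, not taken from
# this file.
if __name__ == "__main__":
    from flask import Flask

    app = Flask(__name__)
    # Hypothetical URL rules; the real prefixes live in the project's routing.
    app.add_url_rule("/api/hs/meituan/comments", view_func=query_meituan_hs)
    app.add_url_rule("/api/xym/analysis/<int:year>", view_func=analysis_xym)
    app.run(debug=True)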