Example #1
def process_statics():

    file_path_list = topic_sta1.getAllFile(weibofilefolder)
    topic_sta1.topic_followers(file_path_list)
Example #2
def get_result_dic_2():
    file_path_list = topic_sta1.getAllFile(weibofilefolder)
    output_dic = topic_sta1.city_keyword_emotion(file_path_list)
    return output_dic
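Example #2 is the only variant that returns its result instead of writing output as a side effect. A minimal sketch of how the returned dictionary could be persisted, assuming it contains only JSON-serializable types (an assumption, since its structure is defined inside topic_sta1.city_keyword_emotion):

import json

def save_result_dic_2(output_path):
    # Hypothetical helper: serialize the dictionary returned by
    # get_result_dic_2() (Example #2) to a UTF-8 JSON file.
    result = get_result_dic_2()
    with open(output_path, 'w', encoding='utf-8') as f:
        json.dump(result, f, ensure_ascii=False, indent=2)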
Example #3
def process_statics():
    file_path_list = topic_sta1.getAllFile(weibofilefolder)
    topic_sta1.topic_location(file_path_list)
Example #4
def store_data():
    file_path_list = topic_sta1.getAllFile(weibofilefolder)
    topic_sta1.collect_city_file(file_path_list)
Example #5
                            '好工作','平等,机会','白手起家','成为,富人','个体,自由','安享晚年','收入,足够','个人努力',
                            '祖国强大','中国经济,持续发展','父辈,更好']
    emotion = [-1, 0, 1, 2, 3, 4]
    # Create one output folder per (sentiment, keyword) combination.
    for current_emotion in emotion:
        for current_keyword in keywords_folder_list:
            current_folder = output_file_1 + str(current_emotion) + '/' + current_keyword
            if not os.path.exists(current_folder):
                os.makedirs(current_folder)



if __name__ == '__main__':

    # Without sentiment
    # store_data()

    # With time and sentiment
    # make_dirs()
    #
    file_path_list = topic_sta1.getAllFile(weibofilefolder)
    # for i in file_path_list:
    #     t = threading.Thread(target=collect_time_emotion_city_file, args=(i,))
    #     t.start()

    # Process the corpus files in parallel with 8 worker processes.
    pool = multiprocessing.Pool(processes=8)
    for i in file_path_list:
        pool.apply_async(topic_sta1.multi_collect_time_emotion_city_file_nospam, (i,))
    pool.close()
    pool.join()
    # Count the number of Weibo posts per keyword and city
    # topic_sta1.calc_city_doc()
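Every example here relies on topic_sta1.getAllFile(folder) to enumerate the Weibo corpus files before handing them to an analysis routine. The real implementation lives in topic_sta1 and is not shown; a minimal sketch, assuming it simply walks the folder recursively and returns the collected paths as a list:

import os

def get_all_file_sketch(folder):
    # Hypothetical stand-in for topic_sta1.getAllFile: recursively collect
    # every regular file under `folder`.
    file_path_list = []
    for root, _dirs, files in os.walk(folder):
        for name in files:
            file_path_list.append(os.path.join(root, name))
    return file_path_list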
Example #6
def get_data():
    file_path_list = topic_sta1.getAllFile(topic_sta1.weibofilefolder)
    # Output the result
    topic_sta1.keyword_emotion_time(file_path_list)
Example #7
def get_result_dic(filefolder):
    file_path_list = topic_sta1.getAllFile(filefolder)
    result_dic = topic_sta1.keyword_coOccurrence(file_path_list)
    print(result_dic)
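If the co-occurrence dictionary maps keyword pairs to counts, which is an assumption based on the function name rather than the source, the printed output is easier to read when sorted by frequency:

def print_sorted_cooccurrence(filefolder):
    # Hypothetical variant of get_result_dic: print keyword co-occurrence
    # counts from most to least frequent (assumes the values are numeric).
    file_path_list = topic_sta1.getAllFile(filefolder)
    result_dic = topic_sta1.keyword_coOccurrence(file_path_list)
    for pair, count in sorted(result_dic.items(), key=lambda kv: kv[1], reverse=True):
        print(pair, count)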