Example #1
from collect import crawler
from analysis import analizer
from visualize import visualizer

pagename = "jtbcnews"
from_date = "2018-05-01"
to_date = "2018-05-24"

if __name__ == "__main__":
    # postList = crawler.fb_get_post_list(pagename, from_date, to_date)
    # print(postList)

    dataString = analizer.json_to_str("D:/fb/jtbcnews.json", 'comments_str')
    count_data = analizer.count_wordfreq(dataString)
    print(count_data.most_common(20))  # most_common(20) shows only the top 20 words

    # dictWords = dict(count_data.most_common(20))
    # visualizer.show_graph_bar(dictWords, pagename)

    dictWords = dict(count_data.most_common(20))
    visualizer.wordcloud(dictWords, pagename)
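
The analysis step above relies on two helpers from analysis/analizer, whose source is not shown on this page. The following is only a minimal sketch of what json_to_str and count_wordfreq plausibly do, inferred from how the examples call them; the real module (e.g. its tokenization, which may use a Korean morphological analyzer) may differ.

import json
from collections import Counter

def json_to_str(file_path, key):
    # Load the crawled JSON file and join the values stored under `key`
    # (e.g. "message_str" or "comments_str") into one long string.
    # Assumes the file holds a list of post dictionaries.
    with open(file_path, encoding="utf-8") as f:
        posts = json.load(f)
    return " ".join(post.get(key, "") for post in posts)

def count_wordfreq(data_string):
    # Count how often each whitespace-separated token occurs; Counter
    # provides the most_common(n) method used by every example on this page.
    return Counter(data_string.split())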
Example #2
from collect import crawler
from analysis import analizer
from visualize import visualizer

pagename = "chosun"
from_date = "2018-05-22"
to_date = "2018-05-24"

if __name__ == "__main__":
    # # Collection
    # postList = crawler.fb_get_post_list(pagename,from_date,to_date)
    # print(postList)

    # Analysis
    dataString = analizer.json_to_str("D:/JavaStudy/imformation/chosun.json",
                                      "message_str")
    count_data = analizer.count_wordfreq(dataString)
    print(count_data)
    dictword = dict(count_data.most_common(20))

    # Graph
    visualizer.show_graph_bar(dictword, pagename)

    # Word cloud
    visualizer.wordcloud(dictword, pagename)
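
Both visualization calls come from visualize/visualizer, which is also not included here. Below is a minimal sketch of what show_graph_bar and wordcloud could look like, assuming matplotlib and the wordcloud package; the font family and font_path are placeholders for whatever Korean-capable font the real module configures.

import matplotlib.pyplot as plt
from wordcloud import WordCloud

def show_graph_bar(dict_words, pagename):
    # Bar chart of word frequencies; a Korean font (Malgun Gothic here, an
    # assumption) is needed so Hangul labels are not rendered as boxes.
    plt.rc("font", family="Malgun Gothic")
    plt.bar(range(len(dict_words)), list(dict_words.values()))
    plt.xticks(range(len(dict_words)), list(dict_words.keys()), rotation=75)
    plt.title(pagename)
    plt.show()

def wordcloud(dict_words, pagename):
    # Word cloud built from the {word: count} dictionary; font_path is a
    # placeholder and must point to a font that supports Korean.
    wc = WordCloud(font_path="C:/Windows/Fonts/malgun.ttf",
                   background_color="white",
                   width=800, height=600).generate_from_frequencies(dict_words)
    plt.imshow(wc, interpolation="bilinear")
    plt.axis("off")
    plt.title(pagename)
    plt.show()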
Example #3
from analysis.analizer import json_to_str, count_wordfreq
from collect.crawler import fb_get_post_list
from visualize.visualizer import show_gragh_bar, word_cloud

pagename = "BlizzHeroesKR"
# pagename = "WarcraftKR"
# pagename = "BlizzardKR"
# pagename = "jtbcnews"
# pagename = "chosun"
from_date = "2016-10-10"
to_date = "2018-05-23"
file_path = "D:/Bigdata/facebook/%s.json" % pagename

if __name__ == '__main__':
    # Collection
    fb_get_post_list(pagename, from_date, to_date)

    # Analysis
    data_string = json_to_str(file_path, "message_str")
    count_data = count_wordfreq(data_string)
    dict_word = dict(count_data.most_common(35))

    # Graph
    show_gragh_bar(dict_word, pagename)
    word_cloud(dict_word, pagename)
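
The collection step (fb_get_post_list) is what writes the JSON file the later steps read. Since collect/crawler is not shown on this page, here is only a rough sketch of the usual approach: paging through the Facebook Graph API with requests and dumping the posts to disk. The API version, field list, output path, and ACCESS_TOKEN are assumptions, not the actual module.

import json
import requests

ACCESS_TOKEN = "APP_ID|APP_SECRET"  # placeholder; a real app access token is required

def fb_get_post_list(pagename, from_date, to_date):
    # Request the page's posts between from_date and to_date.
    url = ("https://graph.facebook.com/v2.12/%s/posts"
           "?fields=message,created_time&since=%s&until=%s&access_token=%s"
           % (pagename, from_date, to_date, ACCESS_TOKEN))
    posts = []
    while url:
        data = requests.get(url).json()
        posts.extend(data.get("data", []))
        url = data.get("paging", {}).get("next")  # follow pagination until exhausted
    # Save the collected posts so the analysis step can read them later.
    with open("%s.json" % pagename, "w", encoding="utf-8") as f:
        json.dump(posts, f, ensure_ascii=False, indent=4)
    return posts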
Example #4
from collect import crawler as cr
from analysis import analizer as an
from visualize import visualizer as vi
import simplejson

pagename = "TheHeraldBusiness"
from_date = "2018-04-01"
to_date = "2018-05-29"

if __name__ == "__main__":

    # Collection
    postList = cr.fb_get_post_list(pagename, from_date, to_date)
    print(postList)

    # Analysis
    dataString = an.json_to_str(
        "/Users/WOOSEUNGMI/Desktop/2018/javaStudy/facebook/TheHeraldBusiness.json",
        "message_str")  # 파일경로+경로명, key값(dic의)
    count_data = an.count_wordfreq(dataString)
    print(count_data)  # print how many times each word was used
    # convert the Counter result into a dictionary
    dictWord = dict(count_data.most_common(20))  # keep only the top 20 words

    # Graph
    vi.show_graph_bar(dictWord, pagename)  # figure out the font name to use

    # Word cloud
    vi.wordcloud(dictWord, pagename)
Example #5
from collect import crawler
from analysis import analizer
from visualize import visualizer

pagename = "tvchosunnews"
from_date = "2017-01-01"
to_date = "2018-10-31"

if __name__=="__main__":

    # Collection
    postList = crawler.fb_get_post_list(pagename, from_date, to_date)
    print(postList)

    # Analysis
    dataString = analizer.json_to_str("D:/javaStudy/facebook/jtbcnews.json", "message_str")
    count_data = analizer.count_wordfreq(dataString)
    print(count_data)
    dictWord = dict(count_data.most_common(40))

    # Graph
    visualizer.show_graph_bar(dictWord, pagename)
    visualizer.wordcloud(dictWord, pagename)
Example #6
from collect import crawler
from analysis import analizer
from visualize import visualizer

#pagename = "jtbcnews"
pagename = "chosun"
from_date = "2017-01-01"
to_date = "2017-10-31"

if __name__ == '__main__':

    # Collect and save
    postList = crawler.fb_get_post_list(pagename, from_date, to_date)
    print(postList)

    # Analysis
    datastring = analizer.json_to_str("D:/spring/fb/%s.json" % pagename,
                                      "message_str")
    count_data = analizer.count_wordfreq(datastring)
    print(count_data)  # we will draw the chart from this data
    dictWord = dict(count_data.most_common(20))  # convert the list into a dictionary

    # Graph
    visualizer.show_graph_bar(dictWord, pagename)
    # Word cloud
    visualizer.worldcloud(dictWord, pagename)
Example #7
from analysis import analizer
from visualize import visualizer

pagename = "chosun"
from_date = "2017-01-01"
to_date = "2018-05-23"
path = "D:/javaStudy/"
f_ex = ".json"
filename = path + pagename + f_ex

if __name__ == "__main__":
    # Collection
    # postList = crawler.fb_get_post_list(pagename, from_date, to_date)
    # print(postList)

    # Analysis
    dataString = analizer.json_to_str(filename, "message_str")
    count_data = analizer.count_wordfreq(dataString)

    # with open("d:/javaStudy/analysis_" + f_name + ".json", 'w', encoding='utf-8') as outfile:
    #     json_string = json.dumps(count_data, indent=4, sort_keys=True, ensure_ascii=False)
    #     outfile.write(json_string)

    print("카운트데이터 : ", count_data)

    dictWord1 = dict(count_data.most_common(20))
    dictWord2 = dict(count_data.most_common(50))

    # Graph
    visualizer.show_graph_bar(dictWord1, pagename)
    visualizer.wordcloud(dictWord2, pagename)
Example #8
from collect import crawler
from analysis import analizer
from visualize import visualizer

pagename = "chosun"
from_date = "2018-02-09"
to_date = "2018-02-30"

if __name__ == "__main__":

    # Collection
    result = crawler.fb_get_post_list(pagename, from_date, to_date)
    print(result)

    # Analysis
    dataString = analizer.json_to_str("D:/javaStudy/facebook/chosun.json",
                                      "message")
    # print(dataString)
    dictWords = analizer.count_wordfreq(dataString)
    #count_data = analizer.count_wordfreq(dataString)
    #print(count_data)
    #dictWords = dict(count_data.most_common(20))
    print(type(dictWords))
    print(dictWords)
    # Graph
    visualizer.show_graph_bar(dictWords, pagename)
    visualizer.wordcloud(dictWords, pagename)
Example #9
from collect import crawler
from analysis import analizer
from visualize import visualizer

pagename = "chosun"
from_date = "2018-05-01"
to_date = "2018-05-24"

if __name__=="__main__":
    #수집
    result=crawler.fb_get_post_list(pagename,from_date,to_date)
    print(result)
    # Analysis
    dataString = analizer.json_to_str("d:/" + pagename + ".json", 'message_str')
    data = analizer.count_wordfreq(dataString)
    dictWords = dict(data.most_common(20))
    print(data)
    # Graph
    visualizer.show_graph_bar(dictWords, pagename)
    # Word cloud
    visualizer.wordcloud(dictWords, pagename)
Example #10
from collect import crawler
from analysis import analizer
from visualize import visualizer

pagename = "channelanews"
from_date = "2018-05-30"
to_date = "2018-05-31"

if __name__ == "__main__":

    # Collection
    postList = crawler.fb_get_post_list(pagename, from_date, to_date)
    print(postList)

    # Analysis
    dataString = analizer.json_to_str(
        "C:/Users/aran0/Desktop/BIT/python/facebook/channelanews.json",
        "message_str")
    count_data = analizer.count_wordfreq(dataString)
    print(count_data)

    dictWord = dict(count_data.most_common(20))

    # Graph
    visualizer.show_graph_bar(dictWord, pagename)
    visualizer.wordcloud(dictWord, pagename)