# Exemplo n.º 1
# 0
def getTweets_condensed():
    """Condense streamed tweets from recent_tweets.json into process_tweets_db.json.

    Tweets with empty text or a sentinel (-1) coordinate are skipped.
    """
    out_path = "process_tweets_db.json"
    with open('recent_tweets.json', 'r+', encoding='utf8') as stream:
        for raw in stream:
            condenser = Condenser_Tweets(json.loads(raw))
            lat, lon = condenser.location[0], condenser.location[1]
            usable = condenser.text != '' and lat != -1 and lon != -1
            if usable:
                write_file(out_path, condenser.updatedJson())
                print("Stream processed with Stream")
def get_historic_condensed():
    """Condense historic tweets from 1_2018.json into historic_dbdata.json."""
    target = "historic_dbdata.json"
    with open('1_2018.json', 'r+', encoding='utf8') as src:
        for idx, raw in enumerate(src):
            if idx == 0:
                # First line is the enclosing JSON header, not a record.
                continue
            if "coordinates" not in str(raw):
                continue
            if str(raw[-2]) == ',':
                # Drop the trailing ",\n" that separates records.
                raw = raw[:-2]
            elif str(raw[-3:-1]) == "]}":
                # End of the JSON array — nothing more to read.
                break
            condenser = Condenser_Tweets(json.loads(raw))
            write_file(target, condenser.updatedJson())
def get_historic_condensed():
    """Condense tweets from twitter-melb.json into historic_dbdata.json.

    Only lines after line 355000 are processed; tweets with empty text or
    a sentinel (-1) coordinate are skipped.

    NOTE(review): this redefines get_historic_condensed from earlier in
    the file; at import time this later definition wins — confirm which
    one is intended.
    """
    # BUG FIX: `Path` was used without ever being imported, raising
    # NameError at call time (the file only imported json and Condenser).
    Path('historic_dbdata.json').touch()  # ensure the output file exists
    out_path = 'historic_dbdata.json'
    with open('twitter-melb.json', 'r+', encoding='utf8') as tweet_file:
        for line_num, line in enumerate(tweet_file):
            if line_num <= 355000:
                continue
            # Only lines ending in ",\n" are complete records inside the
            # surrounding JSON array.
            if str(line[-2]) != ',':
                continue
            condenser = Condenser_Tweets(json.loads(line[:-2]))
            if condenser.text == '' or condenser.location[0] == -1 \
                    or condenser.location[1] == -1:
                continue
            write_file(out_path, condenser.updatedJson())
def getTweets_condensed():
    """Condense streamed and user-timeline tweets into their database files.

    NOTE(review): redefines getTweets_condensed from earlier in the file;
    this later definition is the one in effect, and it applies no
    text/coordinate filtering.
    """
    stream_db = "process_tweets_db.json"
    with open('recent_tweets.json', 'r+', encoding='utf8') as handle:
        for raw in handle:
            condensed = Condenser_Tweets(json.loads(raw)).updatedJson()
            write_file(stream_db, condensed)
            print("Stream processed with Stream")

    user_db = "user_tweets_db.json"
    with open('user_list_timeline.json', 'r+', encoding='utf8') as handle:
        for raw in handle:
            condensed = Condenser_Tweets(json.loads(raw)).updatedJson()
            write_file(user_db, condensed)
            print("Search processed tweets for database")
# Exemplo n.º 5
# 0
# Created on Tue May  5 23:47:49 2020
#
# @author: Sania Khan

import json
from pathlib import Path

from Condenser import Condenser_Tweets

def write_file(file, data):
    """Append *data* as one JSON line to *file*.

    Args:
        file: path of the output file; opened in append mode and created
            if missing.
        data: any JSON-serializable object.
    """
    # BUG FIX: the handle was opened and closed manually, so an exception
    # from json.dumps leaked the file descriptor; `with` closes it always.
    with open(file, 'a+') as outfile:
        outfile.write(json.dumps(data) + "\n")
v2 = "process_tweets_db.json"

if __name__ == "__main__":
    # Condense freshly streamed tweets into the database file.
    with open('recent_tweets.json', 'r+', encoding='utf8') as tweet_handle:
        for data_item in tweet_handle:
            tweetDetails = json.loads(data_item)
            stream_condenser = Condenser_Tweets(tweetDetails)
            data = stream_condenser.updatedJson()
            # BUG FIX: each write was wrapped in a pointless
            # `with open(v2) as f:` whose handle was never used and which
            # raised FileNotFoundError on the first run (read mode on a
            # file that does not exist yet); write_file already opens the
            # output in append mode.
            write_file(v2, data)

    # Condense tweets gathered from user timelines.
    # NOTE(review): these are appended to the same file (v2) as the
    # stream tweets; the function version above writes them to a separate
    # user_tweets_db.json — confirm which destination is intended.
    with open('user_list_timeline.json', 'r+', encoding='utf8') as tweet_handle:
        for data_item in tweet_handle:
            tweetDetails = json.loads(data_item)
            search_condenser = Condenser_Tweets(tweetDetails)
            data = search_condenser.updatedJson()
            write_file(v2, data)