import json
import logging

from flask import jsonify

# Project-local modules used by the functions below.
import images
import news
import partition
import predict
import search
import sentiment


def get_opinion(lang, lat, long, rad, name):
    # Collect tweets within the radius (in miles) around the coordinates and
    # news articles about the location, then score both with sentiment analysis.
    search.work(str(lang), str(lat) + ',' + str(long) + ',' + str(int(rad)) + 'mi')
    news.getNews(name)
    opinion = sentiment.run_sentiment_analysis('tweets.txt')
    opinion += sentiment.run_sentiment_analysis('news.txt')

    # Prefix positive scores with '+' so the sign is always explicit in the response.
    if opinion > 0:
        opinion = "+" + str(opinion)
    else:
        opinion = str(opinion)

    # Build the response payload.
    senti = [{
        'Language': lang,
        'Latitude': lat,
        'Longitude': long,
        'Radius': rad,
        'Opinion': opinion
    }]
    logging.info("Opinion of city is: " + opinion)
    return jsonify(senti)
def write_processed_tweets(raw_tweet_file, keyword_file, output_file):
    # Partition raw tweets by candidate and attach sentiment scores to each group.
    partitioned_tweets = partition.partition(raw_tweet_file, keyword_file)
    sentiment_tweets = {}
    for candidate, tweets in partitioned_tweets.items():
        sentiment_tweets[candidate] = sentiment.run_sentiment_analysis(tweets, 'words')

    # Write the results as a JSON object keyed by candidate, emitting the
    # braces and separators by hand so each tweet lands on its own line.
    with open(output_file, 'w') as data_file:
        data_file.write('{\n')
        first_candidate = True
        for candidate, tweets in sentiment_tweets.items():
            if first_candidate:
                first_candidate = False
            else:
                data_file.write(',\n')
            data_file.write('"' + candidate + '": [\n')
            first_item = True
            for tweet in tweets:
                if first_item:
                    first_item = False
                else:
                    data_file.write(',\n')
                data_file.write(json.dumps(tweet))
            data_file.write(']\n')
        data_file.write('}')
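# The hand-rolled JSON writing above can also be expressed with json.dump once
# the per-candidate results are collected. A minimal sketch, assuming each
# analyzed tweet is a JSON-serializable dict; the function name
# write_processed_tweets_json is hypothetical and not part of the original code.
def write_processed_tweets_json(raw_tweet_file, keyword_file, output_file):
    partitioned_tweets = partition.partition(raw_tweet_file, keyword_file)
    sentiment_tweets = {
        candidate: sentiment.run_sentiment_analysis(tweets, 'words')
        for candidate, tweets in partitioned_tweets.items()
    }
    with open(output_file, 'w') as data_file:
        # json.dump handles quoting, escaping, and separators in one call.
        json.dump(sentiment_tweets, data_file, indent=2)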
def get_opinion(lang, lat, long, rad, name):
    # Collect tweets within the radius (in miles) around the coordinates, news
    # articles, and images for the location, then score tweets and news separately.
    search.work(str(lang), str(lat) + ',' + str(long) + ',' + str(int(rad)) + 'mi')
    news.getNews(name)
    twitterOpinion = sentiment.run_sentiment_analysis('tweets.txt')
    newsOpinion = sentiment.run_sentiment_analysis('news.txt')
    visual = images.process_image_search(name)
    overallOpinion = twitterOpinion + newsOpinion

    # Prefix positive scores with '+' so the sign is always explicit in the response.
    def signed(score):
        return "+" + str(score) if score > 0 else str(score)

    # Build the response payload.
    senti = [{
        'Language': lang,
        'Latitude': lat,
        'Longitude': long,
        'Radius': rad,
        'News Opinion': signed(newsOpinion),
        'Twitter Opinion': signed(twitterOpinion),
        'Opinion': signed(overallOpinion),
        'Visual': visual
    }]
    logging.info("Opinion of city is: " + signed(overallOpinion))
    return jsonify(senti)
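# A sketch of how get_opinion might be exposed as an HTTP endpoint, since it
# returns a jsonify() response. The route path, query parameter names, default
# values, and the `app` instance are assumptions for illustration, not part of
# the original code.
from flask import Flask, request

app = Flask(__name__)

@app.route('/opinion')
def opinion_endpoint():
    return get_opinion(
        request.args.get('lang', 'en'),
        float(request.args['lat']),
        float(request.args['long']),
        float(request.args.get('rad', 10)),
        request.args['name'])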
def run_analysis(filename, keyword_file, print_all=True):
    # Partition raw tweets by candidate, score each group, then feed the
    # per-candidate scores to the prediction step.
    partitioned_tweets = partition.partition(filename, keyword_file)
    analyzed_tweets = {}
    for candidate, tweets in partitioned_tweets.items():
        analyzed_tweets[candidate] = sentiment.run_sentiment_analysis(tweets, 'words')
    predict.predict(analyzed_tweets, print_all)
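# Example invocation of the full pipeline; the file names below are
# placeholders, not files from the original project.
if __name__ == '__main__':
    run_analysis('raw_tweets.json', 'keywords.txt', print_all=False)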