def test_get_matplotlib_tweets(self):
    """Fetch tweets for the term "matplotlib" and verify the expected count.

    NOTE(review): the original docstring said "bioconductor" — that looked
    like a copy-paste leftover; this test queries "matplotlib".
    """
    client = TwitterClient()
    fetched = client.get_tweets("matplotlib")
    client.write_results(fetched)
    assert len(fetched) == 49
def input_text():
    """Handle the text-submission form.

    On POST: fetch tweets for the submitted term, tag its part of speech,
    grow the on-disk corpus, build a similarity model, and render a
    generated definition via define.html.
    """
    if request.method == "POST":
        print(request.form)
        data = request.form['submitText']
        print(data)
        query = data

        # Fetch tweets for the query and tag the query's part of speech.
        api = TwitterClient()
        tweets = api.get_tweets(query=query, count=100)
        langP = LangProcessor()
        pos = langP.get_pos(query, tweets)

        # Append the new tweets to the corpus file, then re-read the whole
        # corpus as one token list per line for model building.
        text = "corpus.txt"
        langP.addtoCorpus(text, tweets)
        token_rows = []
        with open("corpus.txt", "r") as corpus_file:
            for line in corpus_file:
                token_rows.append(line.split())

        model = langP.build_model(token_rows, query)
        # Top three most-similar terms reported by the model.
        similars = [model[0][0], model[1][0], model[2][0]]

        definition = langP.generateDefinition(query, pos, similars)
        print(definition)

        # NOTE(review): pos_display is computed but never passed to the
        # template — confirm whether define.html should receive it.
        pos_display = ''
        if 'NN' in pos:
            pos_display = 'noun'
        if 'VB' in pos:
            pos_display = 'verb'

        return render_template("define.html", definition=definition)
def main():
    """Interactive CLI: search tweets for a keyword, run sentiment analysis,
    write the results to <keyword>.csv, and offer pie-chart / word-cloud
    visualisations until the user exits.
    """
    tools = Tools()
    validInput = True
    while True:
        print("|******************************************|")
        print("|Welcome to Sentimental Analysis on Twitter|")
        print("|******************************************|")
        searchTerm = input("Enter Keyword/Tag to search about: ").replace(" ", "")
        # Reject purely numeric or too-short keywords before hitting the API.
        if searchTerm.isnumeric():
            print("Please enter a valid keyword")
            continue
        elif len(searchTerm) <= 3:
            print("Please enter a keyword with more than 3 characters")
            continue
        while True:
            noOfTerms = input("Enter how many tweets to search: (< 500): ")
            if noOfTerms.isnumeric():
                if int(noOfTerms) > 500:
                    print("Defaulting to number of tweets to 500")
                    noOfTerms = 500  # was int(500); the literal is already an int
            else:
                print("Please enter a numeric value < 500.")
                continue
            twitterClient = TwitterClient()
            rawTweets = twitterClient.get_tweets(searchTerm, int(noOfTerms))
            # Clean the tweets before adding them to the dictionary.
            if len(rawTweets) != 0:
                cleanedTweets = tools.clean_tweet(rawTweets, searchTerm)
                dataAnalysis = DataAnalysis(cleanedTweets)
                tweetSentiment = dataAnalysis.sentimentAnalysis(searchTerm)
                tweetsDict = dict()
                for tweetId in cleanedTweets:
                    # Each CSV row: [raw text, cleaned text, sentiment label].
                    tweetsDict[tweetId] = [
                        rawTweets[tweetId],
                        cleanedTweets[tweetId],
                        tweetSentiment[tweetId],
                    ]
                tools.write_csv(searchTerm + '.csv', 'w', tweetsDict)
                validInput = True
                break
            else:
                print("No tweets found. Please try searching another trending keyword.\n")
                validInput = False
                break
        if not validInput:
            continue
        while True:
            ch = input("Choose from the below options:\n\t1. Pie Chart\n\t2. Word Cloud\n\t3. Search another "
                       "keyword\n\t4. Exit\nEnter your choice: ")
            if ch == '1':
                print("Data visualisation in Pie Chart")
                print("Loading...")
                dataAnalysis.generatePieChart(searchTerm)
            elif ch == '2':
                print("Data visualisation in Word Cloud\n")
                print("Loading...")
                dataAnalysis.generateWordCloud(searchTerm)
            elif ch == '3':
                break
            elif ch == '4':
                # BUGFIX: the source had this string literal split across a
                # raw newline (a syntax error); reconstructed as one literal.
                print("Thank You. Good Bye!")
                exit(0)
            else:
                print("Incorrect choice. Please re enter your choice.\n")
                continue
from components import sources
from components.method import method_page
from components.dashboards import create_dashboards
from components.tweetcard import create_deck
import json

# Load the list of cities (with geocodes) from which to sample tweets.
with open("locations.json", "r") as location_file:
    locations = json.load(location_file)

# Collect 20 'blm' tweets per configured city into one flat list.
api = TwitterClient()
tweets = []
for location in locations:
    geocode = str(location['geocode'])
    city_tweets = api.get_tweets(
        query='blm',
        count=20,
        geocode=geocode,
        city=location['city'],
    )
    tweets.extend(city_tweets)
def api(query):
    """API endpoint: run sentiment analysis over tweets matching *query*.

    Returns a JSON payload with per-sentiment counts and percentages, the
    top-5 most-retweeted tweets per sentiment, word counts, the current
    top-5 trending topics, and an overall sentiment verdict.
    """
    query_limit = int(os.getenv('QUERY_LIMIT'))
    # Renamed local from `api`, which shadowed this function's own name.
    client = TwitterClient()
    try:
        tweets = client.get_tweets(query, query_limit)
    except TweepError:
        # Twitter API failure (typically rate limiting) — report it as 429.
        return jsonify({
            "status_code": 429,
            "message": "Too many requests. Try again later"
        })
    if len(tweets) == 0:
        return jsonify({"status_code": 400, "message": "Not a valid query"})

    # Classify each tweet and bucket it by sentiment.
    positive = 0
    negative = 0
    neutral = 0
    positive_tweets = []
    negative_tweets = []
    neutral_tweets = []
    for tweet in tweets:
        sentiment = get_sentiment(tweet['text'])
        if sentiment == 1:
            tweet['sentiment'] = 'positive'
            positive += 1
            positive_tweets.append(tweet)
        elif sentiment == -1:
            tweet['sentiment'] = 'negative'
            negative += 1
            negative_tweets.append(tweet)
        else:
            tweet['sentiment'] = 'neutral'
            neutral += 1
            neutral_tweets.append(tweet)

    total_per = positive + negative + neutral  # > 0: tweets is non-empty here
    positive_per = round(((positive / total_per) * 100), 2)
    negative_per = round(((negative / total_per) * 100), 2)
    neutral_per = round(((neutral / total_per) * 100), 2)

    # BUGFIX: if every tweet is neutral, positive + negative == 0 and the
    # original code raised ZeroDivisionError. Fall back to 0.0 means
    # (which yields the 'Controversial' verdict below — NOTE(review):
    # consider a dedicated 'Neutral' verdict for this case).
    mean_total = positive + negative
    if mean_total:
        positive_mean = round(((positive / mean_total) * 100), 2)
        negative_mean = round(((negative / mean_total) * 100), 2)
    else:
        positive_mean = negative_mean = 0.0

    # Most-retweeted first, so the [:5] slices below pick the top tweets.
    positive_tweets = sorted(positive_tweets, key=itemgetter('retweet_count'), reverse=True)
    negative_tweets = sorted(negative_tweets, key=itemgetter('retweet_count'), reverse=True)
    neutral_tweets = sorted(neutral_tweets, key=itemgetter('retweet_count'), reverse=True)

    positive_word_count = get_word_count(positive_tweets, query)
    negative_word_count = get_word_count(negative_tweets, query)
    neutral_word_count = get_word_count(neutral_tweets, query)

    # Overall verdict: a near-even polarized split reads as controversial.
    if abs(positive_mean - negative_mean) < 10.0:
        sentiment = 'Controversial'
    elif positive_mean > negative_mean:
        sentiment = 'Positive'
    else:
        sentiment = 'Negative'

    WOE_ID = 1  # presumably the worldwide "Where On Earth" id — TODO confirm
    trending = client.get_trending(WOE_ID)
    return jsonify({
        'sentiment': sentiment,
        'count': {
            'positive': positive,
            'negative': negative,
            'neutral': neutral,
            'total': total_per
        },
        'mean': {
            'positive': positive_mean,
            'negative': negative_mean
        },
        'results': {
            'positive': positive_per,
            'negative': negative_per,
            'neutral': neutral_per
        },
        'status_code': 200,
        'message': 'Request Successful!',
        'trending': trending[:5],
        'tweets': {
            'positive_tweets': positive_tweets[:5],
            'negative_tweets': negative_tweets[:5],
            'neutral_tweets': neutral_tweets[:5]
        },
        'word_count': {
            'positive': positive_word_count,
            'negative': negative_word_count,
            'neutral': neutral_word_count
        },
        'query': query.title()
    })
def main():
    """Run the retweet bot loop: every 30 seconds, fetch tweets, score
    their sentiment, and retweet the selected ones. Runs forever."""
    client = TwitterClient()
    while True:
        scored = client.get_tweets_sentiments(client.get_tweets())
        client.retweet(scored)
        time.sleep(30)