def initTweetStreaming():
    """Start the asynchronous, English-only Twitter filter stream.

    Builds a TwitterClient for auth, attaches MyStreamListener, and filters
    on the terms in charSets.filterSet. A 401 error backs off for 60 seconds;
    any other TweepError or unexpected exception is re-raised.
    """
    # creating object of TwitterClient Class
    api = TwitterClient()
    myStreamListener = MyStreamListener()
    try:
        myStream = tweepy.Stream(auth=api.auth, listener=myStreamListener)
        myStream.filter(track=charSets.filterSet, stall_warnings=True,
                        is_async=True, languages=["en"])
    # various exception handling blocks
    except KeyboardInterrupt:
        sys.exit()
    except AttributeError:
        # Best-effort: log and swallow, preserving the original intent.
        print('AttributeError')
    except tweepy.TweepError as e:
        print('Exception')
        print(e)
        # BUG FIX: the original tested `'401' in e`, which raises TypeError on
        # an exception object; the substring check must run against str(e).
        if '401' in str(e):
            print('401 exception response')
            print(e)
            sleep(60)
        else:
            # raise an exception if another status code was returned,
            # we don't like other kinds
            raise
    except Exception:
        print('Unhandled exception')
        raise
def test_twitter_client():
    """Smoke test: the latest tweet exists and contains the #Bitcoin hashtag."""
    client = TwitterClient()
    newest = client.get_latest_tweet()
    assert_not_none(newest)
    assert_true("#Bitcoin" in newest)
def test_get_matplotlib_tweets(self):
    """Check that a search for "matplotlib" returns exactly 49 results and
    that the results can be written out without error."""
    client = TwitterClient()
    results = client.get_tweets("matplotlib")
    client.write_results(results)
    assert len(results) == 49
def search():
    """Handle the search form POST: fetch sentiment data for the submitted
    query and render it on the result page."""
    print(f"Method called {request.method} {request.form}")
    query = request.form['query']
    count = 200  # fixed sample size per search
    print(f"Query {query}")
    client = TwitterClient()
    payload = client.main(query, count)
    payload["query"] = query
    return render_template('result.html', title="page", data=payload)
def plot2(self):
    """Plot likes and retweets of this user's timeline as a function of time."""
    client = TwitterClient()
    analyzer = TweetAnalyzer()
    api = client.get_twitter_client_api()
    timeline = api.user_timeline(screen_name=self.getUser_name())
    df = analyzer.tweets_to_data_frame2(timeline)

    # Overlay both series on a single 15x5 figure indexed by tweet date.
    for column in ("likes", "retweets"):
        series = pd.Series(data=df[column].values, index=df['date'])
        series.plot(figsize=(15, 5), label=column, legend=True)
    plt.show()
def input_text():
    """POST handler: look up tweets for the submitted term, extend the corpus,
    train a similarity model, and render a generated definition."""
    if request.method == "POST":
        print(request.form)
        data = request.form['submitText']
        print(data)
        query = data

        # creating object of TwitterClient Class, then fetch a tweet sample
        api = TwitterClient()
        tweets = api.get_tweets(query=query, count=100)

        langP = LangProcessor()
        pos = langP.get_pos(query, tweets)
        langP.addtoCorpus("corpus.txt", tweets)

        # Re-read the accumulated corpus as a list of token lists.
        with open("corpus.txt", "r") as corpus_file:
            list_of_lists = [line.split() for line in corpus_file]

        model = langP.build_model(list_of_lists, query)
        # Top three most-similar terms from the model.
        similars = [model[rank][0] for rank in range(3)]

        definition = langP.generateDefinition(query, pos, similars)
        print(definition)

        # NOTE(review): pos_display is computed but never used below —
        # preserved as-is pending a decision on whether to render it.
        pos_display = ''
        if 'NN' in pos:
            pos_display = 'noun'
        if 'VB' in pos:
            pos_display = 'verb'

        return render_template("define.html", definition=definition)
def __init__(self, rbot):
    """Create the priority work/reply queues and the worker threads.

    Threads are constructed as daemons but not started here.
    """
    self.reply_q = queue.PriorityQueue()
    self.job_q = queue.PriorityQueue()

    workers = RedditWorkers(rbot, self.job_q, self.reply_q)
    handler = JobHandlerWorker(VisionOCR, TwitterClient(),
                               self.job_q, self.reply_q)

    # Daemon threads die with the main process.
    self.reply_worker_t = Thread(target=workers.reply_worker, daemon=True)
    self.notif_listener_t = Thread(target=workers.notif_listener, daemon=True)
    self.sub_feed_listener_t = Thread(target=workers.sub_feed_listener,
                                      daemon=True)
    self.score_listener_t = Thread(target=workers.score_listener, daemon=True)
    self.job_handler_t = Thread(target=handler.job_handler, daemon=True)
def __init__(self):
    """Initialise the bot's collaborators: Twitter client, weather source, logger."""
    # Underlying API handle obtained from the project's TwitterClient wrapper.
    self.twitter_client = TwitterClient().get_client()
    # Weather data source — presumably queried per city; confirm against usage.
    self.weather_requester = WeatherRequester()
    # Named logger; handlers/levels are configured elsewhere.
    self.logger = logging.getLogger('HottestCityBot')
def main():
    """Interactive CLI: search tweets for a keyword, run sentiment analysis,
    write the results to CSV, and offer pie-chart / word-cloud visualisations.
    """
    tools = Tools()
    validInput = True
    while True:
        print("|******************************************|")
        print("|Welcome to Sentimental Analysis on Twitter|")
        print("|******************************************|")
        searchTerm = input("Enter Keyword/Tag to search about: ").replace(" ", "")
        if searchTerm.isnumeric():
            print("Please enter a valid keyword")
            continue
        elif len(searchTerm) <= 3:
            print("Please enter a keyword with more than 3 characters")
            continue
        while True:
            noOfTerms = input("Enter how many tweets to search: (< 500): ")
            if noOfTerms.isnumeric():
                if int(noOfTerms) > 500:
                    print("Defaulting to number of tweets to 500")
                    # FIX: int(500) was a redundant conversion of a literal.
                    noOfTerms = 500
            else:
                print("Please enter a numeric value < 500.")
                continue
            twitterClient = TwitterClient()
            rawTweets = twitterClient.get_tweets(searchTerm, int(noOfTerms))
            # clean the tweets before adding to the dictionary
            if len(rawTweets) != 0:
                cleanedTweets = tools.clean_tweet(rawTweets, searchTerm)
                dataAnalysis = DataAnalysis(cleanedTweets)
                tweetSentiment = dataAnalysis.sentimentAnalysis(searchTerm)
                # id -> [raw text, cleaned text, sentiment] rows for the CSV.
                tweetsDict = {
                    tweetId: [rawTweets[tweetId], cleanedTweets[tweetId],
                              tweetSentiment[tweetId]]
                    for tweetId in cleanedTweets
                }
                tools.write_csv(searchTerm + '.csv', 'w', tweetsDict)
                validInput = True
                break
            else:
                print("No tweets found. Please try searching another trending keyword.\n")
                validInput = False
                break
        if not validInput:
            continue
        while True:
            ch = input("Choose from the below options:\n\t1. Pie Chart\n\t2. Word Cloud\n\t3. Search another "
                       "keyword\n\t4. Exit\nEnter your choice: ")
            if ch == '1':
                print("Data visualisation in Pie Chart")
                print("Loading...")
                dataAnalysis.generatePieChart(searchTerm)
            elif ch == '2':
                print("Data visualisation in Word Cloud\n")
                print("Loading...")
                dataAnalysis.generateWordCloud(searchTerm)
            elif ch == '3':
                break
            elif ch == '4':
                print("Thank You. \nGood Bye!")
                # FIX: exit() is a `site`-module convenience not guaranteed in
                # every runtime; SystemExit is the portable equivalent.
                raise SystemExit(0)
            else:
                print("Incorrect choice. Please re enter your choice.\n")
                continue
def process_review_data(spark, output_data):
    """Process review data (visible fragment).

    NOTE(review): only the TwitterClient construction is visible here; the
    `spark` and `output_data` parameters are unused in this fragment —
    presumably the rest of the pipeline follows. TODO confirm against the
    full source.
    """
    # create connection to Twitter
    api = TwitterClient()
from TwitterClient import TwitterClient from flask import Flask, request, jsonify from flask_cors import CORS tc = TwitterClient() app = Flask(__name__) CORS(app) @app.route('/twittersentiment', methods=['POST']) def twittersentiment(): topic = request.get_json() tweets = tc.get_tweets(topic['query'], topic['count']) if len(tweets) == 0: message = {'message': 'No tweets found on that topic'} return jsonify(message) else: ptweets = [ tweet for tweet in tweets if tweet['sentiment'] == 'positive' ] ntweets = [ tweet for tweet in tweets if tweet['sentiment'] == 'negative' ] statistics = { 'positiveTweets': { 'tweets': ptweets, 'percentage': len(ptweets) / len(tweets) * 100 }, 'negativeTweets': { 'tweets': ntweets,
def run():
    """Assemble the bot from its bitcoin and twitter clients, then start it."""
    bot = TwitterBot(BitcoinClient(), TwitterClient())
    bot.run()
def __init__(self):
    """Set up the Twitter API handle and bookkeeping for account processing."""
    client = TwitterClient()
    self.twitter_client = client
    self.api = client.getTwitterClientAPI()
    # Accounts to process come from a file; results accumulate as we go.
    self.accounts = self.read_file()
    self.results = []
    self.accounts_counter = 0
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from components import nav
from components import index
from components import sources
from components.method import method_page
from components.dashboards import create_dashboards
from components.tweetcard import create_deck
import json

# Load the list of cities/geocodes to sample tweets from.
with open("locations.json", "r") as location_file:
    locations = json.load(location_file)

api = TwitterClient()

# Collect 20 'blm' tweets per configured location into one flat list.
tweets = []
for location in locations:
    city_tweets = api.get_tweets(query='blm', count=20,
                                 geocode=str(location['geocode']),
                                 city=location['city'])
    tweets = tweets + city_tweets

# (Previously used for quick sentiment-ratio checks:)
# ptweets = [tweet for tweet in tweets if tweet['sentiment'] == 'positive']
# print("Positive tweets percentage: {} %".format(100 * len(ptweets) / len(tweets)))
# ntweets = [tweet for tweet in tweets if tweet['sentiment'] == 'negative']
def api(query):
    """Fetch tweets for *query*, classify sentiment, and return a JSON summary.

    Returns a Flask JSON response with counts, percentages, the top tweets per
    sentiment class, per-class word counts, and worldwide trending topics.
    Error responses: 429 when the Twitter API rate-limits, 400 when the query
    yields no tweets.
    """
    # NOTE(review): os.getenv returns None if QUERY_LIMIT is unset and
    # int(None) raises TypeError — assumes the env var is always configured.
    query_limit = int(os.getenv('QUERY_LIMIT'))
    api = TwitterClient()
    try:
        tweets = api.get_tweets(query, query_limit)
    except TweepError:
        return jsonify({
            "status_code": 429,
            "message": "Too many requests. Try again later"
        })
    if len(tweets) == 0:
        return jsonify({"status_code": 400, "message": "Not a valid query"})

    positive = negative = neutral = 0
    positive_tweets = []
    negative_tweets = []
    neutral_tweets = []
    for tweet in tweets:
        sentiment = get_sentiment(tweet['text'])
        if sentiment == 1:
            tweet['sentiment'] = 'positive'
            positive += 1
            positive_tweets.append(tweet)
        elif sentiment == -1:
            tweet['sentiment'] = 'negative'
            negative += 1
            negative_tweets.append(tweet)
        else:
            tweet['sentiment'] = 'neutral'
            neutral += 1
            neutral_tweets.append(tweet)

    total_per = positive + negative + neutral
    positive_per = round(((positive / total_per) * 100), 2)
    negative_per = round(((negative / total_per) * 100), 2)
    neutral_per = round(((neutral / total_per) * 100), 2)

    # BUG FIX: when every tweet is neutral, positive + negative == 0 and the
    # original raised ZeroDivisionError. Fall back to 0.0 for both means
    # (the classification below then reports 'Controversial').
    mean_total = positive + negative
    if mean_total:
        positive_mean = round(((positive / mean_total) * 100), 2)
        negative_mean = round(((negative / mean_total) * 100), 2)
    else:
        positive_mean = negative_mean = 0.0

    # Most-retweeted first, so the top-5 slices below surface popular tweets.
    by_retweets = itemgetter('retweet_count')
    positive_tweets.sort(key=by_retweets, reverse=True)
    negative_tweets.sort(key=by_retweets, reverse=True)
    neutral_tweets.sort(key=by_retweets, reverse=True)

    positive_word_count = get_word_count(positive_tweets, query)
    negative_word_count = get_word_count(negative_tweets, query)
    neutral_word_count = get_word_count(neutral_tweets, query)

    # Within 10 percentage points of each other counts as contested.
    if abs(positive_mean - negative_mean) < 10.0:
        sentiment = 'Controversial'
    elif positive_mean > negative_mean:
        sentiment = 'Positive'
    else:
        sentiment = 'Negative'

    WOE_ID = 1  # 1 == worldwide in Twitter's Where-On-Earth ID scheme
    trending = api.get_trending(WOE_ID)
    return jsonify({
        'sentiment': sentiment,
        'count': {
            'positive': positive,
            'negative': negative,
            'neutral': neutral,
            'total': total_per
        },
        'mean': {
            'positive': positive_mean,
            'negative': negative_mean
        },
        'results': {
            'positive': positive_per,
            'negative': negative_per,
            'neutral': neutral_per
        },
        'status_code': 200,
        'message': 'Request Successful!',
        'trending': trending[:5],
        'tweets': {
            'positive_tweets': positive_tweets[:5],
            'negative_tweets': negative_tweets[:5],
            'neutral_tweets': neutral_tweets[:5]
        },
        'word_count': {
            'positive': positive_word_count,
            'negative': negative_word_count,
            'neutral': neutral_word_count
        },
        'query': query.title()
    })
def main():
    """Poll forever: fetch tweets, score their sentiment, retweet, wait 30s."""
    client = TwitterClient()
    while True:
        scored = client.get_tweets_sentiments(client.get_tweets())
        client.retweet(scored)
        time.sleep(30)
def __init__(self, trending):
    """Start the Twitter stream inline and the StockTwits stream on a thread."""
    self.trending = trending
    self.tc = TwitterClient()
    self.start_twitter_stream()
    stocktwits_thread = Thread(target=self.start_stock_twits_stream)
    stocktwits_thread.start()
from TwitterClient import TwitterClient # Keys for Tweepy TwitterClient CONSUMER_KEY = 'fKmzykMYS2SfPSmHNyNI5dT9a' CONSUMER_SECRET = 'Nq2r9E1L8rx4OQ8xIEzvFAfpVnSy4pp7Fva5QJleyOzUqg46jC' ACCESS_TOKEN = '4608889037-DuDZdgi1wXcJTj5XI2l93SkHx5vnimmIa6fqOEh' ACCESS_TOKEN_SECRET = 'ylFmbrAu9PuoweHwPx3X1gw158nO3jTD94fuDiL446phw' TWEET_FILEPATH = "/Users/declanjones/Desktop/TweetProj/Tweet_Data/Tweets/tweet.txt" # Initialize Tweepy Twitter Client client = TwitterClient(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN, ACCESS_TOKEN_SECRET) client.scrape() # Get tweets to tweet from tweet.txt f = open(TWEET_FILEPATH, 'rb') allTweets = f.readlines() nextTweet = '' if allTweets: nextTweet = allTweets[0] f.close() if nextTweet != '': client.post(nextTweet) # Remove tweeted tweet from tweet.txt for next pass f = open(TWEET_FILEPATH, 'wb') index = 1 tweetsLeft = len(allTweets) - 1 while index < tweetsLeft:
def __init__(self):
    """Initialise the bot's Twitter API handle and weather data source."""
    # Underlying API handle obtained from the project's TwitterClient wrapper.
    self.twitter_client = TwitterClient().get_client()
    # Weather data source — presumably queried on demand; confirm against usage.
    self.weather_requester = WeatherRequester()
from TwitterClient import TwitterClient
from Analysis import Analysis

if __name__ == '__main__':
    twitter_client = TwitterClient()
    # Streaming/backfill helpers kept for reference:
    # twitter_client.create_stream(['crypto', 'cryptocurrency', 'xrp', 'btc', 'bitcoin', 'ripple'])
    # twitter_client.sample_tweets("python", 100)
    # twitter_client.backfill_tweets()
    analysis = Analysis()
    print(analysis.find_general_sentiment())
    analysis.generate_user_sentiment()