def tweet(title, url, location=None, parsed_location=None, username=None):
    """Post a tweet announcing *title* with a link to *url*.

    Optionally appends a location hashtag (derived from the geoname tokens
    of *parsed_location*, else from the first word of *location*) and an
    @mention of *username*. The title is ellipsized so the whole message
    fits Twitter's 140-character limit.
    """
    auth = OAuthHandler(app.config["TWITTER_CONSUMER_KEY"], app.config["TWITTER_CONSUMER_SECRET"])
    auth.set_access_token(app.config["TWITTER_ACCESS_KEY"], app.config["TWITTER_ACCESS_SECRET"])
    api = API(auth)
    urllength = 23  # Current Twitter standard for HTTPS (as of Oct 2014)
    maxlength = 140 - urllength - 1  # == 116
    if username:
        maxlength -= len(username) + 2  # " @username"
    locationtag = u""
    if parsed_location:
        locationtags = []
        for token in parsed_location.get("tokens", []):
            if "geoname" in token and "token" in token:
                locname = token["token"].strip()
                if locname:
                    # CamelCase multi-word place names into one hashtag
                    locationtags.append(u"#" + locname.title().replace(u" ", ""))
        locationtag = u" ".join(locationtags)
        if locationtag:
            maxlength -= len(locationtag) + 1
    if not locationtag and location:
        # Make a hashtag from the first word in the location. This catches
        # locations like 'Anywhere' which have no geonameid but are still valid
        # FIX: use a raw string -- "\W" is an invalid escape sequence and a
        # hard error on Python 3.12+.
        locationtag = u"#" + re.split(r"\W+", location)[0]
        maxlength -= len(locationtag) + 1
    if len(title) > maxlength:
        text = title[: maxlength - 1] + u"…"
    else:
        text = title[:maxlength]
    text = text + " " + url  # Don't shorten URLs, now that there's t.co
    if locationtag:
        text = text + " " + locationtag
    if username:
        text = text + " @" + username
    api.update_status(text)
def tweet(title, url, location=None, parsed_location=None):
    """Post a tweet announcing *title* with a link to *url*.

    A location hashtag is appended when one can be derived: from the
    geoname tokens of *parsed_location*, or failing that from the first
    word of *location*. The title is ellipsized to fit the 140-char limit.
    """
    auth = OAuthHandler(app.config['TWITTER_CONSUMER_KEY'], app.config['TWITTER_CONSUMER_SECRET'])
    auth.set_access_token(app.config['TWITTER_ACCESS_KEY'], app.config['TWITTER_ACCESS_SECRET'])
    api = API(auth)
    urllength = 23  # Current Twitter standard for HTTPS (as of Oct 2014)
    maxlength = 140 - urllength - 1  # == 116
    locationtag = u''
    if parsed_location:
        locationtags = []
        for token in parsed_location.get('tokens', []):
            if 'geoname' in token and 'token' in token:
                locname = token['token'].strip()
                if locname:
                    # CamelCase multi-word place names into one hashtag
                    locationtags.append(u'#' + locname.title().replace(u' ', ''))
        locationtag = u' '.join(locationtags)
        if locationtag:
            maxlength -= len(locationtag) + 1
    if not locationtag and location:
        # Make a hashtag from the first word in the location. This catches
        # locations like 'Anywhere' which have no geonameid but are still valid
        # FIX: use a raw string -- '\W' is an invalid escape sequence and a
        # hard error on Python 3.12+.
        locationtag = u'#' + re.split(r'\W+', location)[0]
        maxlength -= len(locationtag) + 1
    if len(title) > maxlength:
        text = title[:maxlength - 1] + u'…'
    else:
        text = title[:maxlength]
    text = text + ' ' + url  # Don't shorten URLs, now that there's t.co
    if locationtag:
        text = text + ' ' + locationtag
    api.update_status(text)
def __init__(self):
    """Authenticate with Twitter, open a stream, and prepare the image dir."""
    # NOTE(review): rebinds the module-level name ``API`` to a tweepy.API
    # *instance*, shadowing the tweepy class for the rest of the module --
    # confirm this is intentional.
    global API
    auth = OAuthHandler(ckey, csecret)
    auth.set_access_token(atoken, asecret)
    self.twitterStream = Stream(auth, listener())
    API = tweepy.API(auth)
    # ensure the download target directory exists
    verifyDir(IMAGE_DIR)
def create_tweepy_stream(self):
    """Authenticate against the Twitter Streaming API and return a Stream
    wired to a listener that feeds this object's async queue."""
    handler = OAuthHandler(secrets.consumer_key, secrets.consumer_secret)
    handler.set_access_token(secrets.access_token, secrets.access_token_secret)
    listener = TweepyStreamListener(self.async_queue)
    return Stream(handler, listener)
def unwrapped_callback(self, resp):
    """Handle the Twitter OAuth callback payload *resp*.

    Returns a dict of user fields for the login framework. Raises
    LoginCallbackError when the user denied access (resp is None).
    """
    if resp is None:
        raise LoginCallbackError(_("You denied the request to login"))
    # Try to read more from the user's Twitter profile
    auth = TwitterOAuthHandler(self.consumer_key, self.consumer_secret)
    # Prefer app-level tokens when configured; otherwise use the tokens
    # returned for this user.
    if self.access_key is not None and self.access_secret is not None:
        auth.set_access_token(self.access_key, self.access_secret)
    else:
        auth.set_access_token(resp['oauth_token'], resp['oauth_token_secret'])
    api = TwitterAPI(auth)
    try:
        twinfo = api.lookup_users(user_ids=[resp['user_id']])[0]
        fullname = twinfo.name
        # swap in the larger avatar variant
        avatar_url = twinfo.profile_image_url_https.replace('_normal.', '_bigger.')
    except TweepError:
        # Profile enrichment is best-effort; login still succeeds.
        fullname = None
        avatar_url = None
    return {'userid': resp['user_id'],
            'username': resp['screen_name'],
            'fullname': fullname,
            'avatar_url': avatar_url,
            'oauth_token': resp['oauth_token'],
            'oauth_token_secret': resp['oauth_token_secret'],
            'oauth_token_type': None,  # Twitter doesn't have token types
            }
def run(self):
    """Start filtering the public stream on this worker's tags (English only)."""
    handler = OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    listener = StdOutListener(self)
    Stream(handler, listener).filter(track=self.tags, languages=['en'])
def authenticate(self):
    """Build and return an OAuth handler configured with our credentials."""
    handler = OAuthHandler(self.consumer_key, self.consumer_secret)
    handler.set_access_token(self.access_token, self.access_token_secret)
    return handler
def __call__(self):
    """Start a background filtered stream on ``self.terms`` and return self."""
    self.items = queue.Queue()
    auth = OAuthHandler(mykeys.ckey, mykeys.csecret)
    auth.set_access_token(mykeys.atoken, mykeys.asecret)
    self.stream = Stream(auth, self)
    # NOTE(review): ``async=True`` is a syntax error on Python 3.7+ where
    # ``async`` became a keyword (newer tweepy renamed the parameter).
    # Left byte-identical for the tweepy/Python version this targets.
    self.stream.filter(track=self.terms, async=True)
    return self
def stream_twitter(battle_id):
    """Stream English tweets for a battle's hashtags until the battle ends."""
    # Avoiding circular import
    from battle.models import Battle
    battle = Battle.objects.get(id=battle_id)
    # nothing to do for battles that are already over
    if battle.end_time < timezone.now():
        return
    # reset per-hashtag counters before (re)streaming
    battle.battlehashtags_set.update(typos=0, words=0)
    battle_hashtags = battle.battlehashtags_set.all().prefetch_related('hashtag')
    if battle_hashtags.count() == 0:
        return
    hashtag_values = [x.hashtag.value for x in battle_hashtags]
    listener = TwitterStreamListener(battle_hashtags)
    auth = OAuthHandler(
        settings.TWITTER_CONSUMER_KEY,
        settings.TWITTER_CONSUMER_SECRET
    )
    auth.set_access_token(
        settings.TWITTER_ACCESS_TOKEN,
        settings.TWITTER_ACCESS_TOKEN_SECRET
    )
    stream = Stream(auth, listener)
    # schedule a disconnect for the moment the battle ends; filter() below
    # blocks until then
    delay = battle.end_time - timezone.now()
    Timer(delay.total_seconds(), stream.disconnect).start()
    stream.filter(track=hashtag_values, languages=['en'])
def __init__(self, slacker):
    """Open a Twitter stream -- restricted to a list's members, or to
    keyword filters, or the user timeline -- forwarding statuses to
    *slacker* via StdOutListener."""
    # auth
    auth = OAuthHandler(settings.twitter_consumer_key, settings.twitter_consumer_secret)
    auth.set_access_token(settings.twitter_access_token, settings.twitter_access_token_secret)
    # out
    l = StdOutListener(slacker)
    # stream
    stream = Stream(auth, l)
    print("opening twitter stream")
    if FILTER_LIST:
        # follow only the members of a specific Twitter list
        api = API(auth)
        employees = api.list_members(LIST_USER, LIST)
        # FIX: don't shadow the builtin ``list``, and materialize the ids --
        # on Python 3 a bare ``map`` object is lazy and single-use.
        member_ids = [str(uid) for uid in employees.ids()]
        #print(member_ids)
        print("only List: " + LIST)
        stream.filter(follow=member_ids)
    elif FILTER_KEYWORDS:
        print("only Keywords: " + str(KEYWORDS))
        stream.filter(track=KEYWORDS)
    else:
        print("your timeline")
        stream.userstream()
def main():
    """Harvest geo-tagged tweets via the Search API into a CouchDB 'train' db.

    (Python 2 code: uses print statements.)
    """
    auth = OAuthHandler(ckey, csecret)
    auth.set_access_token(atoken, asecret)
    api = tweepy.API(auth)
    # specify the main machine's server address
    couch = couchdb.Server('http://*****:*****@115.146.84.141:5984/')
    # NOTE(review): bare except -- presumably couch['train'] raises when the
    # database is missing; narrowing to couchdb's ResourceNotFound would be
    # safer.
    try:
        database = couch['train']
    except:
        database = couch.create('train')
    cursor = tweepy.Cursor(api.search,
                           geocode="39.091919,-94.5757195,1000km",
                           since="2015-05-03", until="2015-05-10",
                           lang="en").items()
    while True:
        try:
            tweet = cursor.next()
            database.save(tweet)
        except tweepy.TweepError:
            # rate-limited: back off for one 15-minute window
            print 'waiting...'
            time.sleep(60*15)
        except StopIteration:
            break
def unwrapped_callback(self, resp):
    """Handle the Twitter OAuth callback payload *resp*.

    Returns a dict of user fields (including email when available) for the
    login framework; raises LoginCallbackError if the user denied access.
    """
    if resp is None:
        raise LoginCallbackError(_("You denied the request to login"))
    # Fetch extended profile data (best-effort) for the authenticated user.
    handler = TwitterOAuthHandler(self.consumer_key, self.consumer_secret)
    handler.set_access_token(resp['oauth_token'], resp['oauth_token_secret'])
    client = TwitterAPI(handler)
    try:
        profile = client.verify_credentials(include_entities='false', skip_status='true', include_email='true')
        fullname = profile.name
        avatar_url = profile.profile_image_url_https.replace('_normal.', '_bigger.')
        email = getattr(profile, 'email', None)
    except TweepError:
        fullname = avatar_url = email = None
    return {
        'email': email,
        'userid': resp['user_id'],
        'username': resp['screen_name'],
        'fullname': fullname,
        'avatar_url': avatar_url,
        'oauth_token': resp['oauth_token'],
        'oauth_token_secret': resp['oauth_token_secret'],
        'oauth_token_type': None,  # Twitter doesn't have token types
    }
def handle(self, *args, **options):
    """Build a keyword list (full names plus Twitter handles) for every
    politician, then stream tweets matching those keywords."""
    politician_keywords = []
    for politician in Politician.objects.all():
        politician_keywords.append(politician.first_name + " " + politician.last_name)
        if politician.twitter_url:
            url = politician.twitter_url
            start = url.rfind("/") + 1
            qmark = url.rfind("?")
            # Take the path segment after the last '/', dropping any query string.
            screen_name = url[start:qmark] if qmark != -1 else url[start:]
            politician_keywords.append(screen_name)
    # tweepy listener + authenticated stream filtered on the keywords
    listener = TweetStreamListener()
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    stream = Stream(auth, listener)
    stream.filter(track=politician_keywords)
def get_tweets():
    """Search each configured hashtag, sentiment-tag the results, and push
    each tweet document to Mongo.

    (Python 2 code: uses print statements. Relies on the module-level
    ``hashes`` string, ``sentiment`` helper and ``push_mongo``.)
    """
    access_token, access_secret, consumer_key, consumer_secret = read_config()
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    global hashes
    count = 0
    api = tweepy.API(auth)
    # "'a','b'" -> ['a', 'b']: strip quotes, split on commas
    hashes = hashes.replace("'", "").split(",")
    for hashtag in hashes:
        tweets = api.search(hashtag)
        for tweet in tweets:
            #print tweet.text
            twitter_json = {}
            twitter_json["created_at"] = str(tweet.created_at)
            twitter_json["caption"] = tweet.text
            twitter_json["username"] = tweet.user.name
            twitter_json["thumbs"] = sentiment.check_sentiments(tweet.text)
            twitter_json["source"] = "twitter"
            twitter_json["link"] = "https://twitter.com/" + str(tweet.user.screen_name) + "/status/" + str(tweet.id)
            print twitter_json["link"]
            # prefer the first attached media URL when present
            if 'media' in tweet.entities:
                twitter_json["url"] = tweet.entities['media'][0]['media_url']
            else:
                twitter_json["url"] = ""
            push_mongo(twitter_json)
def main():
    """Open a Twitter stream and print statuses matching the tracked term."""
    listener = StdOutListener()
    handler = OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    # NOTE(review): backend is never used afterwards -- presumably
    # FileBackend(...) has a useful side effect; confirm.
    backend = FileBackend("./test-db")
    Stream(handler, listener).filter(track=['トレクル'])
def results(request):
    """Django view: run sentiment analysis for a keyword/day range from the
    query string and render the final template.

    NOTE(review): state is shared with past()/compute() via module-level
    globals -- not safe under concurrent requests.
    """
    global tweets, review, confidence, positive, negative
    tweets = []
    review = []
    confidence = []
    positive = []
    negative = []
    global auth, api
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    api = tweepy.API(auth)
    # defaults when no query parameters are supplied
    ky = 'purplestem'
    dys = 1
    if 'kywrd' in request.GET and request.GET['kywrd']:
        ky = request.GET['kywrd']
    if 'days' in request.GET and request.GET['days']:
        dys = request.GET['days']
    past(ky, dys)
    compute()
    return render(request, 'final.html', {'satisfaction': satisfaction, 'pos': positive, 'neg': negative})
def stream(buff, terms):
    """Filter the public stream on *terms*, handing statuses to a listener
    built around *buff*."""
    handler = OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    listener = StdOutListener(buff)
    Stream(handler, listener).filter(track=[terms])
def get_tweets(tweeter_id, from_id=None):
    """Fetch recent tweets for *tweeter_id* as plain dicts.

    When *from_id* is given, pagination continues backwards from that id.
    Returns (tweetlist, last_id) where last_id is the id of the oldest
    tweet fetched (0 when nothing was returned).
    """
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    api = API(auth)

    if from_id is None:
        status = api.user_timeline(user_id=tweeter_id, count=amont_of_tweets)
    else:
        status = api.user_timeline(user_id=tweeter_id, count=amont_of_tweets, max_id=from_id)
        # max_id is inclusive, so the first result duplicates the previous
        # batch's last tweet. BUG FIX: only drop it when paginating --
        # previously pop(0) ran unconditionally and silently discarded the
        # newest tweet on the first call.
        status.pop(0)

    tweetlist = []
    last_id = 0
    for items in status:
        tweet = {
            "id": items.id,
            "user_id": tweeter_id,
            "nickname": items.user.screen_name,
            "realname": items.user.name,
            "date": datetime.strptime(str(items.created_at), "%Y-%m-%d %H:%M:%S"),
            "text": items.text,
        }
        tweetlist.append(tweet)
        last_id = items.id
    print("Last ID: ", last_id, "\n")
    return tweetlist, last_id
class TweetPlugin:
    '''
    Catches the "autotweet" command
    '''

    def __init__(self):
        '''
        Initialize twitter: credentials and stream settings are read from
        the local ``.twitter`` INI file.
        '''
        config = ConfigParser.ConfigParser()
        config.read('.twitter')
        consumer_key = config.get('apikey', 'key')
        consumer_secret = config.get('apikey', 'secret')
        access_token = config.get('token', 'token')
        access_token_secret = config.get('token', 'secret')
        # NOTE(review): stream_rule and account_screen_name are read but
        # never used in this class -- confirm they are needed.
        stream_rule = config.get('app', 'rule')
        account_screen_name = config.get('app', 'account_screen_name').lower()
        self.account_user_id = config.get('app', 'account_user_id')
        self.auth = OAuthHandler(consumer_key, consumer_secret)
        self.auth.set_access_token(access_token, access_token_secret)
        self.twitterApi = API(self.auth)

    def list(self):
        # No-op; presumably required by the plugin interface -- confirm.
        pass

    def run(self, user, message):
        # Only react when the current command is "autotweet"; then start a
        # user stream whose listener auto-replies as account_user_id.
        if utilities.getCommand() == "autotweet":
            streamListener = ReplyToTweet()
            streamListener.setAPI(self.twitterApi)
            streamListener.setUser(self.account_user_id)
            twitterStream = Stream(self.auth, streamListener)
            twitterStream.userstream(_with='user')
class IRCListener(StreamListener):
    # Relays tweets from configured accounts into an IRC bot.

    def __init__(self, config, bot):
        self.bot = bot
        self.auth = OAuthHandler(config["auth"]["consumer_key"], config["auth"]["consumer_secret"])
        self.auth.set_access_token(config["auth"]["access_token"], config["auth"]["access_token_secret"])
        api = tweepy.API(self.auth)
        stream = Stream(self.auth, self)
        # resolve followed screen names to string user ids once at startup
        self.users = [str(api.get_user(u).id) for u in config["follow"]]
        # NOTE(review): ``async=True`` is a syntax error on Python 3.7+
        # (``async`` became a keyword; newer tweepy renamed the parameter).
        # Left byte-identical for the version this targets.
        stream.filter(follow=self.users, async=True)
        log.debug("a twitter.IRCListener instance created")

    def on_data(self, data):
        parsed = json.loads(data)
        # Only announce tweets authored by the accounts we follow -- the
        # stream also delivers replies/retweets from other users.
        if "text" in parsed and parsed["user"]["id_str"] in self.users:
            # TODO: use Twisted color formatting
            ourtweeter = parsed["user"]["name"]
            ourtweet = parsed["text"]
            statusLinkPart = " - https://twitter.com/" + parsed["user"]["screen_name"] + "/status/" + parsed["id_str"]
            self.bot.announce(ourtweeter, " tweeted ", ourtweet, statusLinkPart, specialColors=(None, None, attributes.fg.blue, None))
        return True

    def on_error(self, status):
        log.debug("Twitter error: " + str(status))
def TwitterStream(kwords, lim, lang=['en'], loca=[-180, -90, 180, 90]):
    """Stream tweets matching *kwords* until a limit is reached.

    lim: either an int (tweet-count limit, handled by StdOutListener) or a
    (days, hours, minutes, seconds) tuple giving a wall-clock duration
    (handled by StdOutListener_time via the module-global ``limit``).
    Returns the module-global ``results`` list filled by the listener.

    NOTE(review): *loca* is accepted but never passed to stream.filter --
    confirm whether a ``locations=`` bounding box was intended.
    """
    global limit
    if type(lim) != tuple:
        l = StdOutListener()
        limit = int(lim)
    else:
        day = int(lim[0])
        hour = int(lim[1])
        minute = int(lim[2])
        second = int(lim[3])
        l = StdOutListener_time()
        print(time.time())
        # absolute deadline in epoch seconds
        limit = time.time() + 86400 * day + 3600 * \
            hour + 60 * minute + 1 * second
        print(limit)
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    global results
    results = list()
    stream = Stream(auth, l)
    # BUG FIX: honour the caller's *lang* argument instead of hard-coding
    # ['en'] (the parameter was previously ignored).
    stream.filter(track=kwords, languages=lang)
    # def filter(self, follow=None, track=None, async=False, locations=None,
    # stall_warnings=False, languages=None, encoding='utf8'):
    return results
class TwitterPlayer(player.Player):
    # A game player whose moves travel over Twitter: outgoing events are
    # tweeted at the opponent, incoming ones arrive via a user stream.
    # (Python 2 code: uses print statements.)

    def __init__(self, model, code, access_token, access_token_secret, opponent):
        player.Player.__init__(self, model, code)
        self._opponent = opponent
        # id of our last tweet, used to thread replies
        self._last_id = None
        self._auth = OAuthHandler(auth.consumer_key, auth.consumer_secret)
        self._auth.set_access_token(access_token, access_token_secret)
        self._api = API(self._auth)
        self._listener = TwitterListener(self, self._api)
        self._stream = Stream(self._auth, self._listener)

    @property
    def username(self):
        return self._auth.get_username()

    def allow(self):
        # Block on the user stream while the opponent moves.
        print 'This is the opponent\'s turn...'
        self._stream.userstream()

    def update(self, event):
        # Ignore our own events; tweet the latest model event at the opponent.
        if event.player == self.code:
            return
        message = '@%s %s' % (self._opponent, self._model.events[-1][1])
        self.tweet(message)

    def tweet(self, message):
        # Reply in-thread when we have a previous tweet id.
        if self._last_id is None:
            self._api.update_status(message)
        else:
            self._api.update_status(message, self._last_id)
def _request_tweets(self, search_word, since_id):
    """Page backwards through search results newer than *since_id*.

    Returns (new_since_id, tweets): the newest tweet id seen (to use as
    since_id next time) and a list of raw tweet JSON dicts.
    """
    auth = OAuthHandler(self.consumer_key, self.consumer_secret)
    auth.set_access_token(self.access_token, self.access_secret)
    tweets = []
    api = tweepy.API(auth)
    max_id = None
    new_since_id = None
    total = 0
    logger.info("start search by %s" % search_word)
    while True:
        tweets_batch = api.search(search_word, max_id=max_id, since_id=since_id)
        logger.info("get " + str(len(tweets_batch)) + " tweets by '" + search_word + "'")
        # remember the newest id from the first (most recent) batch only
        if not new_since_id:
            new_since_id = tweets_batch.since_id
        # max_id unchanged means no further progress -> end of results
        if max_id == tweets_batch.max_id:
            break
        max_id = tweets_batch.max_id
        total += len(tweets_batch)
        for tweet in tweets_batch:
            tweets.append(tweet._json)
        if not max_id:
            break
    logger.info("done with search found %s new tweets" % total)
    return new_since_id, tweets
def run_twitter_query():
    """Stream tweets tracking every symbol in the hand-made company list,
    substituting hand-tuned search terms for symbols whose raw entries are
    too noisy on Twitter."""
    listener = StdOutListener()
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_token_secret)
    stream = Stream(auth, listener)
    companies = hand_made_list()
    # Symbols replaced by a single hand-picked company-name search term.
    overrides = {
        'F': ['Ford'],
        'GE': ['General Electric'],
        'S': ['Sprint'],
        'T': ['AT&T'],
        'MU': ['Micron Tech'],
        'TRI': ['Thomson Reuters'],
    }
    search_list = []
    for key, value in companies.items():
        if key == 'SPY':
            # full name plus index tags; '#SPY' itself is not helpful
            search_list.extend([value[0], '#SP500', '$SP500'])
        elif key in overrides:
            search_list.extend(overrides[key])
        else:
            search_list.extend(value)
    stream.filter(track=search_list)
def process(self, statement):
    """Extract nouns/adjectives from *statement*, search Twitter for them as
    a hashtag, and return (confidence, response)."""
    confidence = self.classifier.classify(statement.text.lower())
    tokens = nltk.word_tokenize(str(statement))
    tagged = nltk.pos_tag(tokens)
    # keep nouns (NN/NNP/NNS/NNPS) and adjectives (JJ) as candidate terms
    nouns = [word for word, pos in tagged if (pos == 'NN' or pos == 'NNP' or pos == 'JJ' or pos == 'NNS' or pos == 'NNPS')]
    downcased = [x.lower() for x in nouns]
    searchTerm = " ".join(downcased).encode('utf-8')
    #"http://where.yahooapis.com/v1/places.q('Place name')?appid=yourappidhere"
    st = ""
    if len(nouns) != 0:
        auth = OAuthHandler(twitter_consumer_key, twitter_consumer_secret)
        auth.set_access_token(twitter_access_key, twitter_access_secret)
        api = tweepy.API(auth)
        # concatenate the text of up to 20 matching tweets
        # NOTE(review): '#' + searchTerm mixes str and bytes on Python 3 --
        # presumably this targets Python 2; confirm.
        for status in tweepy.Cursor(api.search, q='#'+searchTerm).items(20):
            st = st+status.text
        response = Statement("Jarvis: "+st)
    else:
        response = Statement("Jarvis: "+"Sorry sir, Nothing Found")
    return confidence, response
    #what's trending in city
    #movie reviews
    #people talking about some topic
def minetweets():
    """Stream English-language tweets matching the module-level ``args`` terms."""
    listener = StdOutListener()
    handler = OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    twitter_stream = Stream(handler, listener)
    # stream.filter(track=['Watson', 'Cognitive', 'Machine Learning'])
    twitter_stream.filter(track=args, languages=["en"])
class MainRunner:
    """Streams tweets located in the configured places into an output file."""

    def __init__(self, conf):
        self._conf = conf
        self._out = None  # opened lazily by the ``out`` property
        self.listeners = []
        self._auth = OAuthHandler(conf.consumer_key, conf.consumer_secret)
        self._auth.set_access_token(conf.access_token, conf.access_token_secret)
        self._run_listener()

    def _run_listener(self):
        """Start a location-filtered stream covering every configured city."""
        listener = Listener(self.out, self._conf.places)
        stream = Stream(self._auth, listener)
        # Flatten each city's southwest/northeast corners into the flat
        # [lon, lat, lon, lat, ...] list the streaming API expects.
        locations = []
        for city in self._conf.places:
            locations.extend(city['southwest'].values())
            locations.extend(city['northeast'].values())
        stream.filter(locations=locations)

    @property
    def out(self):
        """Lazily-opened output file; falls back to 'output.txt'."""
        if self._out is None:
            try:
                self._out = open(self._conf.output, 'a')
            except FileNotFoundError:
                # BUG FIX: the old fallback had an if/else whose two branches
                # opened the same file -- collapsed the dead branch.
                self._out = open('output.txt', 'a')
        return self._out

    def __del__(self):
        # BUG FIX: close only if a file was actually opened; going through
        # the ``out`` property here could open a file just to close it.
        if self._out is not None:
            self._out.close()
def main(self):
    """Train the classifier, stream tweets for the search term, analyse
    their sentiment, and return (neutral, positive, negative, tweet_list)."""
    # twitter authorization
    auth = OAuthHandler(AuthDetails.consumer_key, AuthDetails.consumer_secret)
    auth.set_access_token(AuthDetails.access_token, AuthDetails.access_token_secret)
    language = 'en'
    pt = ProcessTweet()
    searchTerm = pt.unicodetostring(self.searchTerm)
    stopAt = pt.unicodetostring(self.stopAt)
    # train the classifier
    tr = Training()
    (priors, likelihood) = tr.starttraining()
    # stream tweets from twitter; filter() blocks until the listener stops
    twitterStream = Stream(auth, Listener(searchTerm, stopAt))
    twitterStream.filter(track=[searchTerm], languages=[language])
    sen = Sentiment()
    sentiment_tally = Counter()
    (sentiment_tally, tweet_list) = sen.gettweetstoanalyse(priors, likelihood, searchTerm)
    tr = Training()
    sen = Sentiment()
    (neutral, positive, negative) = sen.analyse(sentiment_tally)
    tweet_list = self.edittweetlists(tweet_list)
    # truncate streamtweets table
    self.removetweetsfromdatabase()
    # save training data
    tr.savetrainingdatatodb(priors, likelihood)
    return (neutral, positive, negative, tweet_list)
def init_stream(self):
    """Prepare the queue, keyword list, listener, and authenticated stream."""
    self.q = Queue()
    self.keywords = []
    self.listener = TwitterStreamListener(self.keywords, self.q)
    # NOTE(review): con_secret/con_secret_key naming looks swapped relative
    # to tweepy's (consumer_key, consumer_secret) order -- confirm config.
    handler = OAuthHandler(config.con_secret, config.con_secret_key)
    handler.set_access_token(config.token, config.token_key)
    self.stream = Stream(handler, self.listener)
def init_api(self):
    """Create the tweepy API client from the stored OAuth credentials,
    honouring the configured HTTPS setting."""
    use_https = configuration.twitter['use_https']
    handler = TweepyOAuthHandler(self._consumer_key,
                                 self._consumer_secret,
                                 secure=use_https)
    handler.set_access_token(self._access_token_key, self._access_token_secret)
    self._api = BaseTweepyApi(handler, secure=use_https)
class TwitterClient(object):
    """Fetches tweets via the Twitter search API and labels each with a
    TextBlob-based sentiment."""

    def __init__(self):
        """
        Class constructor or initialization method.
        """
        # keys and tokens from the Twitter Dev Console (module-level names)
        consumer_key = consumerkey
        consumer_secret = consumersecret
        access_token = accesskey
        access_token_secret = accesssecret
        # attempt authentication
        # NOTE(review): the bare ``except`` swallows every error (including
        # NameError), leaving self.api unset -- narrow it if possible.
        try:
            # create OAuthHandler object
            self.auth = OAuthHandler(consumer_key, consumer_secret)
            # set access token and secret
            self.auth.set_access_token(access_token, access_token_secret)
            # create tweepy API object to fetch tweets
            self.api = tweepy.API(self.auth)
        except:
            print("Error: Authentication Failed")

    def clean_tweet(self, tweet):
        """
        Uses regex to remove @mentions, special characters and links,
        collapsing whitespace to single spaces.
        """
        return ' '.join(
            re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)",
                   " ", tweet).split())

    def get_tweet_sentiment(self, tweet):
        """
        Classify the cleaned tweet text with TextBlob's sentiment polarity.
        """
        # create TextBlob object of passed tweet text
        analysis = TextBlob(self.clean_tweet(tweet))
        # set sentiment
        # NOTE(review): polarity == 0 (neutral) is counted as 'positive'
        # here -- confirm that is intended.
        if analysis.sentiment.polarity >= 0:
            return 'positive'
        else:
            return 'negative'

    def get_tweets(self, query, count=10000, tweet_mode="extended"):
        """
        Main function to fetch tweets and parse them into dicts with
        text/time/location/sentiment keys. Returns None on API error.
        """
        tweets = []
        try:
            # call twitter api to fetch tweets
            # NOTE(review): the tweet_mode parameter is accepted but never
            # forwarded to api.search -- confirm whether that was intended.
            fetched_tweets = self.api.search(q=query, count=count)
            # parsing tweets one by one
            for tweet in fetched_tweets:
                # empty dictionary to store required params of a tweet
                parsed_tweet = {}
                # saving text of tweet
                parsed_tweet['text'] = tweet.text
                parsed_tweet['time'] = tweet.created_at
                parsed_tweet['location'] = tweet.user.location
                # saving sentiment of tweet
                parsed_tweet['sentiment'] = self.get_tweet_sentiment(
                    tweet.text)
                # appending parsed tweet to tweets list
                if tweet.retweet_count > 0:
                    # if tweet has retweets, ensure that it is appended only once
                    if parsed_tweet not in tweets:
                        tweets.append(parsed_tweet)
                else:
                    tweets.append(parsed_tweet)
            # return parsed tweets
            return tweets
        except tweepy.TweepError as e:
            # print error (if any)
            print("Error : " + str(e))
class Twitter:
    """A helper for talking to Twitter APIs."""

    def __init__(self, logs_to_cloud):
        self.logs_to_cloud = logs_to_cloud
        self.logs = Logs(name="twitter", to_cloud=self.logs_to_cloud)
        self.twitter_auth = OAuthHandler(TWITTER_CONSUMER_KEY,
                                         TWITTER_CONSUMER_SECRET)
        self.twitter_auth.set_access_token(TWITTER_ACCESS_TOKEN,
                                           TWITTER_ACCESS_TOKEN_SECRET)
        # Retry/backoff settings make REST calls resilient to transient
        # errors; rate limits are handled by waiting.
        self.twitter_api = API(auth_handler=self.twitter_auth,
                               retry_count=API_RETRY_COUNT,
                               retry_delay=API_RETRY_DELAY,
                               retry_errors=API_RETRY_ERRORS,
                               wait_on_rate_limit=True,
                               wait_on_rate_limit_notify=True)
        self.twitter_listener = None

    def start_streaming(self, callback):
        """Starts streaming tweets and returning data to the callback."""
        self.twitter_listener = TwitterListener(
            callback=callback, logs_to_cloud=self.logs_to_cloud)
        twitter_stream = Stream(self.twitter_auth, self.twitter_listener)
        self.logs.debug("Starting stream.")
        # blocks until the stream ends or the listener errors out
        twitter_stream.filter(follow=[TRUMP_USER_ID])
        # If we got here because of an API error, raise it.
        if self.twitter_listener and self.twitter_listener.get_error_status():
            raise Exception("Twitter API error: %s" %
                            self.twitter_listener.get_error_status())

    def stop_streaming(self):
        """Stops the current stream."""
        if not self.twitter_listener:
            self.logs.warn("No stream to stop.")
            return
        self.logs.debug("Stopping stream.")
        self.twitter_listener.stop_queue()
        self.twitter_listener = None

    def tweet(self, companies, tweet):
        """Posts a tweet listing the companies, their ticker symbols, and a
        quote of the original tweet.
        """
        link = self.get_tweet_link(tweet)
        text = self.make_tweet_text(companies, link)
        self.logs.info("Tweeting: %s" % text)
        self.twitter_api.update_status(text)

    def make_tweet_text(self, companies, link):
        """Generates the text for a tweet."""
        # Find all distinct company names.
        names = []
        for company in companies:
            name = company["name"]
            if name not in names:
                names.append(name)
        # Collect the ticker symbols and sentiment scores for each name.
        tickers = {}
        sentiments = {}
        for name in names:
            tickers[name] = []
            for company in companies:
                if company["name"] == name:
                    ticker = company["ticker"]
                    tickers[name].append(ticker)
                    sentiment = company["sentiment"]
                    # Assuming the same sentiment for each ticker.
                    sentiments[name] = sentiment
        # Create lines for each name with sentiment emoji and ticker symbols.
        lines = []
        for name in names:
            sentiment_str = self.get_sentiment_emoji(sentiments[name])
            tickers_str = " ".join(["$%s" % t for t in tickers[name]])
            line = "%s %s %s" % (name, sentiment_str, tickers_str)
            lines.append(line)
        # Combine the lines and ellipsize if necessary.
        lines_str = "\n".join(lines)
        size = len(lines_str) + 1 + len(link)
        if size > MAX_TWEET_SIZE:
            self.logs.warn("Ellipsizing lines: %s" % lines_str)
            lines_size = MAX_TWEET_SIZE - len(link) - 2
            lines_str = u"%s\u2026" % lines_str[:lines_size]
        # Combine the lines with the link.
        text = "%s\n%s" % (lines_str, link)
        return text

    def get_sentiment_emoji(self, sentiment):
        """Returns the emoji matching the sentiment."""
        if not sentiment:
            return EMOJI_SHRUG
        if sentiment > 0:
            return EMOJI_THUMBS_UP
        if sentiment < 0:
            return EMOJI_THUMBS_DOWN
        self.logs.warn("Unknown sentiment: %s" % sentiment)
        return EMOJI_SHRUG

    def get_tweet(self, tweet_id):
        """Looks up metadata for a single tweet."""
        # Use tweet_mode=extended so we get the full text.
        status = self.twitter_api.get_status(tweet_id, tweet_mode="extended")
        if not status:
            self.logs.error("Bad status response: %s" % status)
            return None
        # Use the raw JSON, just like the streaming API.
        return status._json

    def get_tweets(self, since_id):
        """Looks up metadata for all Trump tweets since the specified ID."""
        tweets = []
        # Include the first ID by passing along an earlier one.
        since_id = str(int(since_id) - 1)
        # Use tweet_mode=extended so we get the full text.
        for status in Cursor(self.twitter_api.user_timeline,
                             user_id=TRUMP_USER_ID, since_id=since_id,
                             tweet_mode="extended").items():
            # Use the raw JSON, just like the streaming API.
            tweets.append(status._json)
        self.logs.debug("Got tweets: %s" % tweets)
        return tweets

    def get_tweet_text(self, tweet):
        """Returns the full text of a tweet."""
        # The format for getting at the full text is different depending on
        # whether the tweet came through the REST API or the Streaming API:
        # https://dev.twitter.com/overview/api/upcoming-changes-to-tweets
        try:
            if "extended_tweet" in tweet:
                self.logs.debug("Decoding extended tweet from Streaming API.")
                return tweet["extended_tweet"]["full_text"]
            elif "full_text" in tweet:
                self.logs.debug("Decoding extended tweet from REST API.")
                return tweet["full_text"]
            else:
                self.logs.debug("Decoding short tweet.")
                return tweet["text"]
        except KeyError:
            self.logs.error("Malformed tweet: %s" % tweet)
            return None

    def get_tweet_link(self, tweet):
        """Creates the link URL to a tweet."""
        if not tweet:
            self.logs.error("No tweet to get link.")
            return None
        try:
            screen_name = tweet["user"]["screen_name"]
            id_str = tweet["id_str"]
        except KeyError:
            self.logs.error("Malformed tweet for link: %s" % tweet)
            return None
        link = TWEET_URL % (screen_name, id_str)
        return link
import tweepy
import csv  # NOTE(review): imported but unused in this snippet

# SECURITY(review): live API credentials are committed in source -- move
# them to environment variables/config and revoke/rotate these keys.
access_token = "2922469548-z4Gojre31l0nYNri89z49XD0XMx6IZ6PNZ4ZQGV"
access_token_secret = "3gpDtba3YkGKXJdlP3exmtHm3S3DCaY9kt7A22CAw2Fwf"
consumer_key = "5Kq7fg615uFk9ugTMLK92Uo9F"
consumer_key_secret = "S5bh8bXfmDUMmbRWTqEhNk9DXAhF7Ot7yBwwVaCfVwT6SJElRQ"


class stdOutListener(StreamListener):
    # Minimal listener: print each raw status payload as it arrives.

    def on_data(self, data):
        print(data)
        return True

    def on_error(self, status):
        print(status)


if __name__ == '__main__':
    # NOTE(review): StreamListener/OAuthHandler/Stream are used but not
    # imported above -- presumably a ``from tweepy import ...`` /
    # ``from tweepy.streaming import ...`` line is missing; confirm.
    l = stdOutListener()
    auth = OAuthHandler(consumer_key, consumer_key_secret)
    auth.set_access_token(access_token, access_token_secret)
    stream = Stream(auth, l)
    stream.filter(track=['#MIvsSRH'])
    # api = tweepy.API(auth,wait_on_rate_limit=True)
continue except: print('error') except UnicodeEncodeError: print("UnicodeEncodeError") print('----------------------------------------------------\n') return True def on_error(self, status): print('error : ', status) def seperater_date(self, row_str): date = '2016 ' date += row_str[4:16] date = date.replace(' ','') date_obj = datetime.datetime.strptime(date, '%Y%b%d%H:%M') date_diff = date_obj + datetime.timedelta(hours=9) result = ('%s/%s/%s' % (date_diff.year, date_diff.month, date_diff.day)) print(':',result, ':') return result auth = OAuthHandler(ckey, csecret) auth.set_access_token(atoken, asecret) twitterStream = Stream(auth, Listener()) twitterStream.filter(track=keywords) #twitterStream.filter(languages='ko')
# matches ${VAR}-style placeholders in YAML scalar values
path_matcher = re.compile(r"\$\{([^}^{]+)\}")


def path_constructor(loader, node):
    """Resolve a ${VAR} placeholder in a YAML scalar via ``formatter``."""
    value = node.value
    match = path_matcher.match(value)
    # strip the leading "${" and trailing "}"
    env_var = match.group()[2:-1]
    return formatter.format(env_var)  # + value[match.end():]


yaml.add_implicit_resolver("!path", path_matcher)
yaml.add_constructor("!path", path_constructor)

with open(CONFIG_YAML, "r") as stream:
    try:
        # NOTE(review): yaml.load without an explicit Loader is deprecated
        # (PyYAML >= 5.1) and unsafe on untrusted input; confirm the config
        # file is trusted, or pass an explicit Loader that still carries the
        # "!path" constructor registered above.
        config = yaml.load(stream)
    except yaml.YAMLError as exc:
        print(exc)

# Drop keys whose value is None from each Discord instance config.
config["Discord"] = [{k: v for k, v in instance.items() if v is not None}
                     for instance in config["Discord"]]

auth = OAuthHandler(config["Twitter"]["consumer_key"], config["Twitter"]["consumer_secret"])
auth.set_access_token(config["Twitter"]["access_token"], config["Twitter"]["access_token_secret"])

if __name__ == "__main__":
    print(config)
tweet = tweet.replace('\r', ' ') tweet = tweet.replace('\\r', ' ') tweet = tweet.replace('|', ' ') #post = post.replace('\n', ' ') writer.writerow([tweet, status.author.screen_name]) #if N >= 2: return True def on_error(self,status_code): if status_code == 420: return False ''' CKEY = 'nX1iMiRwZKvnzXIRniMIwZXuA' CSEC = '4MCD1ga2R7vXn2sWjvX4DVMkFhw0zf0ta02IwikYE49v0D0muT' ATK = '1009550810240258048-1M4Dj2qdMhyUkHKCuJO8ffoVrYA5gF' ATKS = 'RWFse0TpnH9IGAPXHM4Ozyu4e6oXP0gHuOGORLs5D5ncb' if __name__ == '__main__': #l = StdOutListener() auth = OAuthHandler(CKEY, CSEC) auth.set_access_token(ATK, ATKS) with open('tweet_santafe.csv', 'w', encoding='utf8', newline='') as f1: global writer writer = csv.writer(f1) print(tweepy.api.search('santafestrong')) #stream = Stream(auth, l) #stream.filter(track=['santa fe Highschool', 'santafestrong', 'santa fe shooting'])
class TwitterClient(object):
    '''
    Generic Twitter Class for sentiment analysis.

    Wraps an authenticated tweepy API handle; cleans tweets, classifies
    their TextBlob polarity, and fetches/parses search results.
    '''

    def __init__(self):
        '''
        Class constructor or initialization method.

        On authentication failure only an error is printed, so self.api
        may be left unset.
        '''
        # keys and tokens from the Twitter Dev Console
        # NOTE(review): live credentials are hard-coded — rotate them and
        # load from configuration/environment instead.
        consumer_key = "yJsuGs8DcLZjVe4w46RlmE7EU"
        consumer_secret = "7LTHqVUngpY2TnrO2TUKIGDOU3pokGh1s48AhqGDArqrv6ajtv"
        access_token = "1090450437222948864-upQR0M9V0ChS6QKRsRMgsZnBtkZ5oT"
        access_token_secret = "5ntu65BcOUlU1Qwm8Nu369ijMqTkaNhl4CLb60whqXxYQ"

        # attempt authentication
        try:
            # create OAuthHandler object
            self.auth = OAuthHandler(consumer_key, consumer_secret)
            # set access token and secret
            self.auth.set_access_token(access_token, access_token_secret)
            # create tweepy API object to fetch tweets
            self.api = tweepy.API(self.auth)
        except:  # NOTE(review): bare except hides the real failure cause
            print("Error: Authentication Failed")

    def clean_tweet(self, tweet):
        '''
        Utility function to clean tweet text by removing links, special
        characters using simple regex statements.
        '''
        # The backslash-newline is a line continuation *inside* the string
        # literal, so the effective pattern is
        #   (@mention)|(non-alphanumeric)|(url)
        # replaced by spaces, then whitespace is collapsed.
        return ' '.join(
            re.sub(
                "(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])\
|(\w+:\/\/\S+)", " ", tweet).split())

    def get_tweet_sentiment(self, tweet):
        '''
        Utility function to classify sentiment of passed tweet using
        textblob's sentiment method
        '''
        # create TextBlob object of passed tweet text
        analysis = TextBlob(self.clean_tweet(tweet))
        # set sentiment from the sign of the polarity score
        if analysis.sentiment.polarity > 0:
            return 'positive'
        elif analysis.sentiment.polarity == 0:
            return 'neutral'
        else:
            return 'negative'

    def get_tweets(self, query, count=10):
        '''
        Main function to fetch tweets and parse them.

        Returns a list of dicts with 'text', 'sentiment' and 'date'
        keys (retweeted texts appear only once), or None when the API
        call raises TweepError.
        '''
        # empty list to store parsed tweets
        tweets = []

        try:
            # call twitter api to fetch tweets
            fetched_tweets = self.api.search(q=query, count=count)

            # parsing tweets one by one
            for tweet in fetched_tweets:
                # empty dictionary to store required params of a tweet
                parsed_tweet = {}

                # saving text of tweet
                parsed_tweet['text'] = tweet.text
                #print(tweet.place)
                # saving sentiment of tweet
                parsed_tweet['sentiment'] = self.get_tweet_sentiment(
                    tweet.text)
                parsed_tweet['date'] = tweet.created_at

                # appending parsed tweet to tweets list
                if tweet.retweet_count > 0:
                    # if tweet has retweets, ensure that it is appended only once
                    if parsed_tweet not in tweets:
                        tweets.append(parsed_tweet)
                else:
                    tweets.append(parsed_tweet)

            return tweets

        except tweepy.TweepError as e:
            print("Error : " + str(e))
def authenticate_twitter_app(self):
    """Build and return an OAuth handler loaded with the app's stored
    credentials from the `twitter_credentials` module."""
    handler = OAuthHandler(twitter_credentials.CONSUMER_KEY,
                           twitter_credentials.CONSUMER_SECRET)
    handler.set_access_token(twitter_credentials.ACCESS_TOKEN,
                             twitter_credentials.ACCESS_TOKEN_SECRET)
    return handler
# from http.client import IncompleteRead # Python 3 from settings import * from es_search import ESClient from tweet_preprocess import twokenize, f7 topics = dict() topics_matrix = None twitter_client = None # set up ES connection es = ESClient(index=INDEX) # set up Twitter connection auth_handler = OAuthHandler(APP_KEY, APP_SECRET) auth_handler.set_access_token(OAUTH_TOKEN, OAUTH_TOKEN_SECRET) twitter_client = API(auth_handler) class TopicListener(StreamListener): def on_status(self, status): author = status.user.screen_name # ignore retweets if not hasattr(status, 'retweeted_status'): # if author != MY_NAME: text = status.text.replace('\n', '') text = ' '.join([author, text]) report = text if status.entities[u'user_mentions']: mentions = ' '.join([ entity[u'name']
Return: String -- converted file name """ return ''.join(convert_valid(one_char) for one_char in fname) def convert_valid(one_char): """Convert a character into '_' if invalid. Arguments: one_char -- the char to convert Return: Character -- converted char """ valid_chars = "-_.%s%s" % (string.ascii_letters, string.digits) if one_char in valid_chars: return one_char else: return '_' if __name__ == '__main__': parser = get_parser() args = parser.parse_args() auth = OAuthHandler(config.consumer_key, config.consumer_secret) auth.set_access_token(config.access_token, config.access_secret) api = tweepy.API(auth) twitter_stream = Stream(auth, MyListener(args.data_dir, args.query, args.level)) twitter_stream.filter(track=[args.query], languages=['en'], filter_level=args.level)
except KeyError: # traceback.print_exc() if 'limit' in data: time.sleep(1) self.tweets_gone += 1 if self.tweets_gone % 1000 == 0: print('{} tweets filtered'.format(self.tweets_gone)) else: self.outFile.close() return False def on_error(self, status): print(status) if __name__ == '__main__': auth = OAuthHandler(consumerKey, consumerSecret) auth.set_access_token(accessToken, secretAccessToken) print('set auth and access') stream = Stream( auth, MyListener(time_limit=15 * 60, file_name='data/preprocessed_michigan_tweets.json')) keywords = ['corona', 'quarantine', 'covid'] stream.filter(track=keywords) print('Finished')
rpc_user = "" rpc_password = "" koto = KotodRpc(rpc_user, rpc_password) database = sqlite3.connect("tipkoto.db") db = database.cursor() db.execute("create table if not exists users (user_id text, address text)") database.commit() database.close() consumer_key = "" consumer_secret = "" access_key = "" access_secret = "" auth = OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_key, access_secret) api = API(auth) logging.config.fileConfig("logging.conf") logger = logging.getLogger() def insert_data(user_id): database = sqlite3.connect("tipkoto.db") db = database.cursor() address = koto.call("getnewaddress") db.execute("insert into users (user_id, address) values (?, ?)", (user_id, address)) database.commit()
class Scraper:
    # Twitter-search sentiment scraper: authenticates a tweepy client,
    # cleans tweets, buckets TextBlob polarity into five labels, and
    # reports per-label percentages.

    def __init__(self):
        # NOTE(review): credentials are blank — fill from config/env
        # before use; authentication is not verified here.
        self.ckey = ""
        self.csecret = ""
        self.atoken = ""
        self.asecret = ""
        self.auth = OAuthHandler(self.ckey, self.csecret)
        self.auth.set_access_token(self.atoken, self.asecret)
        self.api = tweepy.API(self.auth)

    def clean_tweet(self, tweet):
        """
        clean tweets

        :param tweet: Tweet to analyze
        :type tweet str
        :return: Cleaned tweet
        :rtype: str
        """
        # Strip @mentions, non-alphanumerics and URLs, then collapse
        # the remaining whitespace.
        return ' '.join(
            re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ",
                   tweet).split())

    def get_tweet_sentiment(self, tweet):
        """
        Utility function to classify sentiment of passed tweet using
        textblob's sentiment method

        :param tweet: Tweet to analyze
        :type tweet: str
        :return: Sentiment
        :rtype: str
        """
        analysis = TextBlob(self.clean_tweet(tweet))
        # Bucket the polarity score (-1..1); note that both
        # 0 < polarity <= 0.25 and polarity == 0 map to 'Neutral'.
        if analysis.sentiment.polarity > 0.75:
            return 'Extremely positive'
        elif analysis.sentiment.polarity > 0.5:
            return 'Very positive'
        elif analysis.sentiment.polarity > 0.25:
            return 'Positive'
        elif analysis.sentiment.polarity > 0:
            return 'Neutral'
        elif analysis.sentiment.polarity == 0:
            return 'Neutral'
        else:
            return 'Negative'

    def get_tweets(self, query, count):
        """
        Main function to fetch tweets and parse them.

        :param query: Query for twitter API
        :type query: str
        :param count: Number of tweets to search
        :type count: int
        :return: tweets
        :rtype: List
        """
        tweets = []
        try:
            tweet_nums = self.api.search(q=query, count=count)
            for tweet in tweet_nums:
                cleaned_tweet = {
                    'text': tweet.text,
                    'sentiment': self.get_tweet_sentiment(tweet.text)
                }
                # De-duplicate retweeted texts.
                if tweet.retweet_count > 0:
                    if cleaned_tweet not in tweets:
                        tweets.append(cleaned_tweet)
                else:
                    tweets.append(cleaned_tweet)
            return tweets
        except tweepy.TweepError as e:
            # NOTE(review): falls through returning None — callers such
            # as analyze_tweets_numerical then fail iterating it.
            print("Error : " + str(e))

    def analyze_tweets_numerical(self, search_term):
        """
        return numbers for tweet analysis

        :param search_term: Term to analyze
        :type search_term: str
        :return: List of % for tweet sentiment categories
        :rtype: List
        """
        tweets = self.get_tweets(query=search_term, count=100000)
        eptweets = [
            tweet for tweet in tweets
            if tweet['sentiment'] == "Extremely positive"
        ]
        vptweets = [
            tweet for tweet in tweets if tweet['sentiment'] == "Very positive"
        ]
        ptweets = [
            tweet for tweet in tweets if tweet['sentiment'] == "Positive"
        ]
        ntweets = [
            tweet for tweet in tweets if tweet['sentiment'] == "Neutral"
        ]
        negtweets = [
            tweet for tweet in tweets if tweet['sentiment'] == "Negative"
        ]
        # Percentages in order: extremely positive, very positive,
        # positive, neutral, negative.
        return [
            self._perecentage_helper(tweets, x)
            for x in [eptweets, vptweets, ptweets, ntweets, negtweets]
        ]

    def _perecentage_helper(self, tweets, subset):
        # Share of `subset` within `tweets`, as a percentage.
        # NOTE(review): raises ZeroDivisionError when `tweets` is empty.
        return 100 * (len(subset) / len(tweets))
class TwitterClient(object):
    '''
    Generic Twitter Class for sentiment analysis.

    This variant adds emoticon-based scoring (via the `emot` package)
    on top of the usual TextBlob text-polarity classification.
    '''
    def __init__(self):
        '''
        Class constructor or initialization method.
        '''
        # keys and tokens from the Twitter Dev Console
        # NOTE(review): live credentials are hard-coded — rotate them and
        # load from configuration/environment instead.
        consumer_key = 'KqAG0jjK3BgT2erQv9WlnRkf3'
        consumer_secret = 'PZLLLzM4vfAJXhyX9lslNFKfaviJ6IPKNlrDOvaVpLqOu3FILV'
        access_token = '913660176535666694-g6ziTkaOkGKyiVxahEbzz2O3smNGfNf'
        access_token_secret = 'MXaMqba1UAdjyGQtFmMcEOxCPR7u2PpMexHDGsl9s3oAJ'

        # attempt authentication
        try:
            # create OAuthHandler object
            self.auth = OAuthHandler(consumer_key, consumer_secret)
            # set access token and secret
            self.auth.set_access_token(access_token, access_token_secret)
            # create tweepy API object to fetch tweets
            self.api = tweepy.API(self.auth)
        except:  # NOTE(review): bare except hides the real failure cause
            print("Error: Authentication Failed")

    def clean_tweet(self, tweet):
        '''
        Utility function to clean tweet text by removing links, special
        characters using simple regex statements.
        '''
        return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ", tweet).split())

    def cleaned(self, tweet):
        # Lighter cleaning for emoticon analysis: drop @mentions only,
        # keeping punctuation so emoticons survive.
        return ' '.join(re.sub("(@[A-Za-z0-9]+)", " ", tweet).split())

    def get_emoticon_sentiment(self, tweet):
        # Sum the TextBlob polarity of every emoticon found in the
        # ASCII-only version of the tweet; prints and returns the total.
        sum = 0  # NOTE(review): shadows the builtin `sum`
        cleaned_tweet = self.cleaned(tweet)
        emotion = emot.emoticons(cleaned_tweet.encode('ascii', 'ignore').decode('ascii'))
        for i in range(len(emotion)):
            a = TextBlob(emotion[i]['value'])
            sum = sum + (a.sentiment.polarity)
        print(sum)
        return sum

    def get_tweet_sentiment(self, tweet):
        '''
        Utility function to classify sentiment of passed tweet using
        textblob's sentiment method
        '''
        # create TextBlob object of passed tweet text
        analysis = TextBlob(self.clean_tweet(tweet))
        # set sentiment from the sign of the polarity score
        if analysis.sentiment.polarity > 0:
            return 'positive'
        elif analysis.sentiment.polarity == 0:
            return 'neutral'
        else:
            return 'negative'

    def get_tweets(self, query, count=10):
        '''
        Main function to fetch tweets and parse them.

        Returns a list of {'text', 'sentiment'} dicts (retweeted texts
        de-duplicated), or None when the API call raises TweepError.
        '''
        # empty list to store parsed tweets
        tweets = []

        try:
            # call twitter api to fetch tweets
            fetched_tweets = self.api.search(q=query, count=count)

            # parsing tweets one by one
            for tweet in fetched_tweets:
                # empty dictionary to store required params of a tweet
                parsed_tweet= {}

                # saving text of tweet
                parsed_tweet['text'] = tweet.text
                # saving sentiment of tweet
                parsed_tweet['sentiment'] = self.get_tweet_sentiment(tweet.text)

                # appending parsed tweet to tweets list
                if tweet.retweet_count > 0:
                    # if tweet has retweets, ensure that it is appended only once
                    if parsed_tweet not in tweets:
                        tweets.append(parsed_tweet)
                else:
                    tweets.append(parsed_tweet)

            # return parsed tweets
            return tweets

        except tweepy.TweepError as e:
            # print error (if any)
            print("Error : " + str(e))
# NOTE: Python 2 source (uses print statements).
import tweepy
from tweepy import OAuthHandler

from settings import twitter_app_config

# App credentials pulled from the project settings module.
CONSUMER_KEY = twitter_app_config.get('CONSUMER_KEY')
CONSUMER_SECRET = twitter_app_config.get('CONSUMER_SECRET')
OAUTH_TOKEN = twitter_app_config.get('OAUTH_TOKEN')
OAUTH_TOKEN_SECRET = twitter_app_config.get('OAUTH_TOKEN_SECRET')

auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(OAUTH_TOKEN, OAUTH_TOKEN_SECRET)
api = tweepy.API(auth)

# Look up a single user and print their profile counters.
user = api.get_user('@neilhimself')
print user.screen_name
print user.followers_count

# Walk the accounts the user follows, one blank-line-separated entry each.
for friend in user.friends():
    print
    print friend.screen_name
    print friend.followers_count
# http://tweepy.readthedocs.org/en/v3.4.0/streaming_how_to.html # https://github.com/tweepy/tweepy/blob/2f3c61efbd1744db4db699f36c8db87cdcfc51c3/examples/streaming.py #Import the necessary methods from tweepy library from tweepy.streaming import StreamListener from tweepy import OAuthHandler from tweepy import Stream from twitter_test_config import twitConfig #This is a basic listener that just prints received tweets to stdout. class StdOutListener(StreamListener): def on_data(self, data): print data return True def on_error(self, status): print status if __name__ == '__main__': #This handles Twitter authentification and the connection to Twitter Streaming API l = StdOutListener() auth = OAuthHandler(twitConfig.api_key, twitConfig.api_secret) auth.set_access_token(twitConfig.access_token, twitConfig.access_token_secret) stream = Stream(auth, l) #This line filter Twitter Streams to capture data by the keywords: 'python', 'javascript', 'ruby' stream.filter(track=['python', 'javascript', 'ruby'])
json_file = open(model_file, 'r') lm_json = json_file.read() json_file.close() lm = model_from_json(lm_json) # load weights into new model lm.load_weights(weights_file) print("Loaded model from disk") return lm class StdOutListener(StreamListener): def on_data(self, data): # print(data) obj = json.loads(data) print('> {}'.format(obj["text"])) print('< {}'.format(respond_to(loaded_model, obj["text"]))) # blob = TextBlob(obj["text"]) # for sentence in blob.sentences: # print(sentence.sentiment.polarity) return True def on_error(self, status): print(status) loaded_model = load_model_from_disk('s2s_model.json', 's2s_model.h5') listener = StdOutListener() auth = OAuthHandler(secrets.consumer_key, secrets.consumer_secret) auth.set_access_token(secrets.access_token_key, secrets.access_token_secret) stream = Stream(auth, listener) stream.filter(track=['@AmazonHelp'])
twitter_listener = None # check for S3 configuration, enable if present s3_bucketname = config['io'].get('s3_bucketname', None) if s3_bucketname: twitter_listener = TwitterListener(outfile, target_count, s3_bucketname) else: print("[!] No S3 bucket name found, running in local archive mode.") twitter_listener = TwitterListener(outfile, target_count) auth = OAuthHandler(config['twitter'].get('consumer_key'), config['twitter'].get('consumer_secret')) auth.set_access_token(config['twitter'].get('access_token'), config['twitter'].get('access_token_secret')) stream = Stream(auth, twitter_listener) backoff_in_seconds = 1 while backoff_in_seconds < 65: try: print(" * Tracker String: %s" % tracker_string) stream.filter(track=[tracker_string]) except KeyboardInterrupt: print("Shutting down listener...") twitter_listener.close_file() print("Clean shutdown successful!") exit(0) except (UrlLibProtocolError, RequestsConnectionError): print("Connection reset by host, retrying in %d seconds." %
from tweepy import Stream
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
import urllib
import re
import time
import csv
from bson import json_util

# NOTE(review): credentials are blank — fill from config/env before use.
CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_KEY = ""
ACCESS_SECRET = ""

auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
# Block and wait (with a notice) when the API rate limit is hit.
# NOTE(review): `tweepy` is referenced below but only submodules are
# imported above — presumably `import tweepy` appears elsewhere; verify.
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)

# Resolve each line of IDS.csv to a numeric Twitter user id, silently
# skipping entries the API rejects (suspended/unknown accounts).
with open('IDS.csv') as f:
    array = []
    user_ids = []
    for line in f:
        array.append(line)
    print(array)
    for i in array:
        try:
            user_ids.append(api.get_user(i).id)
        except tweepy.error.TweepError as e:
            continue
def sendData(c_socket):
    """Authenticate with the module-level Twitter credentials, then
    stream tweets matching 'soccer' into TweetsListener, which forwards
    them over the given client socket."""
    oauth = OAuthHandler(consumer_key, consumer_secret)
    oauth.set_access_token(access_token, access_secret)
    Stream(oauth, TweetsListener(c_socket)).filter(track=['soccer'])
class listener(StreamListener):
    # Stream listener that forwards the text of each incoming tweet to
    # the Spark consumer over the module-level TCP connection `conn`.

    def on_data(self, data):
        all_data = json.loads(data)
        # collect all desired data fields
        if 'text' in all_data:
            tweet = all_data["text"]
            send_tweets_to_spark(tweet, conn)
            return True
        else:
            # Non-tweet payloads (e.g. delete/limit notices) are ignored.
            return True

    def on_error(self, status):
        print(status)


auth = OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)

TCP_IP = "localhost"
TCP_PORT = 9009
conn = None
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
print("Waiting for TCP connection...")
# Block until the Spark consumer connects before starting the stream.
conn, addr = s.accept()
print("Connected... Starting getting tweets.")

# create stream and filter on a searchterm
twitterStream = Stream(auth, listener())
twitterStream.filter(languages=["en"], stall_warnings=True)
except AttributeError as e: print('AttributeError was returned, stupid bug') print(e) except tweepy.TweepError as e: print('Below is the printed exception') print(e) if '401' in e: # not sure if this will even work print('Below is the response that came in') print(e) time.sleep(60) else: # raise an exception if another status code was returned,we don't like other kinds time.sleep(60) except BaseException as e: print('failed ondata,', str(e)) time.sleep(5) exit() def on_error(self, status): print(status) # Instance auth = OAuthHandler(ckey, consumer_secret) # Consumer keys auth.set_access_token(access_token_key, access_token_secret) # Secret Keys # initialize Stream object with a time out limit twitterStream = Stream(auth, listener(start_time, time_limit=82800)) # bounding box filter for Washington twitterStream.filter(locations=[-124.84, 45.54, -116.92, 49.0])
def callback():
    # Second-stage Tk window: background image, a menu bar
    # (Exit / Compare / Graph) and a Submit button that streams tweets
    # matching the query typed in the outer `T` widget, appending
    # classified lines to tweeti.txt / value.txt.
    # NOTE(review): relies on module-level names `T`, `b`, `g` and a
    # sentiment module `s` defined elsewhere in this file — confirm.

    def hi():
        # "Exit" menu action.
        print("Byee")
        exit(0)

    win = tk.Toplevel()
    #win.configure(bg='#036CB5')
    lod = Image.open("second.jpg")
    lod=lod.resize((1400,1000),Image.ANTIALIAS)
    render = ImageTk.PhotoImage(lod)
    imag = Label(win, image=render)
    # Keep a reference so Tk doesn't garbage-collect the image.
    imag.image = render
    imag.place(x=0, y=0)
    menubar = Menu(win)
    menubar.add_command(label="Exit",font = ('Courier', 14), command=hi)
    menubar.add_command(label="Compare",font = ('Courier', 14), command=b.comp)
    menubar.add_command(label="Graph",font = ('Courier', 14), command=g.am)
    win.config(menu=menubar)

    def tr():
        # "Submit" action: show tweeti.txt in a scrollable read-only box.
        s = tk.Scrollbar(win)
        T1 = tkinter.Text(win, height=150, width=100, font=("Courier", 14))
        T1.focus_set()
        s.pack(side=tk.RIGHT, fill=tk.Y)
        T1.pack(fill=tk.Y)
        s.config(command=T1.yview)
        T1.config(yscrollcommand=s.set)
        file = open("tweeti.txt")
        data = file.read()
        file.close()
        T1.insert(tk.END,data)
        T1.config(state=DISABLED)

    # Query text from the outer input widget `T`.
    ip=T.get("1.0","end-1c")
    B1 = tkinter.Button(win, text ="Submit", command=tr)
    B1.place(x = 5, y = 5, height=20, width=80)
    ltext = Label(win, text=ip)
    #tkinter.Tk() - TO CREATE NEW WINDOW

    # NOTE(review): live Twitter credentials hard-coded — rotate them and
    # load from configuration/environment instead.
    ckey="42at9XEBHtZED548WGDuLymLx"
    csecret="cFkCeXVpxAAnJKtgca8ZnQCBLwZQKQlAmVV0ejvD9ECs9wauKs"
    atoken="725996785559293952-FYFy8coPR9D2oJcLXN3vYz9gRp5sDcy"
    asecret="p9A2fUJVFmIfUTTmku4Otn117agDrJvHK6s6cHywuRLUQ"

    try:
        class listener(StreamListener):

            def on_data(self, data):
                all_data = json.loads(data)
                tweet = all_data["text"]
                # Drop characters outside the Basic Multilingual Plane
                # (Tk cannot render them).
                char_list = [tweet[j] for j in range(len(tweet)) if ord(tweet[j]) in range(65536)]
                tweet=''
                for j in char_list:
                    tweet=tweet+j
                # `s` here is the module-level sentiment classifier, not
                # the Scrollbar local inside tr().
                sentiment_value, confidence = s.sentiment(tweet)
                if confidence*100 >= 80:
                    # Only persist high-confidence classifications.
                    output = open("tweeti.txt","a")
                    op = open("value.txt","a")
                    op.write(sentiment_value)
                    op.write('\n')
                    output.write(sentiment_value)
                    output.write('\n')
                    output.write(tweet)
                    output.write('\n\n')
                    output.close()
                    op.close()
                return(True)

            def on_error(self, status):
                print(status)

        auth = OAuthHandler(ckey, csecret)
        auth.set_access_token(atoken, asecret)
        twitterStream = Stream(auth, listener())
        twitterStream.filter(track=[ip])
    except:
        # Swallow stream errors so the GUI stays up.
        return(True)
class TwitterClient(object):
    '''
    Generic Twitter Class for sentiment analysis.

    Wraps an authenticated tweepy API handle; cleans tweets, classifies
    their TextBlob polarity, and fetches/parses search results.
    '''

    def __init__(self):
        '''
        Class constructor or initialization method.

        On authentication failure only an error is printed, so self.api
        may be left unset.
        '''
        # keys and tokens from the Twitter Dev Console
        # NOTE(review): live credentials are hard-coded — rotate them and
        # load from configuration/environment instead.
        consumer_key = 'bT6ojXqeHdb1eGZdEezzUQJY6'
        consumer_secret = 'nFtmp5yc9ITGOaVyzahg1mOHsudYCqp72FL0Bg0KIc2uGA0eYI'
        access_token = '1036843869088825344-Kxb079nlmg7mIpkdP2z2WMVOGYeMMT'
        access_token_secret = 'MkJtFStJHag1exJ5l2vKymT6Cofu1t53aNQOzxjSoULWO'

        # attempt authentication
        try:
            # create OAuthHandler object
            self.auth = OAuthHandler(consumer_key, consumer_secret)
            # set access token and secret
            self.auth.set_access_token(access_token, access_token_secret)
            # create tweepy API object to fetch tweets
            self.api = tweepy.API(self.auth)
        except Exception:  # was a bare except; don't swallow SystemExit etc.
            print("Error: Authentication Failed")

    def clean_tweet(self, tweet):
        '''
        Utility function to clean tweet text by removing links, special
        characters using simple regex statements.
        '''
        # Pattern: @mentions | any non-alphanumeric | URLs — replaced by
        # spaces, then whitespace collapsed via split/join.
        return ' '.join(
            re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t])|(\w+:\/\/\S+)", " ",
                   tweet).split())

    def get_tweet_sentiment(self, tweet):
        '''
        Utility function to classify sentiment of passed tweet using
        textblob's sentiment method
        '''
        # Classify by the sign of the TextBlob polarity score.
        analysis = TextBlob(self.clean_tweet(tweet))
        if analysis.sentiment.polarity > 0:
            return 'positive'
        elif analysis.sentiment.polarity == 0:
            return 'neutral'
        else:
            return 'negative'

    def get_tweets(self, query, count=10):
        '''
        Main function to fetch tweets and parse them.

        Returns a list of {'text', 'sentiment'} dicts (retweeted texts
        de-duplicated), or None when the API call fails.
        '''
        tweets = []

        # Fix: the search call previously ran with no error handling, so
        # any TweepError (rate limit, auth, network) propagated to the
        # caller — handle it the way the sibling clients in this file do.
        try:
            fetched_tweets = self.api.search(q=query, count=count)

            for tweet in fetched_tweets:
                parsed_tweet = {}
                parsed_tweet['text'] = tweet.text
                parsed_tweet['sentiment'] = self.get_tweet_sentiment(tweet.text)

                # De-duplicate retweeted texts: append only once.
                if tweet.retweet_count > 0:
                    if parsed_tweet not in tweets:
                        tweets.append(parsed_tweet)
                else:
                    tweets.append(parsed_tweet)

            return tweets
        except tweepy.TweepError as e:
            print("Error : " + str(e))
print(notice) return def on_warning(self, notice): """Called when a disconnection warning message arrives""" print('on_warning') print(notice) return if __name__ == '__main__': print('Bot started.') with open('data.json') as data_file: data = json.load(data_file) data_file.close() data['twitter_ids'] = [] for element in data['Discord']: data['twitter_ids'].extend(x for x in element['twitter_ids'] if x not in data['twitter_ids']) l = StdOutListener() auth = OAuthHandler(data['Twitter']['consumer_key'], data['Twitter']['consumer_secret']) auth.set_access_token(data['Twitter']['access_token'], data['Twitter']['access_token_secret']) stream = Stream(auth, l) print('Twitter stream started.') stream.filter(follow=data['twitter_ids'])
class windowDialog(QDialog): def __init__(self): super().__init__() self.setGeometry(200, 200, 600, 800) self.createUi() self.show() apiKey = 'XXXXXXXXXXXXXXXXXXX' apiSecretKey = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' accessToken = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' accessTokenSecret = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' try: self.auth = OAuthHandler(apiKey, apiSecretKey) self.auth.set_access_token(accessToken, accessTokenSecret) self.api = tweepy.API(self.auth) except: self.msgBox = QMessageBox() self.msgBox.setText("Error : Authentication failed!!") self.msgBox.show() def createUi(self): self.createWidgets() self.setWindowTitle("Twitter Sentiment Analysis") vbox = QVBoxLayout() vbox.addWidget(self.grpbox1) vbox.addWidget(self.grpbox2) vbox.addWidget(self.grpbox3) vbox.addWidget(self.grpbox4) self.setLayout(vbox) def createWidgets(self): self.grpbox1 = QGroupBox() self.grpbox2 = QGroupBox() self.grpbox3 = QGroupBox() self.grpbox4 = QGroupBox() label1 = QLabel("Querry : ") label2 = QLabel("Count : ") self.querry = QLineEdit() self.count = QLineEdit() btn1 = QPushButton("Get Sentiments!") btn2 = QPushButton("Clear!") self.sentiments = QPlainTextEdit() self.sentiments.setReadOnly(True) hbox1 = QHBoxLayout() hbox2 = QHBoxLayout() hbox3 = QHBoxLayout() hbox4 = QHBoxLayout() hbox1.addWidget(label1) hbox1.addWidget(self.querry) hbox2.addWidget(label2) hbox2.addWidget(self.count) hbox3.addWidget(btn1) hbox3.addWidget(btn2) hbox4.addWidget(self.sentiments) self.grpbox1.setLayout(hbox1) self.grpbox2.setLayout(hbox2) self.grpbox3.setLayout(hbox3) self.grpbox4.setLayout(hbox4) btn2.clicked.connect(self.clearText) btn1.clicked.connect(self.getSentiments) def clearText(self): self.querry.setText("") self.count.setText("") self.sentiments.setPlainText("") self.finalString = "" def getSentiments(self): self.main() self.sentiments.appendPlainText(self.finalString) def cleanTweet(self, tweet): return ' '.join( re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z 
\t])|(\w+:\/\/\S+)", " ", tweet).split()) def getTweetSentiment(self, tweet): analysis = TextBlob(self.cleanTweet(tweet)) if analysis.sentiment.polarity > 0: return 'positive' elif analysis.sentiment.polarity == 0: return 'neutral' else: return 'negative' def getTweets(self, query, count=20): tweets = [] try: fetched_tweets = self.api.search(q=query, count=count) for tweet in fetched_tweets: parsed_tweets = {} parsed_tweets['text'] = tweet.text parsed_tweets['sentiment'] = self.getTweetSentiment(tweet.text) if tweet.retweet_count > 0: if parsed_tweets not in tweets: tweets.append(parsed_tweets) else: tweets.append(parsed_tweets) return tweets except tweepy.TweepError as e: print("Error : " + str(e)) def main(self): tweets = window.getTweets(query=self.querry.text(), count=int(self.count.text())) self.finalString = "" ptweets = [ tweet for tweet in tweets if tweet['sentiment'] == 'positive' ] self.finalString = self.finalString + ( "Positive tweets percentage: {} %".format( 100 * len(ptweets) / len(tweets))) + "\n" ntweets = [ tweet for tweet in tweets if tweet['sentiment'] == 'negative' ] self.finalString = self.finalString + ( "Negative tweets percentage: {} %".format( 100 * len(ntweets) / len(tweets))) + "\n" self.finalString = self.finalString + ( "Neutral tweets percentage: {} % \ ".format( 100 * (len(tweets) - len(ntweets) - len(ptweets)) / len(tweets))) + "\n" self.finalString = self.finalString + ("\n\nPositive tweets:\n") i = 1 for tweet in ptweets[:5]: self.finalString = self.finalString + str(i) + ". " + str( (tweet['text'])) self.finalString = self.finalString + "\n" i = i + 1 self.finalString = self.finalString + ("\n\nNegative tweets:\n") j = 1 for tweet in ntweets[:5]: self.finalString = self.finalString + str(j) + ". " + str( (tweet['text'])) self.finalString = self.finalString + "\n" j = j + 1
def get_all_tweets(screen_name): #Twitter only allows access to a users most recent 3240 tweets with this method #authorize twitter, initialize tweepy @classmethod def parse(cls, api, raw): status = cls.first_parse(api, raw) setattr(status, 'json', json.dumps(raw)) return status # Status() is the data model for a tweet tweepy.models.Status.first_parse = tweepy.models.Status.parse tweepy.models.Status.parse = parse # User() is the data model for a user profil tweepy.models.User.first_parse = tweepy.models.User.parse tweepy.models.User.parse = parse # You need to do it for all the models you need auth = OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_token, access_secret) api = tweepy.API(auth) #initialize a list to hold all the tweepy Tweets alltweets = [] #make initial request for most recent tweets (200 is the maximum allowed count) new_tweets = api.user_timeline(screen_name=screen_name, count=10) #save most recent tweets alltweets.extend(new_tweets) #save the id of the oldest tweet less one oldest = alltweets[-1].id - 1 #keep grabbing tweets until there are no tweets left to grab while len(new_tweets) > 0: #all subsiquent requests use the max_id param to prevent duplicates new_tweets = api.user_timeline(screen_name=screen_name, count=10, max_id=oldest) #save most recent tweets alltweets.extend(new_tweets) #update the id of the oldest tweet less one oldest = alltweets[-1].id - 1 if (len(alltweets) > 15): break print("...%s tweets downloaded so far" % (len(alltweets))) #write tweet objects to JSON #file = open('tweet.json', 'w') #print "Writing tweet objects to JSON please wait..." 
#for status in alltweets: # json.dump(status._json,file,sort_keys = True,indent = 4) media_files = set() for status in alltweets: media = status.entities.get('media', []) if (len(media) > 0): media_files.add(media[0]['media_url']) #close the file print(media_files) #download image media_names = set() for media_file in media_files: filename = media_file.split("/")[-1] media_names.add(filename) wget.download(media_file) #file.close() print(media_names) #convert image to video for filename in media_names: output = filename.replace(".jpg", ".mp4") cmd = "ffmpeg -loop 1 -i " + filename + " -c:a libfdk_aac -ar 44100 -ac 2 -vf \"scale='if(gt(a,16/9),1280,-1)\':\'if(gt(a,16/9),-1,720)\', pad=1280:720:(ow-iw)/2:(oh-ih)/2\" -c:v libx264 -b:v 10M -pix_fmt yuv420p -r 30 -shortest -avoid_negative_ts make_zero -fflags +genpts -t 1 " + output os.system(cmd) #describe the content of the images # Create a Vision client. vision_client = google.cloud.vision.ImageAnnotatorClient() file = open('resul.txt', 'w') #vision_client = vision.Client() # TODO (Developer): Replace this with the name of the local image # file to analyze. for image_file_name in media_names: with io.open(image_file_name, 'rb') as image_file: content = image_file.read() # Use Vision to label the image based on content. image = google.cloud.vision.types.Image(content=content) response = vision_client.label_detection(image=image) file.write("Labels for " + image_file_name + " :\n") #print('Labels:') for label in response.label_annotations: #print(label.description) file.write(label.description + "\n")
tweetstr) phrase = gTTS(text=tweetstr, lang='en-uk') phrase.save('temp.mp3') os.system('mpg123 -q temp.mp3') os.system('rm temp.mp3') try: s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) print('[Checkpoint 06] Connecting to', host, 'on port', int(port)) s.connect((host, int(port))) print('[Checkpoint 08] Sending question:', tweetstr) s.send(tweetstr.encode()) recvdata = s.recv(int(size)) s.close() print('[Checkpoint 14] Received answer:', recvdata.decode()) except socket.error as message: if s: s.close() print("Unable to open the socket: " + str(message)) sys.exit(1) return (True) def on_error(self, status_code): print(status_code) auth = OAuthHandler(ckey, csec) auth.set_access_token(atok, asec) twitStream = Stream(auth, listener()) twitStream.filter(track=track)
def tweepy(self):
    """Returns an authentication object required by Tweepy.

    Builds an OAuth handler from this object's consumer credentials and
    loads the stored access token into it.
    """
    handler = OAuthHandler(self.consumer_key, self.consumer_secret)
    handler.set_access_token(self.access_token, self.access_token_secret)
    return handler
class TwitterClient(object):
    '''
    Generic Twitter Class for sentiment analysis.

    Wraps an authenticated tweepy API handle; cleans tweets, classifies
    their TextBlob polarity, and fetches/parses search results.
    '''

    def __init__(self):
        '''
        Class constructor or initialization method.

        On authentication failure only an error is printed, so self.api
        may be left unset.
        '''
        # keys and tokens from the Twitter Dev Console (placeholders to
        # be filled in before running)
        consumer_key = 'XXXXXXXXXXXXXXXXXXXXXXXX'
        consumer_secret = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXX'
        access_token = 'XXXXXXXXXXXXXXXXXXXXXXXXXXXX'
        access_token_secret = 'XXXXXXXXXXXXXXXXXXXXXXXXX'

        # attempt authentication
        try:
            # create OAuthHandler object
            self.auth = OAuthHandler(consumer_key, consumer_secret)
            # set access token and secret
            self.auth.set_access_token(access_token, access_token_secret)
            # create tweepy API object to fetch tweets
            self.api = tweepy.API(self.auth)
        except:  # NOTE(review): bare except hides the real failure cause
            print("Error: Authentication Failed")

    def clean_tweet(self, tweet):
        '''
        Utility function to clean tweet text by removing links, special
        characters using simple regex statements.
        '''
        # NOTE(review): the pattern contains a literal space before the
        # second '|' (" \t]) |"), so the middle alternative only matches
        # a special character followed by a space — sibling versions in
        # this codebase omit the space; confirm whether it is intended.
        return ' '.join(re.sub("(@[A-Za-z0-9]+)|([^0-9A-Za-z \t]) |(\w+:\/\/\S+)", " ", tweet).split())

    def get_tweet_sentiment(self, tweet):
        '''
        Utility function to classify sentiment of passed tweet using
        textblob's sentiment method
        '''
        # create TextBlob object of passed tweet text
        analysis = TextBlob(self.clean_tweet(tweet))
        # set sentiment from the sign of the polarity score
        if analysis.sentiment.polarity > 0:
            return 'positive'
        elif analysis.sentiment.polarity == 0:
            return 'neutral'
        else:
            return 'negative'

    def get_tweets(self, query, count = 10):
        '''
        Main function to fetch tweets and parse them.

        Returns a list of {'text', 'sentiment'} dicts (retweeted texts
        de-duplicated), or None when the API call raises TweepError.
        '''
        # empty list to store parsed tweets
        tweets = []

        try:
            # call twitter api to fetch tweets
            fetched_tweets = self.api.search(q = query, count = count)

            # parsing tweets one by one
            for tweet in fetched_tweets:
                # empty dictionary to store required params of a tweet
                parsed_tweet = {}

                # saving text of tweet
                parsed_tweet['text'] = tweet.text
                # saving sentiment of tweet
                parsed_tweet['sentiment'] = self.get_tweet_sentiment(tweet.text)

                # appending parsed tweet to tweets list
                if tweet.retweet_count > 0:
                    # if tweet has retweets, ensure that it is appended only once
                    if parsed_tweet not in tweets:
                        tweets.append(parsed_tweet)
                else:
                    tweets.append(parsed_tweet)

            # return parsed tweets
            return tweets

        except tweepy.TweepError as e:
            # print error (if any)
            print("Error : " + str(e))