def main():
    """Search past tweets for the event titles in the EONet JSON, keep only
    geotagged tweets whose location falls inside each event's polygon, save
    the matches to a SQLite database, then watch the live stream for more.
    """
    # Pull the current natural-event feed first; everything else keys off it.
    event_feed = EONet()

    # One-off historical search, done before any live monitoring or REST
    # request handling starts.
    search_tweets(event_feed)

    # Watch the Twitter live stream for tweets relating to the EONet events.
    monitor_tweets(event_feed)
def fillCategory(catg):
    """Fetch the most recent tweets matching *catg* and persist each one as a
    new Tweet row.

    Text and author are stored UTF-8 encoded; the datetime field is the
    literal string 'Today'.
    """
    results = twitter_search.search_tweets(catg, result_type="recent")
    for status in results:
        Tweet.objects.create(
            text=status.text.encode('utf-8'),
            author=status.from_user.encode('utf-8'),
            category=catg,
            datetime='Today',
            avatar=status.profile_image_url,
        )
def updateCategory(catg):
    """Refresh the stored Tweet rows for *catg* in place with the most recent
    search results.

    Stored rows are overwritten pairwise with the newest search results.  If
    the category has no stored rows, nothing happens.  If the search returns
    more tweets than rows exist, the surplus results are ignored — the
    original manual counter indexed `tweetObjects[j]` unconditionally and
    could raise IndexError in that case.
    """
    tweetObjects = Tweet.objects.filter(category=catg)
    if tweetObjects.count():
        timeline = twitter_search.search_tweets(catg, result_type="recent")
        # zip stops at the shorter sequence, so we never index past the
        # stored rows (fixes the old code's potential IndexError).
        for tweet, t in zip(timeline, tweetObjects):
            t.text = tweet.text.encode('utf-8')
            t.author = tweet.from_user.encode('utf-8')
            t.category = catg
            t.datetime = 'Today'
            t.avatar = tweet.profile_image_url
            t.save()
def get_tweets(search_word, number_of_tweets):
    """Run a tweet search and normalize the results.

    Parameters
    ----------
    search_word :
        Query passed straight through to ``search_tweets``.
    number_of_tweets :
        Result count passed straight through to ``search_tweets``.

    Returns
    -------
    tuple
        ``(tweets, text)`` where ``tweets`` is a list of dicts with keys
        ``tweet_text``, ``username``, ``posted_date`` and ``img_source``,
        and ``text`` is every tweet body concatenated, each prefixed with
        a single space (empty string when there are no results).
    """
    raw_tweets = search_tweets(search_word, number_of_tweets)
    tweets = []
    bodies = []  # bodies collected once so the combined text is a single join (O(n), not O(n^2) +=)
    for tweet in raw_tweets:
        # Standard statuses expose `.text`; extended-mode ones `.full_text`.
        body = tweet.text if hasattr(tweet, 'text') else tweet.full_text
        bodies.append(body)
        tweets.append({
            'tweet_text': body,
            'username': tweet.author.name,
            'posted_date': tweet.created_at,
            'img_source': tweet.user.profile_image_url,
        })
    # Matches the original accumulation exactly: " " + body per tweet.
    text = "".join(" " + body for body in bodies)
    return tweets, text