def verTwitter(request):
    """Render 'twitter.html' with link-bearing tweets from two accounts.

    Pulls 3 pages x 200 tweets for @cvander and @freddier and keeps only
    tweets whose text contains "http", recording text, retweet count,
    first expanded URL and creation date.
    """
    twitter = Twython()
    recorretwiter = []

    def _collect(screen_name, page):
        # Shared per-user collection logic (was duplicated inline for
        # each screen name).
        timeline = twitter.getUserTimeline(screen_name=screen_name,
                                           count="200", page=page,
                                           include_entities="true")
        for t in timeline:
            if t['text'].find("http") >= 0:
                recorretwiter.append({
                    'text': t['text'],
                    'user': "******",  # NOTE(review): placeholder value kept from original
                    'retwits': t['retweet_count'],
                    'url': t['entities']['urls'][0]['expanded_url'],
                    'fecha': t['created_at']})

    # Same interleaving as the original: cvander then freddier per page.
    for page in range(1, 4):
        for name in ("cvander", "freddier"):
            _collect(name, page)

    t = get_template('twitter.html')
    c = RequestContext(request, {'user_timeline': recorretwiter})
    html = t.render(c)
    return HttpResponse(html)
def agent():
    """Archive a user's tweets to an OpenStack Swift container.

    Resolves the object-store endpoint for the configured region, loads the
    newest existing archive object (if any) to recover the accumulated tweet
    list and the id of the last archived tweet, then fetches newer tweets
    from the user's timeline.
    """
    keystone = client.Client(token=ca.creds['token'],
                             tenant_id=ca.creds['tenantId'],
                             auth_url=ca.creds['identity_url'])
    object_store_catalog = keystone.service_catalog.get_endpoints()['object-store']
    # Pick the endpoint matching the configured region; fail hard otherwise.
    region_endpoints = None
    for endpoints in object_store_catalog:
        if endpoints['region'] == ca.conf['region']:
            region_endpoints = endpoints
    if not region_endpoints:
        ca.log_fail("Failing, region not found in endpoint list.")
        exit()
    t = Twython()
    # Figure out what files already exist, and what our latest tweet is.
    files = []
    try:
        (headers,files) = swiftclient.get_container(region_endpoints['publicURL'],ca.creds['token'],
            ca.conf['container'],full_listing=True,
            prefix=ca.conf['path'])
    except swiftclient.client.ClientException:
        # Container may not exist yet; treat as an empty listing.
        pass
    files = sorted(files, key=itemgetter('name'))
    last_tweet = 0
    last_file = ''
    tweet_list = []
    if files:
        # Newest object (last after name sort) holds the JSON tweet list;
        # its metadata carries the id of the last archived tweet.
        (headers,last_file) = swiftclient.get_object(region_endpoints['publicURL'],ca.creds['token'],
            ca.conf['container'],files[-1]['name'])
        headers = swiftclient.head_object(region_endpoints['publicURL'],ca.creds['token'],
            ca.conf['container'],files[-1]['name'])
        last_tweet = headers.get('x-object-meta-last-tweet-id',0)
        tweet_list = json.loads(last_file)
    # Grab our tweet list (tweets since last tweet up to 3200), optimized for
    # fewest requests.
    try:
        if last_tweet:
            tweets = t.getUserTimeline(screen_name=ca.conf['screen_name'],
                count=200, since_id=last_tweet, include_rts=True)
        else:
            tweets = t.getUserTimeline(screen_name=ca.conf['screen_name'],
                count=200, include_rts=True)
    except TwythonError, e:
        ca.log_fail("Error accessing twitter stream. User not found or twitter down.")
        exit()
def listUrl(request):
    """Render 'lista_url.html' with link URLs aggregated by total retweets.

    Scans 3 pages x 200 tweets for @cvander and @freddier, sums retweet
    counts per expanded URL, then sorts descending with quicksort().
    """
    twitter = Twython()
    listaTwitter = []   # aggregated {'retwits', 'url'} dicts, first-seen order
    url_index = {}      # expanded_url -> position in listaTwitter (O(1) lookup
                        # replacing the original O(n) scan per tweet)

    def _acumular(tweet):
        # Accumulate retweet counts per expanded URL; logic was duplicated
        # verbatim for each screen name in the original.
        url = tweet['entities']['urls'][0]['expanded_url']
        if url in url_index:
            listaTwitter[url_index[url]]['retwits'] += tweet['retweet_count']
        else:
            url_index[url] = len(listaTwitter)
            listaTwitter.append({'retwits': tweet['retweet_count'],
                                 'url': url})

    for page in range(1, 4):
        for name in ("cvander", "freddier"):
            timeline = twitter.getUserTimeline(screen_name=name, count="200",
                                               page=page,
                                               include_entities="true")
            for t in timeline:
                if t['text'].find("http") >= 0:
                    _acumular(t)

    listaOrdenada = quicksort(listaTwitter, 0, len(listaTwitter) - 1, 'desc')
    t = get_template('lista_url.html')
    c = RequestContext(request, {'twits': listaOrdenada})
    html = t.render(c)
    return HttpResponse(html)
def handle(self, *args, **options):
    """Repair Status rows missing text/user/created_at by re-fetching them.

    Caches fetched statuses and per-user timeline pages so each broken id
    costs at most one showStatus call plus one timeline request.
    """
    api = Twython()
    cached_statuses = {}        # status id -> reconstructed Status
    cached_user_since_ids = {}  # user id -> smallest status id fetched so far
    cached_user_max_ids = {}    # user id -> max_id paging cursor
                                # NOTE(review): never written in this block;
                                # max_id is therefore always None -- confirm
                                # whether paging was meant to advance.
    statuses = (Status.objects.filter(text__isnull=True) |
                Status.objects.filter(user__isnull=True) |
                Status.objects.filter(created_at__isnull=True))
    self.stdout.write("Broken statuses: {0}\n".format(len(statuses)))
    for s in statuses:
        if s.id in cached_statuses:
            status = cached_statuses[s.id]
        else:
            try:
                status = Status.construct_from_dict(api.showStatus(id=s.id))
            except APILimit:
                # Out of API quota: abort the whole run, not just this row.
                self.stdout.write('Rate limit exceeded\n')
                break
            except TwythonError:
                # Status unavailable (deleted/protected); skip it.
                continue
        try:
            max_id = cached_user_max_ids[status.user.id]
        except KeyError:
            max_id = None
        try:
            # Prefetch up to 200 of this user's statuses to warm the cache.
            userstatuses = Status.construct_from_dicts(
                api.getUserTimeline(user_id=status.user.id, count=200,
                                    max_id=max_id))
            for us in userstatuses:
                cached_statuses[us.id] = us
            # NOTE(review): the comprehension variable `s` shadows the outer
            # loop variable `s` here.
            cached_user_since_ids[status.user.id] = min([s.id for s in userstatuses])
        except ValueError:
            pass
        status.save()
        self.stdout.write('Repaired Status id {0}\n'.format(status.id))
def parseTweets(username, hashtag): try: from twython import Twython twitter = Twython() if(username is not None): tweets = twitter.getUserTimeline( screen_name = username ) for t in tweets: it = Item(title=texto2Unicode(t["text"]), desc=texto2Unicode(t["text"]), author=texto2Unicode(t["user"]["screen_name"]), category=texto2Unicode("twitter"), link="", img=texto2Unicode(t["user"]["profile_image_url_https"]), pub_date=texto2Unicode(t["created_at"])) checkAndSaveEntry(it) if(hashtag is not None): twhash = twitter.search(q = hashtag) for t in twhash["results"]: it = Item(title=texto2Unicode(t["text"]), desc=texto2Unicode(t["text"]), author=texto2Unicode(t["from_user"]), category=texto2Unicode("twitter"), link="", img=texto2Unicode(t["profile_image_url_https"]), pub_date=texto2Unicode(t["created_at"])) checkAndSaveEntry(it) except Exception, e: print("ExceptionTW: %s" %e) return 0
def handle(self, *args, **options): twython = Twython( app_key=settings.TWITTER_APP_KEY, app_secret=settings.TWITTER_APP_SECRET, oauth_token=settings.TWITTER_OAUTH_TOKEN, oauth_token_secret=settings.TWITTER_OAUTH_SECRET, ) legs = Officials.objects.filter(active="True").exclude(twitter="").only("legid", "fullname", "twitter") print "Downloading new tweets for legislators." for counter, o in enumerate(legs): try: timeline = twython.getUserTimeline(screen_name=o.twitter)[:30] except: print "Could not download tweets for {0} (id: {1}, handle: {2})".format(o.fullname, o.legid, o.twitter) continue for t in timeline: if OfficialTweets.objects.filter(tweet_id=t["id_str"]).exists(): continue else: new_tweet = OfficialTweets.objects.create( legid=o.legid, tweet=t["text"], tweet_id=t["id_str"], timestamp=dp.parse(t["created_at"]) ) new_tweet.save() print "Downloading Twitter Oembed HTML for tweets" for tweet in OfficialTweets.objects.filter(oembed="").order_by("-timestamp"): oembed_dict = twython.getOembedTweet(id=tweet.tweet_id) tweet.oembed = oembed_dict["html"] tweet.save()
def parseTweets(username, hashtag):
    """Persist tweets as Items via checkAndSaveEntry.

    Stores *username*'s timeline and/or *hashtag* search results.
    Returns 0 on any failure (the exception is printed, not raised).
    """
    try:
        from twython import Twython
        twitter = Twython()
        if (username is not None):
            tweets = twitter.getUserTimeline(screen_name=username)
            for t in tweets:
                # Map timeline fields onto the Item model.
                it = Item(title=texto2Unicode(t["text"]),
                          desc=texto2Unicode(t["text"]),
                          author=texto2Unicode(t["user"]["screen_name"]),
                          category=texto2Unicode("twitter"),
                          link="",
                          img=texto2Unicode(
                              t["user"]["profile_image_url_https"]),
                          pub_date=texto2Unicode(t["created_at"]))
                checkAndSaveEntry(it)
        if (hashtag is not None):
            twhash = twitter.search(q=hashtag)
            for t in twhash["results"]:
                # Search results use a flatter schema ("from_user",
                # top-level profile image) than timeline entries.
                it = Item(title=texto2Unicode(t["text"]),
                          desc=texto2Unicode(t["text"]),
                          author=texto2Unicode(t["from_user"]),
                          category=texto2Unicode("twitter"),
                          link="",
                          img=texto2Unicode(t["profile_image_url_https"]),
                          pub_date=texto2Unicode(t["created_at"]))
                checkAndSaveEntry(it)
    except Exception, e:
        # Best-effort: any failure is reported and swallowed.
        print("ExceptionTW: %s" % e)
        return 0
def handle(self, *args, **options):
    """Download new tweets for active legislators, then fetch oEmbed HTML
    for any stored tweet still missing it."""
    twython = Twython(app_key=settings.TWITTER_APP_KEY,
                      app_secret=settings.TWITTER_APP_SECRET,
                      oauth_token=settings.TWITTER_OAUTH_TOKEN,
                      oauth_token_secret=settings.TWITTER_OAUTH_SECRET)
    legs = Officials.objects.filter(active='True').exclude(twitter='').only('legid', 'fullname', 'twitter')
    print "Downloading new tweets for legislators."
    for counter, o in enumerate(legs):
        try:
            # Keep only the 30 most recent tweets per handle.
            timeline = twython.getUserTimeline(screen_name=o.twitter)[:30]
        except:
            # NOTE(review): bare except also traps KeyboardInterrupt;
            # consider narrowing to Exception.
            print "Could not download tweets for {0} (id: {1}, handle: {2})".format(o.fullname, o.legid, o.twitter)
            continue
        for t in timeline:
            if OfficialTweets.objects.filter(tweet_id=t['id_str']).exists():
                # Already stored.
                continue
            else:
                new_tweet = OfficialTweets.objects.create(legid=o.legid,
                                                          tweet=t['text'],
                                                          tweet_id=t['id_str'],
                                                          timestamp=dp.parse(t['created_at']))
                new_tweet.save()
    print "Downloading Twitter Oembed HTML for tweets"
    for tweet in OfficialTweets.objects.filter(oembed='').order_by('-timestamp'):
        oembed_dict = twython.getOembedTweet(id=tweet.tweet_id)
        tweet.oembed = oembed_dict['html']
        tweet.save()
def ajax_user_timeline(request):
    """AJAX view: render one page of *screenname*'s timeline as HTML.

    Requires an authenticated session holding twitter tokens (empty
    response otherwise). GET parameters: screenname, max_id, page;
    missing parameters yield 400.
    """
    if not request.user.is_authenticated() or 'twitter_tokens' not in request.session:
        return HttpResponse("")
    results = {'success': 'False'}
    if request.method != u'GET':
        return HttpResponseBadRequest('Must be GET request')
    # dict.has_key() is deprecated (removed in Python 3); use `in`.
    if u'screenname' not in request.GET:
        return HttpResponseBadRequest('screenname missing')
    if u'max_id' not in request.GET:
        return HttpResponseBadRequest('start id missing')
    if u'page' not in request.GET:
        return HttpResponseBadRequest('page number missing')
    screenname = request.GET[u'screenname']
    max_id = request.GET[u'max_id']
    page = request.GET[u'page']
    if 'twitter_tokens' in request.session:
        twitter_tokens = request.session['twitter_tokens']
        api = get_authorized_twython(twitter_tokens)
    else:
        # Get public api if no authentication possible.
        # (Unreachable given the guard above; kept as a safe fallback.)
        api = Twython()
    results['statuses'] = api.getUserTimeline(screen_name=screenname,
                                              max_id=max_id, page=page)
    t = get_template('twitter/status_list.html')
    results['success'] = 'True'
    html = t.render(RequestContext(request, results))
    return HttpResponse(html)
def userTL(user=''):
    """Return tokenize() of the concatenated timeline text for *user*.

    Fetches up to 200 tweets; each tweet's text is ascii-coerced
    (non-ascii replaced) and joined with leading spaces, matching the
    original accumulator's output exactly.
    """
    twitter = Twython()
    user_timeline = twitter.getUserTimeline(screen_name=user, count='200')
    # ''.join avoids the original's quadratic string concatenation.
    pieces = [' ' + tweet['text'].encode('ascii', 'replace')
              for tweet in user_timeline]
    return tokenize(''.join(pieces))
def userTL(user=''):
    """Tokenize the space-joined text of up to 200 tweets from *user*."""
    api = Twython()
    timeline = api.getUserTimeline(screen_name=user, count='200')
    collected = ''
    for status in timeline:
        # Coerce to ascii, replacing anything unencodable.
        collected = collected + ' ' + status['text'].encode('ascii', 'replace')
    return tokenize(collected)
def get_tweets_by_user(request, username):
    """Fetch *username*'s 20 most recent tweets using the session's OAuth
    credentials and persist them via save_tweets()."""
    token = request.session['authorization_token']
    secret = request.session['authorization_token_secret']
    api = Twython(settings.APP_KEY, settings.APP_SECRET, token, secret)
    return save_tweets(api.getUserTimeline(screen_name=username, count=20))
def get_opta_tweet():
    """Cache the latest @OptaJoe tweets in the redis hash 'opta_tweet'
    (refreshed only when the hash is absent; expires after one day)."""
    print "getting latest opta tweets"
    print "Does tweet dict exist?"
    print r.exists('opta_tweet')
    if not r.exists('opta_tweet'):
        # NOTE(review): API credentials are hard-coded in source control --
        # rotate them and load from configuration instead.
        t = Twython(app_key='nfxOwOrDiKuyG4AQzT3iSw',
                    app_secret='6ogL1EZ7qNDmwvgz7zSCEe8b9AZ9pyjtEMB2ebwUWQs',
                    oauth_token='32849299-qCdJK4y9uaDw03NWeVWuInydqYospWCSCQOfvxhjc',
                    oauth_token_secret='ReLXHkrwrKIdyqSAbjAOMXbJ8CmsNZS9dvvYJ5q4')
        auth_tokens = t.get_authorized_tokens()
        opta_timeline = t.getUserTimeline(screen_name="optajoe",count="50")
        for tweet in opta_timeline:
            # Strip non-ascii so the stored text is safe for consumers.
            text = unicodedata.normalize('NFKD', tweet['text']).encode('ascii','ignore')
            tweet_url = "http://twitter.com/Optajoe/status/%s"%tweet['id_str']
            r.hset('opta_tweet',tweet_url,text)
        # Expire the whole hash after 24h so it gets rebuilt.
        r.expire('opta_tweet', 86400)
def get_opta_tweet(): print "getting latest opta tweets" print "Does tweet dict exist?" print r.exists('opta_tweet') if not r.exists('opta_tweet'): t = Twython( app_key='nfxOwOrDiKuyG4AQzT3iSw', app_secret='6ogL1EZ7qNDmwvgz7zSCEe8b9AZ9pyjtEMB2ebwUWQs', oauth_token='32849299-qCdJK4y9uaDw03NWeVWuInydqYospWCSCQOfvxhjc', oauth_token_secret='ReLXHkrwrKIdyqSAbjAOMXbJ8CmsNZS9dvvYJ5q4') auth_tokens = t.get_authorized_tokens() opta_timeline = t.getUserTimeline(screen_name="optajoe", count="50") for tweet in opta_timeline: text = unicodedata.normalize('NFKD', tweet['text']).encode( 'ascii', 'ignore') tweet_url = "http://twitter.com/Optajoe/status/%s" % tweet['id_str'] r.hset('opta_tweet', tweet_url, text) r.expire('opta_tweet', 86400)
def GetTweets(file, USERS=None, COUNT_DEFAULT=50, COUNT={}, **kwargs): """ COUNT_DEFAULT is the default number of tweets to get for each user, which falls back on 50. COUNT is a dictionary with with keys as tweeters and value the number of tweets to get. Falls back on COUNT_DEFAULT """ api = Twython() t = {'statuses': {}, 'users': {}} for u in USERS: try: n = COUNT[u] except: n = COUNT_DEFAULT try: user=api.showUser(u) timeline = api.getUserTimeline(id=u, count=n) t['users'][user['id']] = user t['statuses'][user['id']] = timeline except: print "User: '******' has protected tweets".format(u) pickle.dump(t, file)
def gnius(user='******'): nltk.data.load('tokenizers/punkt/spanish.pickle') stops_es=stopwords.words('spanish') stops_en=stopwords.words('english') twitter = Twython() #user=raw_input('user: '******'\nUser Description\n\n' lookup = twitter.lookupUser(screen_name=user) lookup = lookup[0]['description'].encode('ascii', 'replace') lookup_t = nltk.word_tokenize(lookup) stops_custom = ['http'] cleaned_lookup = [word.lower() for word in lookup_t if word not in stops_es and word not in stops_custom and word not in stops_en and word.isalpha() and len(word)>2] print cleaned_lookup print '\nUser Timeline\n\n' # We won't authenticate for this, but sometimes it's necessary user_timeline = twitter.getUserTimeline(screen_name=user, count='200') all_tweets='' for tweet in user_timeline: text = tweet['text'].encode('ascii', 'replace') all_tweets +=' ' + text #print '\nTexto\n\n%s' % (all_tweets) all_tweets_t = nltk.word_tokenize(all_tweets) cleaned_tweets = [word.lower() for word in all_tweets_t if word not in stops_es and word not in stops_custom and word not in stops_en and word.isalpha() and len(word)>2] print '\nTokens\n' print cleaned_tweets c = Counter(cleaned_tweets) print '\nCommon terms\n' t=c.most_common(5) d={} for x in t: d[] j=json.dumps(d) print j #t=nltk.Text(cleaned_tweets) #t=({'user':user}) return j
class TwitterServer:
    """ROS node wrapping the Twitter API via Twython.

    Publishes home timeline, mentions and direct messages; advertises ROS
    services for posting, retweeting, (un)following and DMs. NOTE(review):
    this class was reconstructed from a whitespace-mangled source; the
    exact nesting of a few statements is a best-effort reading -- confirm
    against the original file. timer_*_cb and process_tweets are referenced
    but not visible in this chunk.
    """

    def __init__(self):
        # You can choose to latch tweets topics
        latch = False
        if rospy.has_param('latch'):
            latch = rospy.get_param('latch')
        # In case you can't direct message a user, replace DM with a public
        # '@user text' tweet.
        self.replace_dm = False
        if rospy.has_param('replace_dm'):
            self.replace_dm = rospy.get_param('replace_dm')
        # Publish mentions, home timeline and direct messages
        self.pub_home = rospy.Publisher('home_timeline', Tweets, latch = latch)
        self.pub_mentions = rospy.Publisher('mentions', Tweets, latch = latch)
        self.pub_dm = rospy.Publisher('direct_messages', Tweets, latch = latch)
        # Create a bridge for images conversions
        self.bridge = CvBridge()
        # Last Tweets (init values are twitter API default)
        self.last_mention = 12345
        self.last_timeline = 12345
        self.last_dm = 12345
        oauth_token = None
        oauth_token_secret = None
        # Get OAuth info through parameter server
        if rospy.has_param('~token'):
            oauth_token = rospy.get_param('~token')
        if rospy.has_param('~token_secret'):
            oauth_token_secret = rospy.get_param('~token_secret')
        # OAuth token creation (see get_access_token.py from python-twitter)
        if oauth_token is None or oauth_token_secret is None:
            rospy.loginfo("No OAuth information given, trying to create...")
            t = Twython(
                app_key = 'HbAfkrfiw0s7Es4TVrpSuw',
                app_secret = 'oIjEOsEbHprUa7EOi3Mo8rNBdQlHjTGPEpGrItZj8c')
            # Get AUth URL. Use for login for security,
            url = t.get_authentication_tokens()
            t = Twython(
                app_key = 'HbAfkrfiw0s7Es4TVrpSuw',
                app_secret = 'oIjEOsEbHprUa7EOi3Mo8rNBdQlHjTGPEpGrItZj8c',
                oauth_token = url['oauth_token'],
                oauth_token_secret = url['oauth_token_secret'])
            # Open web browser on given url
            import webbrowser
            webbrowser.open( url['auth_url'] )
            rospy.logwarn("Log your twitter, allow TwitROS and copy pincode.")
            # Wait to avoid webbrowser to corrupt raw_input
            rospy.sleep( rospy.Duration( 5 ) )
            # Enter pincode
            pincode = raw_input('Pincode: ').strip()
            auth_props = t.get_authorized_tokens(oauth_verifier = pincode)
            oauth_token = auth_props['oauth_token']
            oauth_token_secret = auth_props['oauth_token_secret']
            rospy.loginfo("Using the following parameters for oauth: " +
                          'key: [{key}], '.format(key = oauth_token) +
                          'secret: [{secret}]'.format(secret = oauth_token_secret))
        # Consumer key and secret are specific to this App.
        # Access token are given through OAuth for those consumer params
        rospy.loginfo('Trying to log into Twitter API...')
        # Twython
        self.t = Twython(
            app_key = 'HbAfkrfiw0s7Es4TVrpSuw',
            app_secret = 'oIjEOsEbHprUa7EOi3Mo8rNBdQlHjTGPEpGrItZj8c',
            oauth_token = oauth_token,
            oauth_token_secret = oauth_token_secret)
        result = self.t.verifyCredentials();
        rospy.loginfo("Twitter connected as {name} (@{user})!"
                      .format(name = result['name'], user = result['screen_name']))
        # Stock screen name (used to show friendships)
        self.name = result['screen_name']
        # Advertise services
        self.post = rospy.Service('post_tweet', Post, self.post_cb)
        self.retweet = rospy.Service('retweet', Id, self.retweet_cb)
        self.follow = rospy.Service('follow', User, self.follow_cb)
        self.unfollow = rospy.Service('unfollow', User, self.unfollow_cb)
        self.post_dm = rospy.Service('post_dm', DirectMessage, self.post_dm_cb)
        self.destroy = rospy.Service('destroy_dm', Id, self.destroy_cb)
        self.timeline = rospy.Service(
            'user_timeline', Timeline, self.user_timeline_cb)
        # Create timers for tweet retrieval. Use oneshot and retrigger
        timer_home = rospy.Timer(
            rospy.Duration(1), self.timer_home_cb, oneshot = True )
        timer_mentions = rospy.Timer(
            rospy.Duration(2), self.timer_mentions_cb, oneshot = True )
        timer_dm = rospy.Timer(
            rospy.Duration(3), self.timer_dm_cb, oneshot = True )

    # Tweet callback
    def post_cb(self, req):
        # Post req.text (split into <=140-char tweets), optionally with
        # images; returns the id of the last status posted or None on error.
        txt = req.text
        rospy.logdebug("Received a tweet: " + txt)
        # If only one picture, use twitter upload
        if len(req.images) == 1:
            path = self.save_image( req.images[0] )
            first = True
            for tweet in self.split_tweet( txt ):
                if (req.reply_id == 0):
                    if first:
                        result = self.t.updateStatusWithMedia(
                            file_ = path, status = tweet )
                        first = False
                    else:
                        result = self.t.updateStatus( status = tweet )
                else:
                    if first:
                        result = self.t.updateStatusWithMedia(
                            file_ = path, status = tweet,
                            in_reply_status_id = req.reply_id )
                        first = False
                    else:
                        result = self.t.updateStatus(
                            status = tweet,
                            in_reply_to_status_id = req.reply_id )
                # Check response for each update.
                if self.t.get_lastfunction_header('status') != '200 OK':
                    return None
            os.system('rm -f ' + path)
        elif len(req.images) != 0:
            txt += upload( req.images )
        # Publish after splitting.
        # NOTE(review): as reconstructed, the single-image branch above falls
        # through to this loop and would post the text again -- confirm the
        # original nesting.
        for tweet in self.split_tweet( txt ):
            if (req.reply_id == 0):
                result = self.t.updateStatus( status = tweet )
            else:
                result = self.t.updateStatus(
                    status = tweet, in_reply_to_status_id = req.reply_id )
            if self.t.get_lastfunction_header('status') != '200 OK':
                return None
        return PostResponse(id = result['id'])

    def retweet_cb(self, req):
        # Retweet by id; None signals failure to the ROS service layer.
        result = self.t.retweet( id = req.id )
        if self.t.get_lastfunction_header('status') != '200 OK':
            return None
        else:
            return IdResponse()

    # Does not raise an error if you are already following the user
    def follow_cb(self, req):
        rospy.logdebug("Asked to follow:" + req.user)
        result = self.t.createFriendship( screen_name = req.user )
        if self.t.get_lastfunction_header('status') != '200 OK':
            return None
        else:
            return UserResponse()

    # Does not raise an error if you are not following the user
    def unfollow_cb(self, req):
        rospy.logdebug("Asked to unfollow:" + req.user)
        result = self.t.destroyFriendship( screen_name = req.user )
        if self.t.get_lastfunction_header('status') != '200 OK':
            return None
        else:
            return UserResponse()

    # Send direct message.
    def post_dm_cb(self, req):
        # Try a real DM; fall back to a public '@user' tweet when allowed.
        rospy.logdebug("Received DM to " + req.user + ": " + req.text)
        # First, check if you can dm the user
        relation = self.t.showFriendship(
            source_screen_name = self.name, target_screen_name = req.user )
        if self.t.get_lastfunction_header('status') != '200 OK':
            rospy.logerr("Failed to get friendship information.")
            return None
        # CASE 1: If can, send a direct message
        if relation['relationship']['source']['can_dm']:
            txt = req.text
            # Upload image to postimage.org using requests
            if len(req.images):
                txt += self.upload( req.images )
            for dm in self.split_tweet( txt ):
                result = self.t.sendDirectMessage(
                    screen_name = req.user, text = dm )
                if self.t.get_lastfunction_header('status') != '200 OK':
                    return None
            # Return the id of the last DM posted.
            return DirectMessageResponse(id = result['id'])
        # CASE 2: If Cant dm but allowed to tweet instead, tweet with mention
        elif self.replace_dm:
            rospy.logwarn("You can't send a direct message to " + req.user +
                          ". Sending a public tweet instead...")
            # One image ---> Twitter
            if len(req.images) == 1 :
                path = self.save_image( req.images[0] )
                first = True
                for tweet in self.split_tweet( req.text ):
                    if first:
                        result = self.t.updateStatusWithMedia(
                            file_ = path, status = tweet )
                        first = False
                    else:
                        result = self.t.updateStatus( status = tweet )
                    if self.t.get_lastfunction_header('status') != '200 OK':
                        return None
                os.system('rm -rf ' + path)
                # Return the id of the last DM posted.
                return DirectMessageResponse(id = result['id'])
            else:
                status = '@' + req.user + ' ' + req.text
                # Many images ---> postimage.org
                if len(req.images) != 0:
                    status += upload( req.images )
                for tweet in self.split_tweet( status ):
                    result = self.t.updateStatus( status = tweet )
                    if self.t.get_lastfunction_header('status') != '200 OK':
                        return None
                # Return the id of the last DM posted.
                return DirectMessageResponse(id = result['id'])
        # CASE 3: If can't.
        else:
            rospy.logwarn("You can't send a direct message to " + req.user)
            return None

    def destroy_cb(self, req):
        # Delete a direct message by id.
        result = self.t.destroyDirectMessage( id = req.id )
        if self.t.get_lastfunction_header('status') != '200 OK':
            return None
        else:
            return IdResponse()

    def user_timeline_cb(self, req):
        # Fetch a user's timeline and convert it to the Tweets message type.
        result = self.t.getUserTimeline( screen_name = req.user )
        if self.t.get_lastfunction_header('status') != '200 OK':
            return None
        else:
            msg = self.process_tweets( result )
            if msg:
                return TimelineResponse( tweets = msg )
            else:
                rospy.logwarn(req.user + " has no tweets in its timeline.")
                return TimelineResponse( )

    # Split a long text into 140 chars tweets
    def split_tweet(self, text):
        tweet = ""
        tweets = []
        words = text.split(' ')
        for word in words:
            if len(tweet + word + " ") > 137:
                tweets.append(tweet.strip() + "...")
                # If tweets is intended to a user, begin all tweets with @user
                if text.startswith('@'):
                    tweet = words[0] + " " + word + " "
                else:
                    # NOTE(review): this branch drops the overflowing word --
                    # confirm against the original.
                    tweet = "..."
            else:
                tweet = tweet + word + " "
        tweets.append( tweet.strip() )
        return tweets

    # Upload array of sensor_msgs/Image to postimage.org and return link.
    # Link is shortened if possible.
    def upload(self, images):
        files = {}
        paths = []  # Keep paths stored to remove files later
        # Construct files dict
        i = 0
        for image in images:
            paths.append( self.save_image( image ) )
            files['upload[{count}]'.format(count = i)] = open(paths[i], 'rb')
            i += 1
        # Post using requests
        request = requests.post('http://postimage.org/index.php',
                                files = files,
                                params = {'optsize': 0, 'adult': 'no'})
        # Cleanup saved files
        for path in paths:
            os.system('rm -rf ' + path)
        if not request.status_code in [301, 201, 200]:
            return " [FAILED UPLOAD]"
        # Parse HTML page returned using beautifulsoup
        soup = BeautifulSoup(request.text, 'html.parser')
        # Find the image link: Hacked that by looking at raw html file
        url = ""
        if len(images) == 1:
            # 1 image : find first solo link containg 'postimg'
            for link in soup.find_all('a'):
                if link.get('href').find('postimg') != -1:
                    url = link.get('href')
                    break
        else:
            # Many images: find the option field for gallery url
            for option in soup.find_all('option'):
                if option.get('value').find('gallery') != -1:
                    url = option.get('value')
                    break
        if not len(url):
            return " [FAILED PARSING OR UPLOAD]"
        # Shorten URL
        request = requests.get( 'http://is.gd/create.php',
                                params = {'format': 'simple', 'url': url} )
        if request.status_code in [301, 201, 200]:
            return " [" + request.text + "]"
        else:
            return " [" + url + "]"

    # Save a sensor_msgs/Image on /tmp and return the path
    def save_image(self, image):
        # Convert from ROS message using cv_bridge.
        try:
            cv_image = self.bridge.imgmsg_to_cv(
                image, desired_encoding = 'passthrough')
        except CvBridgeError, e:
            rospy.logerr(e)
        # Write to JPG with OpenCV
        path = "/tmp/pic_ul_{time}.png".format( time = time() )
        cv.SaveImage( path, cv_image )
        return path
import simplejson as json numTweetsPerQuery = 15 handleToInfo = {} fin = open('venueID_twitterHandle.txt', 'r') for line in fin: (venueID, twitterHandle, venueName, venueDesc, venueAddr, venueLat, venueLon, numEvents) = line.split('\t') handleToInfo[twitterHandle] = {'id': int(venueID), 'twitterHandle': twitterHandle, 'name': venueName, 'description': venueDesc, 'address': venueAddr, 'lat': float(venueLat), 'lon': float(venueLon), 'numEvents': int(numEvents)} # We won't authenticate for this, but sometimes it's necessary twitter = Twython() venues = [] for twitterHandle in handleToInfo.keys(): user_timeline = twitter.getUserTimeline(screen_name=twitterHandle, rpp=numTweetsPerQuery) search_results = twitter.searchTwitter(q=twitterHandle, rpp=numTweetsPerQuery, lang="en") venue = handleToInfo[twitterHandle] tweetsTimeline = [] tweetsSearch = [] for tweet in user_timeline: tweetsTimeline.append(tweet) for tweet in search_results["results"]: tweetsSearch.append(tweet) venue['tweetsTimeline'] = tweetsTimeline venue['tweetsSearch'] = tweetsSearch venues.append(venue) print json.dumps(venues, sort_keys=True)
# NOTE(review): fragment -- this code continues an enclosing loop, and the
# outer `try:` below has its handler outside this chunk.
if len(value) > 1:
    # Users seen more than once are worth polling via the REST API.
    interestingPeople.append(key)
logging.debug("People: %d" % numPeople)
logging.debug("Interesting people: %s" % len(interestingPeople))
lastId = None
updateVitals(db_status, dbname, numPeople, len(interestingPeople), None)
# pull in "interesting" tweets via the REST API, slowly
try:
    currentIteration = 0
    for uid in interestingPeople:
        # grab user timeline
        currentIteration += 1
        tweets = twitter.getUserTimeline(user_id=uid, count=100)
        # check for rate limit, back off if so
        try:
            # A dict with an 'error' key means the API refused us.
            response = tweets['error']
            backoff()
            # reset the stream completely
            last_time = int(datetime.datetime.utcnow().strftime("%s"))
            continue
        except (TypeError, KeyError) as e:
            # Normal case: tweets is a list, no 'error' key.
            pass
        if isinstance(tweets, dict):
            # Any other dict response is an API error payload.
            logging.debug("Caught exception from Twitter APIs...")
            logging.debug("%s" % tweets)
            backoff()
            last_time = int(datetime.datetime.utcnow().strftime("%s"))
from twython import Twython # We won't authenticate for this, but sometimes it's necessary twitter = Twython() user_timeline = twitter.getUserTimeline(screen_name="ryanmcgrath") print user_timeline
# NOTE(review): fragment -- continues an enclosing loop; the outer `try:`
# below has its handler outside this chunk.
numPeople += 1
if len(value) > 1:
    # Users seen more than once are worth polling via the REST API.
    interestingPeople.append(key)
logging.debug("People: %d" % numPeople)
logging.debug("Interesting people: %s" % len(interestingPeople))
lastId = None
updateVitals(db_status, dbname, numPeople, len(interestingPeople), None)
# pull in "interesting" tweets via the REST API, slowly
try:
    currentIteration = 0
    for uid in interestingPeople:
        # grab user timeline
        currentIteration += 1
        tweets = twitter.getUserTimeline(user_id=uid, count = 100)
        # check for rate limit, back off if so
        try:
            # A dict with an 'error' key means the API refused us.
            response = tweets['error']
            backoff()
            # reset the stream completely
            last_time = int(datetime.datetime.utcnow().strftime("%s"))
            continue
        except (TypeError, KeyError) as e:
            # Normal case: tweets is a list, no 'error' key.
            pass
        if isinstance(tweets, dict):
            # Any other dict response is an API error payload.
            logging.debug("Caught exception from Twitter APIs...")
            logging.debug("%s" % tweets)
            backoff()
            last_time = int(datetime.datetime.utcnow().strftime("%s"))
# NOTE(review): fragment -- this dict literal continues an assignment
# (venue metadata keyed by twitter handle) that starts outside this chunk.
    'id': int(venueID),
    'twitterHandle': twitterHandle,
    'name': venueName,
    'description': venueDesc,
    'address': venueAddr,
    'lat': float(venueLat),
    'lon': float(venueLon),
    'numEvents': int(numEvents)
}

# We won't authenticate for this, but sometimes it's necessary
twitter = Twython()
venues = []
for twitterHandle in handleToInfo.keys():
    # Pull both the venue's own tweets and public tweets mentioning it.
    user_timeline = twitter.getUserTimeline(screen_name=twitterHandle,
                                            rpp=numTweetsPerQuery)
    search_results = twitter.searchTwitter(q=twitterHandle,
                                           rpp=numTweetsPerQuery, lang="en")
    venue = handleToInfo[twitterHandle]
    tweetsTimeline = []
    tweetsSearch = []
    for tweet in user_timeline:
        tweetsTimeline.append(tweet)
    for tweet in search_results["results"]:
        tweetsSearch.append(tweet)
    venue['tweetsTimeline'] = tweetsTimeline
    venue['tweetsSearch'] = tweetsSearch
    venues.append(venue)

print json.dumps(venues, sort_keys=True)
from twython import Twython tw=Twython() results=tw.getUserTimeline(screen_name="TelecomixSyria") for r in results: print "-"*50 text = r['text'] date = r['created_at'] twid = r['id'] print "\n%s\n%s"%(date,text)
# Simple bot: post a status, then search for "BasedGod" mentions newer than
# our latest tweet and build reply headers.
# NOTE(review): keys below are redacted placeholders -- load real ones from
# configuration, never from source control.
APP_KEY = 'xxxx'
APP_KEY_SECRET = 'xxxx'
ACCESS_TOKEN = 'xxxx'
TOKEN_SECRET = 'xxxx'

t = Twython(app_key=APP_KEY, app_secret=APP_KEY_SECRET,
            oauth_token=ACCESS_TOKEN, oauth_token_secret=TOKEN_SECRET)

# auto post status
t.update_status(status='Testing my twitter bot.')

# scrape tweets and auto respond
tl = t.getUserTimeline()
lastTweet = tl[0]
tid = str(lastTweet['id'])
# BUG FIX: the search API lives on the Twython client, not on the timeline
# list (`tl.searchTwitter` would raise AttributeError).
search = t.searchTwitter(q="BasedGod", since_id=tid, rpp="10")
for tweet in search['results']:
    user = tweet['from_user']
    txt = tweet['text']
    # Renamed from `id`, which shadowed the builtin.
    tweet_id = str(tweet['id'])
    # BUG FIX: txt.lower() can never equal "BasedGod" (it has uppercase);
    # one lowercase comparison covers both of the original's cases.
    if txt.lower() == "basedgod":
        head = "@" + user + ""