"""ex 1-3 retrieving Twitter API trends""" import twitter twitter_api = twitter.Twitter(domain="api.twitter.com", api_version='1') trends = twitter_api.trends() # print trends print [trend['name'] for trend in trends['trends'] ] # it's weird that I input the codes separately, it works. # but it dones't work together. # I add print to fix this bug. """"exe1-4 paging through twitter search results""" search_results=[] twitter_search = twitter.Twitter(domain="search.twitter.com") for page in range(1,6): search_results.append(twitter_search.search(q="Victoria Secret", rpp=400, page=page)) # i make page range less 2 pages to save time. # range(1,3) only output page 1 and page 2, rather than 3 pages. # I make rpp=10, with 1 results per page, to save time.
def twitter_authenticate(self):
    """Create the authenticated Twitter client and keep it on the instance."""
    self.twitter_api = twitter.Twitter(auth=self.auth)
def get_stream(self):
    """Return a fresh authenticated client for the v1.1 REST API over HTTPS."""
    client = twitter.Twitter(auth=self.auth,
                             secure=1,
                             api_version='1.1',
                             domain='api.twitter.com')
    return client
import scraperwiki import Twitter # Blank Python import json, operator, twitter #construct twitter API object searchApi = twitter.Twitter(domain="search.twitter.com") #Get Trends query = "#Doprah" #start with an empty dictionary and put values in tweeters=dict() for i in range (1,16): response = searchApi.search(q=query, rpp=100, page=i) tweets = response ['results'] for item in tweets: tweet = json.loads(json.dumps(item)) user = tweet['from_user_name'] if user in tweeters: tweeters[user] += 1 else: tweeters [user] = 1 print len(tweeters) sorted_tweeters = sorted(tweeters.iteritems(), key=operator.itemgetter(1),reverse=True) print sorted_tweeters[0:10] print 'done'import scraperwiki import Twitter
def get_api():
    """Build an authenticated Twitter REST client from the global config."""
    credentials = twitter.OAuth(config["oauth_token"],
                                config["oauth_secret"],
                                config["consumer_key"],
                                config["consumer_secret"])
    return twitter.Twitter(auth=credentials)
# NOTE(review): whitespace-flattened chunk kept verbatim. It wires a news-API
# source rotation, an MQTT publish helper, and a Twitter client, then starts a
# check() trading-hours gate (weekday() > 4 is Sat/Sun; hours outside 9-17
# reject). The chunk is truncated inside check() -- the success path
# (presumably "return True") is not visible in this view, so the code is not
# reconstructed here.
news_uri = 'https://newsapi.org/v1/articles' news_sources = [ 'the-wall-street-journal', 'new-scientist', 'techcrunch', 'the-new-york-times', 'ars-technica', 'reddit-r-all' ] news_source = cycle(news_sources) publish = partial(mqtt_publish.single, 'display_info', hostname=aws_host, retain=False, port=1883, keepalive=60) twit = twitter.Twitter( auth=twitter.OAuth(slz_twitter_oauth_token, slz_twitter_oauth_token_secret, slz_twitter_CONSUMER_KEY, slz_twitter_CONSUMER_SECRET)) stock_info = [] session = remote_session context_name = "programming" def check(): now = datetime.datetime.now() if now.weekday() > 4: return False if now.hour > 17 or now.hour < 9: return False
import twitter, json, cPickle, nltk

twitter_search = twitter.Twitter(domain="search.twitter.com")

# Page through the (legacy) Search API for the query.
search_results = []
for page in range(1, 10):
    search_results.append(
        twitter_search.search(q="AnonymousIRC", rpp=100, page=page))

# NOTE(review): the serialized JSON is discarded here -- kept for fidelity.
json.dumps(search_results, sort_keys=True, indent=1)

tweets = [r['text'] for result in search_results for r in result['results']]

# Frequency Analysis and Lexical Diversity: flatten tweets into tokens.
words = [w for t in tweets for w in t.split()]

# Pickle our Twitter Search Space product
with open("AnonymousIRC.pickle", "wb") as f:
    cPickle.dump(words, f)

# Python Natural Language Processing Tool Kit Analysis
words = cPickle.load(open("AnonymousIRC.pickle"))
freq_dist = nltk.FreqDist(words)
print("===")
print("Conducting Frequency and Lexical Diversity Analysis of Twitter Search Space: ")
print("===")
print("Number of words within the twitter search space: ")
print(len(words))
print("Number of unique words within twitter search space: ")
print(len(set(words)))
# NOTE(review): whitespace-flattened fragment kept verbatim. It begins inside
# a getEntities() helper whose "def" lies outside this view (it finishes
# building hashtag/url entities and returns them), then fetches one tweet via
# API v1 and mixes the entities in. Incomplete at its start, so it is not
# reconstructed here.
# massage field name to match production twitter api ht['text'] = ht['hashtag'] del ht['hashtag'] entities['hashtags'].append(ht) entities['urls'] = [] for url in extractor.extract_urls_with_indices(): entities['urls'].append(url) return entities # Fetch a tweet using an API method of your choice and mixin the entities t = twitter.Twitter(domain='api.twitter.com', api_version='1') tweet = t.statuses.show(id=TWEET_ID) tweet['entities'] = getEntities(tweet) print json.dumps(tweet, indent=4) # <markdowncell> # Example 5-3. Harvesting tweets from a user or public timeline (the_tweet__harvest_timeline.py) # <codecell> import sys import time
# NOTE(review): whitespace-flattened fragment kept verbatim. It opens a
# harvesting loop over 15-minute sessions, re-authenticating via tbf helpers,
# but is truncated mid-call inside tbf.wait_requests(...) -- the argument list
# is cut off, so the code is not reconstructed here.
NoMoreTweets = False # Checks when no more tweets are avilable EndOfResearch = False # True only if research ended and no fatal errors occurred ###### Beginning the cycle on 15 min sessions ############## for times in range(i_0, i_0 + inputs['max_files']): missing_tweets = 0 # Checks internet connection if not tbf.internet_on(): NoConnection = not tbf.wait_connection() # Initiate the connection to Twitter REST API try: oauth = tbf.access_from_file(inputs['access_file']) twitter = tw.Twitter(auth=oauth) except Exception as cazzillo: print('Trying again, {}'.format(cazzillo)) oauth = tbf.access_from_file(inputs['access_file']) twitter = tw.Twitter(auth=oauth) outfile = cart + search_label + '_' + lab.format(times) + '.dat' file_out = open(outfile, 'w') print('File: {}\n'.format(outfile)) # Waits for the 180 requests to be available ok_180, restano = tbf.wait_requests(twitter, sleep_time=time_sleep, wait=True, interval=60, n_trials=15,
CONSUMER_KEY = 'YCedfIMFa4QGjw9HiZtpIXuHv' CONSUMER_SECRET = 'dtYyCu2271U6N53CahJEcko6nx3ZT3RwnO6L6IwUF82n3tu4nM' def oauth_login(app_name=APP_NAME, consumer_key=CONSUMER_KEY, consumer_secret=CONSUMER_SECRET, token_file='out/twitter.oauth'): try: (access_token, access_token_secret) = read_token_file(token_file) except IOError, e: (access_token, access_token_secret) = oauth_dance(app_name, consumer_key, consumer_secret) if not os.path.isdir('out'): os.mkdir('out') write_token_file(token_file, access_token, access_token_secret) print >> sys.stderr, "OAuth Success. Token file stored to", token_file return twitter.Twitter(auth=twitter.oauth.OAuth( access_token, access_token_secret, consumer_key, consumer_secret)) if __name__ == '__main__': oauth_login(APP_NAME, CONSUMER_KEY, CONSUMER_SECRET)
def success(self, auth):
    """OAuth callback: verify the credentials and echo the screen name."""
    api = twitter.Twitter(auth=auth)
    profile = api.account.verify_credentials()
    return HttpResponse(profile['screen_name'])
# NOTE(review): whitespace-flattened fragment kept verbatim. It begins inside
# a docstring (the enclosing function -- an error-mail helper built on
# smtplib -- starts outside this view) and its __main__ section is truncated,
# so the code is not reconstructed here.
creds used. """ msg = MIMEText(str(ErrorMessage)) msg['from'] = SCRAPEREMAIL msg['to'] = ERROREMAIL msg['subject'] = 'Error in Metadata script' s = smtplib.SMTP('smtp.live.com', 25) s.ehlo() s.starttls() s.login(SCRAPEREMAIL, PASSWORD) s.sendmail(SCRAPEREMAIL, [ERROREMAIL], msg.as_string()) s.quit() if __name__ == "__main__": TwitterAPI = twitter.Twitter(auth=twitter.OAuth( ACCESS_KEY_TOKEN, ACCESS_KEY_SECRET, CONSUMER_KEY, CONSUMER_SECRET)) Start = time.time() Path = os.getcwd() os.chdir(os.path.join(Path, WriteDir)) CurrentDirectory = os.getcwd() with requests.Session() as Session: Resp = Session.get(URL, headers=Headers) # get auth token soup = BeautifulSoup(Resp.content, "lxml") AUTH_TOKEN = soup.select_one("input[name=authenticity_token]")["value"] # update data, post and you are logged in. data["authenticity_token"] = AUTH_TOKEN Resp = Session.post(LoginPost, data=data, headers=Headers) UsersWithPoliticalAds = {} #ScreenName:UserID
from views.google_views import login_required

# Credentials come from the environment, never from source.
TWITTER_OAUTH_KEY = os.getenv("TWITTER_OAUTH_KEY")
TWITTER_OAUTH_SECRET = os.getenv("TWITTER_OAUTH_SECRET")
TWITTER_ACCESS_TOKEN = os.getenv("TWITTER_ACCESS_TOKEN")
TWITTER_ACCESS_TOKEN_SECRET = os.getenv("TWITTER_ACCESS_TOKEN_SECRET")

twitter_view = Blueprint('twitter_login', __name__)
twitter_bp = make_twitter_blueprint(
    api_key=TWITTER_OAUTH_KEY,
    api_secret=TWITTER_OAUTH_SECRET,
    redirect_to='twitter_login.log_in_twitter')

api = twitter.Twitter(auth=twitter.OAuth(
    consumer_key=TWITTER_OAUTH_KEY,
    consumer_secret=TWITTER_OAUTH_SECRET,
    token=TWITTER_ACCESS_TOKEN,
    token_secret=TWITTER_ACCESS_TOKEN_SECRET))


@twitter_view.route("/login")
@login_required
def log_in_twitter(user):
    """Link the signed-in user's Twitter handle to their profile."""
    if not twitter_dance.authorized:
        return redirect(url_for("twitter.login"))
    settings = twitter_dance.get("account/settings.json").json()
    tag = "@" + settings['screen_name']
    facade.set_user_twitter_tag(user, tag)
    return redirect(url_for('profile.profile'))
# Pull the four OAuth credentials out of the loaded key dictionary.
OAUTH_TOKEN = keyDict['access_token_key']
OAUTH_SECRET = keyDict['access_token_secret']
CONSUMER_KEY = keyDict['consumer_key']
CONSUMER_SECRET = keyDict['consumer_secret']

o1 = OAuth(OAUTH_TOKEN, OAUTH_SECRET, CONSUMER_KEY, CONSUMER_SECRET)
t = Twitter(auth=o1)

# see "Authentication" section below for tokens and keys
t = Twitter(
    auth=OAuth(OAUTH_TOKEN, OAUTH_SECRET, CONSUMER_KEY, CONSUMER_SECRET))

# Get your "home" timeline
t.statuses.home_timeline()

twttr = twitter.Twitter(auth=twitter.OAuth(
    OAUTH_TOKEN, OAUTH_SECRET, CONSUMER_KEY, CONSUMER_SECRET))

# Get a particular friend's timeline
t.statuses.friends_timeline(id="billybob")
# Also supported (but totally weird)
t.statuses.friends_timeline.billybob()
# to pass in GET/POST parameters, such as `count`
t.statuses.home_timeline(count=5)
# to pass in the GET/POST parameter `id` you need to use `_id`
t.statuses.oembed(_id=1234567890)
# Update your status
t.statuses.update(status="Using @sixohsix's sweet Python Twitter Tools.")
# NOTE(review): whitespace-flattened fragment kept verbatim. It performs the
# OAuth dance (caching tokens in 'twitter_oauth'), then walks the logged-in
# user's follower ids calling friendships.lookup per follower. It is truncated
# inside the "try:" -- the matching except/else is outside this view -- so the
# code is not reconstructed here.
#file path to twitter credentials oauth_filename = 'twitter_oauth' #check to see if doesn't exist if not os.path.exists(oauth_filename): #create the file by getting authorisation from twitter twitter.oauth_dance("see documentation", CONSUMER_KEY, CONSUMER_SECRET, oauth_filename) #get the authorisation tokens from the file oauth_token, oauth_token_secret = twitter.read_token_file(oauth_filename) #log in to use the twitter_api auth = twitter.OAuth(oauth_token, oauth_token_secret, CONSUMER_KEY, CONSUMER_SECRET) twitter_api = twitter.Twitter(domain="api.twitter.com", api_version='1.1', auth=auth) #get the followers of the logged in user followers = twitter_api.followers.ids() followers = followers['ids'] follower_data = {} #get the data for each follower api_calls = 0 for follower_id in followers: try: data = twitter_api.friendships.lookup( user_id="{0}".format(follower_id)) api_calls += 1 #create a new key in the dictionary and add relevant data as value follower_data[str(follower_id)] = data[0]
def oauth_login(oauth_token, oauth_token_secret, consumer_key,
                consumer_secret):
    """Return a Twitter REST client authenticated with the given OAuth creds."""
    credentials = twitter.oauth.OAuth(oauth_token, oauth_token_secret,
                                      consumer_key, consumer_secret)
    return twitter.Twitter(auth=credentials)
def main():
    """Pull new home-timeline tweets, refine them, and cache them in MongoDB."""
    # MongoDB Configuration
    client = pymongo.MongoClient()
    db = client.dedup

    # Taking the values from the DB, to be changed for generic users
    authval = {}
    for item in db.appkeys.find():
        authval = copy.deepcopy(item)
    consumer_key = authval['consumer_key']
    consumer_secret = authval['consumer_secret']
    oauth_token = authval['oauth_token']
    oauth_token_secret = authval['oauth_token_secret']

    auth = twitter.oauth.OAuth(oauth_token, oauth_token_secret,
                               consumer_key, consumer_secret)
    t = twitter.Twitter(auth=auth)
    verificationDetails = t.account.verify_credentials()
    authuser = verificationDetails['screen_name']

    # Counter for the last tweet in our DB
    sinceCounter = None
    try:
        getLast = db.last.find({}).sort("_id", -1).limit(1)
        for item in getLast:
            sinceCounter = item['lastTweet']
    except:
        # best-effort: fall back to a full fetch when "last" is unreadable
        pass

    if sinceCounter is None:
        completeTimeline = t.statuses.home_timeline(count=200)
    else:
        completeTimeline = t.statuses.home_timeline(count=200,
                                                    since_id=sinceCounter)

    if len(completeTimeline) > 0:
        refinedTweet = [Refine(tweet, authuser) for tweet in completeTimeline]

        # refining for inserting in MongoDB -- converting the set to list!
        mongoRefined = copy.deepcopy(refinedTweet)
        for item in mongoRefined:
            item['cleanWords'] = list(item['cleanWords'])

        # Refined Tweets are Cached in MongoDB
        currentCount = 0
        for item in mongoRefined:
            try:
                db.refined.insert(item)
            except:
                pass

        # Get the total number of tweets in refined collection
        try:
            currentCount = db.refined.count()
        except:
            pass

        # Bookmark the newest tweet so the next run can use since_id.
        lastTweet = completeTimeline[0]['id']
        endTweet = {'lastTweet': lastTweet,
                    'created_on': datetime.now(),
                    'utc_timestamp': datetime.utcnow(),
                    'authuser': authuser,
                    'currentCount': currentCount}
        db.last.insert(endTweet)

        # Expire cached tweets after 'n' hrs (documents in 'refined' collection)
        num_hrs = 24
        db.refined.ensure_index("utc_timestamp",
                                expireAfterSeconds=num_hrs * 60 * 60)
        # Expire bookmarks after 'n' days (documents in 'last' collection)
        num_days = 2
        db.last.ensure_index("utc_timestamp",
                             expireAfterSeconds=num_days * 24 * 60 * 60)
        # NOTE(review): the source was whitespace-flattened; the ensure_index
        # calls were placed inside this branch by inference -- confirm against
        # the original file.
import csv  # package for csv files
import twitter  # package for twitter
import re  # package for regular expressions

# Twitter credentials
# Please, try to get your own credentials. In case that you had problems
# getting the credentials, you can use the credentials below by default.
# NOTE(review): hard-coded secrets should live outside source control.
CONSUMER_KEY = 'mSIB6HsL3vJnUq4dJFRkuMdgE'
CONSUMER_SECRET = 'l6gKb7Nzm2dSuM6J0WozK5SGVvBdV7drlvwJMKDhINvaebvOFE'
OAUTH_TOKEN = '365628507-vTziO7gDaDTCeCtn4IppIQAje3dQ4sLmIsUmjTGQ'
OAUTH_TOKEN_SECRET = 'yz5NkkWd3IORVI7kH9LIiowf8uo3KNPSzCHKVEDTmGaee'

auth = twitter.oauth.OAuth(OAUTH_TOKEN, OAUTH_TOKEN_SECRET,
                           CONSUMER_KEY, CONSUMER_SECRET)
# We need to pass the auth parameter
twitter_api = twitter.Twitter(auth=auth)

search_word = '#starwars'  # We will look for '#starwars'
count = 100

# Add the parameters for search tweets (q is the parameter for query...)
search_results = twitter_api.search.tweets(q=search_word, count=count,
                                           lang='es')

# The results are in the 'statuses' key
statuses = search_results['statuses']

# print the 'statuses[0].keys()' variable to find the keys to get the results
print(statuses[0].keys())

# We create a csv, with columns 'user', 'text', 'geo', 'coordinates',
# 'favorited' and 'hashtags'
# NOTE(review): whitespace-flattened fragment kept verbatim. A bot that loads
# a cached token from ~/.twitter_oauth (reusing the command-line tool's app
# key), defines a tweet() helper, and scrapes b3ta's popular-links page with
# BeautifulSoup. The scraping loop is truncated after "anchor = anchors[2]" --
# the rest of the body is outside this view, so it is not reconstructed here.
#!/usr/bin/python import os.path import requests import twitter import twitter.cmdline from bs4 import BeautifulSoup home = "http://twitter.com/lol_o_clock" print(home) links_url = "http://www.b3ta.com/links/popular/" homemade_url = "http://www.b3ta.com/links/popular/?i=1" # b3tan's own oauth = twitter.OAuth(*twitter.read_token_file(os.path.expanduser('~/.twitter_oauth')) + (twitter.cmdline.CONSUMER_KEY, twitter.cmdline.CONSUMER_SECRET)) bird = twitter.Twitter(domain='api.twitter.com',auth=oauth, api_version='1.1') def tweet(message): bird.statuses.update(status=message) for url in [homemade_url]: response = requests.get(url) response.raise_for_status() page = response.content soup = BeautifulSoup(page) posts = soup.findAll('div',attrs={'class':['post1','post2']}) for post in posts: anchors = post.findAll('a') anchor = anchors[2]
def handle(event, context): dynamodb = boto3.resource('dynamodb') table = dynamodb.Table(config.dynamodb_table) now = int(calendar.timegm(time.gmtime())) # Read the blacklist of verses we've posted recently response = table.scan( FilterExpression=Attr('last_updated').gt(now - BLACKLIST_SECS)) for i in response['Items']: print "Adding %s to blacklist (posted %.1f days ago)" % ( i["verse"], (now - i["last_updated"]) / (60 * 60 * 24)) BLACKLIST.add(i['verse']) # Read the XML verses file doc = None with open('proverbs.xml') as fd: doc = xmltodict.parse(fd.read()) root = doc['bible']['b']['c'] # Keep track of number of verses per chapter and the total number chapters = {} verses = 0 for chapter in root: chapters[chapter["@n"]] = len(chapter["v"]) verses += len(chapter["v"]) # Pick a verse to post, skip duplicates and blacklisted verses while True: chapter_idx = random.randint(1, len(chapters)) verse_idx = random.randint(1, len(root[chapter_idx - 1]['v'])) verse_key = "%s:%s" % (chapter_idx, verse_idx) if verse_key not in BLACKLIST: break verse = root[chapter_idx - 1]['v'][verse_idx - 1]['#text'] # Post to twitter new_status = "%s (Proverbs %s:%s NIV)" % (verse, chapter_idx, verse_idx) print("status: %s" % new_status) t = twitter.Twitter( auth=twitter.OAuth(config.access_key, config.access_secret, config.consumer_key, config.consumer_secret)) results = t.statuses.update(status=new_status) # Update DynamoDB with tinmestamp for the verse we just posted table.update_item(Key={ "verse": "%s:%s" % (chapter_idx, verse_idx), }, UpdateExpression='SET last_updated = :val1', ExpressionAttributeValues={ ':val1': now, }) return "OK"
# NOTE(review): whitespace-flattened fragment kept verbatim. It prints an
# install hint and exits (apparently the except branch of a failed "import
# twitter" whose try lies outside this view), builds the API client, and
# starts get_user_params(), which is truncated mid-body. Internal newlines of
# the triple-quoted help text were destroyed by flattening, so the code is not
# reconstructed here.
print("""\ You need to ... pip install twitter If pip is not found you might have to install it using easy_install. If it does not work on your system, you might want to follow instructions at https://github.com/sixohsix/twitter, most likely: $ git clone https://github.com/sixohsix/twitter $ cd twitter $ sudo python setup.py install """) sys.exit(1) from twitterauth import CONSUMER_KEY, CONSUMER_SECRET, ACCESS_TOKEN_KEY, ACCESS_TOKEN_SECRET api = twitter.Twitter(auth=twitter.OAuth(consumer_key=CONSUMER_KEY, consumer_secret=CONSUMER_SECRET, token=ACCESS_TOKEN_KEY, token_secret=ACCESS_TOKEN_SECRET)) DATA_PATH = "data" # for some reasons TWeets disappear. In this file we collect those MISSING_ID_FILE = os.path.join(DATA_PATH, "missing.tsv") NOT_AUTHORIZED_ID_FILE = os.path.join(DATA_PATH, "not_authorized.tsv") def get_user_params(DATA_PATH): user_params = {} # get user input params user_params['inList'] = os.path.join(DATA_PATH, 'corpus.csv')
def __init__(self, conf_file):
    """Wire up the Flickr and Twitter clients from a single config file."""
    self.f = flickr.Flickr(conf_file)
    self.t = twitter.Twitter(conf_file)
def login(): #loging to twitter application APP_NAME = "Finance_Harvest" CONSUMER_KEY = "Bkvk7JFZmzaVpLjGAWBtxQ" CONSUMER_SECRET = "y9tugnYJeU8aMNW44o6hwWHC3QVktCYtW3RDm3Mdk" TOKEN_FILE = 'out/twitter.oauth' try: (oauth_token, oauth_token_secret) = read_token_file(TOKEN_FILE) except IOError, e: print e.errno, e.strerror (oauth_token, oauth_token_secret) = oauth_dance(APP_NAME, CONSUMER_KEY, CONSUMER_SECRET) if not os.path.isdir('out'): os.mkdir('out') write_token_file(TOKEN_FILE, oauth_token, oauth_token_secret) return twitter.Twitter(domain='api.twitter.com', api_version='1.1', auth=twitter.oauth.OAuth(oauth_token, oauth_token_secret, CONSUMER_KEY, CONSUMER_SECRET)) if __name__ == "__main__": login()
import twitter try: #enter your twitter api info below: con_secret = '' con_secret_key = '' token = '' token_key = '' t=twitter.Twitter(auth=twitter.OAuth(token, token_key, con_secret, con_secret_key)) statupdate=raw_input('Enter text that you want to tweet: ') x=t.statuses.update(status=statupdate) x2=t.statuses.user_timeline(screen_name="@RohanGautam13",count=1, include_rts=False) if x2[0]['text']==statupdate:print 'Tweeted successfully!🎉' else : print 'Status updation failed' except twitter.api.TwitterHTTPError: print 'Status updation failed: Duplicate status entered'
def initTwitterAgent(self, file, consumer_key, consumer_secret):
    """Build an authenticated client from a cached OAuth token file."""
    # NOTE(review): "file" shadows the Python 2 builtin; the name is kept for
    # compatibility with keyword callers.
    oauth_token, oauth_secret = twitter_api.read_token_file(file)
    credentials = twitter_api.OAuth(oauth_token, oauth_secret,
                                    consumer_key, consumer_secret)
    return twitter_api.Twitter(auth=credentials)
# NOTE(review): whitespace-flattened fragment kept verbatim. It authenticates
# with placeholder credentials (retry=True), prepares an output path under
# ~/Data/twitter, searches for "python", and then is truncated at the dangling
# "if 'text' in tweet:" -- the loop body is outside this view, so the code is
# not reconstructed here.
# -*- coding: utf-8 -*- """ Created on Sun May 23 16:46:11 2021 @author: soumya """ import twitter consumer_key = "<Your Consumer Key Here>" consumer_secret = "<Your Consumer Secret Here>" access_token = "<Your Access Token Here>" access_token_secret = "<Your Access Token Secret Here>" authorization = twitter.OAuth(access_token, access_token_secret, consumer_key, consumer_secret) t = twitter.Twitter(auth=authorization, retry=True) import os data_folder = os.path.join(os.path.expanduser("~"), "Data", "twitter") output_filename = os.path.join(data_folder, "python_tweets.json") import json original_users = [] tweets = [] user_ids = {} search_results = t.search.tweets(q="python", count=100)['statuses'] for tweet in search_results: if 'text' in tweet:
# NOTE(review): whitespace-flattened fragment kept verbatim. It searches for
# #realDonaldTrump tweets, defines getLexicalDiversity (unique words / total
# words of a string), and then is truncated at the dangling
# "for status in statuses:" -- the sentiment loop body is outside this view,
# so the code is not reconstructed here. Hard-coded credentials should live
# outside source control.
consumer_key = "BziIwaK6XQDdStrR4FsrJSieY" consumer_secret = "a1xvk9XGvN4Tq5ZnA5et9NasEgAY2LoUQebqXTqx8pB5QqMg2H" oauth_token = "3214717593-TUH2kQYqyDE6zEhfX3suwiYimT6zA9gaRyMKa5F" oauth_secret = "0AXIV614eZ7nvpTtHo8taWJ2m8yf4OFiK8LNIwMuMHMjR" skips = set(stopwords.words('english')) skips |= set(['""', "''", ' ', '-', ' - ']) q = "#realDonaldTrump" count = 25 auth = twitter.oauth.OAuth(oauth_token, oauth_secret, consumer_key, consumer_secret) tw = twitter.Twitter(auth=auth) search = tw.search.tweets(q=q, count=count, lang='en') statuses = search['statuses'] overall_sentiment = 0.0 def getLexicalDiversity(someString): words = someString.split() numUnique = len(set(words)) numTotal = len(words) return ((1.0 * numUnique) / numTotal) for status in statuses:
# NOTE(review): whitespace-flattened chunk kept verbatim. tweetboard() lazily
# builds a module-level TWITTER client (falling back to older YAML key names),
# then renders an HTML dashboard of each list member's latest tweet, coloring
# entries orange after 12h and red after 24h of silence. The internal newlines
# of the triple-quoted HTML/CSS literals were destroyed by flattening and
# cannot be recovered, so any reformatting would change the program's output;
# the code is therefore left untouched.
def tweetboard(list_owner, list_name): global TWITTER if TWITTER is None: try: access_token = data['access_token'] access_token_secret = data['access_token_secret'] except KeyError: # Older YAMLs access_token = data['oauth_token'] access_token_secret = data['oauth_token_secret'] TWITTER = twitter.Twitter(auth=twitter.OAuth( access_token, access_token_secret, data['consumer_key'], data['consumer_secret'])) users = get_list_members(list_owner, list_name) print(''' <html> <head> <title>Tweetboard</title> <style type="text/css"> body { background-color: lightsteelblue; font-family: sans-serif; } .danger, .danger a:link, .danger a:visited { color: red; font-weight: bold; } .warning, .warning a:link, .warning a:visited { color: orange; font-weight: bold; } li { background-color: white; border: 1px solid #000; display: inline-block; margin: 5px; min-height: 250px; padding: 5px; vertical-align: top; width: 240px; } .tweet div { padding-bottom: 10px; } .screen_name { text-align: center; } .status { word-break: break-word; } .stats { font-size: smaller; } .stats span { display: block; } </style> </head> <body> <ol> ''') now = time.time() for user in users: # pprint(user) if 'status' not in user: continue status = user['status'] # pprint(status) created_at = status['created_at'] timestamp = calendar.timegm(time.strptime( created_at, "%a %b %d %H:%M:%S +0000 %Y")) seconds = now-timestamp m, s = divmod(seconds, 60) h, m = divmod(m, 60) ago = "%dh %02dm ago" % (h, m) extra_classes = "" if h > 24: extra_classes = "danger" elif h > 12: extra_classes = "warning" text = status['text'].replace("\n", "<br>") # "Mon Jun 08 11:23:45 +0000 2015" created = user['created_at'] # "08 Jun 2015" created = created[8:11] + created[4:7] + created[-5:] tweets = commafy(user['statuses_count']) following = commafy(user['friends_count']) followers = commafy(user['followers_count']) user_link = "https://twitter.com/" + user['screen_name'] status_link = user_link + "/status/" + status['id_str']
status_a_href = '<a href="' + status_link + '" target="twitter">' print(' <li><div class="tweet ' + extra_classes + '">') print(' <div class="screen_name"><a href="' + user_link + '" target="twitter">@' + user['screen_name'] + '</a></div>') print_it(' <div class="status">' + text + '</div>') print(' <div class="created_at">' + status_a_href + status['created_at'] + '</a></div>') print(' <div class="ago">' + status_a_href + ago + '</a></div>') print(' <div class="stats">') print(' <span class="created">Created: ' + created + '</span>') print(' <span class="tweets">Tweets: ' + tweets + '</span>') print(' <span class="following">Following: ' + following + '</span>') print(' <span class="followers">Followers: ' + followers + '</span>') print(" </div>") print(" </li>") print(''' </ol> </body> </html> ''')
import twitter, json # load config from twitter_config import * auth = twitter.oauth.OAuth(OAUTH_TOKEN, OAUTH_TOKEN_SECRET, CONSUMER_KEY, CONSUMER_SECRET) twitter_api = twitter.Twitter(auth=auth) q = 'Brexit' count = 100 # See https://dev.twitter.com/docs/api/1.1/get/search/tweets search_results = twitter_api.search.tweets(q=q, count=count) statuses = search_results['statuses'] for tweet in statuses: print tweet['text'].encode('utf-8')
def test():
    """Run speedtest-cli, log the result, and tweet when the connection is bad.

    Side effects: appends a row to /var/www/data.csv and may post a tweet.
    """
    # run speedtest-cli
    print('running test')
    a = os.popen("python ~/speedtest-cli --simple").read()
    print('ran')

    # split the 3 line result (ping,down,up)
    lines = a.split('\n')
    print(a)
    ts = time.time()
    date = datetime.datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S')

    # if speedtest could not connect set the speeds to 0
    if "Cannot" in a:
        p = 100
        d = 0
        u = 0
    # extract the values for ping down and up values
    else:
        p = lines[0][6:11]
        d = lines[1][10:16]
        u = lines[2][8:14]
    print(date, p, d, u)

    # save the data to file for local network plotting
    # (with-statement guarantees the handle is closed even on a write error)
    with open('/var/www/data.csv', 'a') as out_file:
        writer = csv.writer(out_file)
        writer.writerow((date, p, d, u))
        # writer.writerow((ts*1000,p,d,u))

    # connect to twitter
    TOKEN = "[insert token]"
    TOKEN_SECRET = "[insert secret]"
    CON_KEY = "[insert key]"
    CON_SECRET = "[insert secret]"
    my_auth = twitter.OAuth(TOKEN, TOKEN_SECRET, CON_KEY, CON_SECRET)
    twit = twitter.Twitter(auth=my_auth)

    # try to tweet if speedtest couldnt even connet. Probably wont work if the internet is down
    if "Cannot" in a:
        try:
            tweet = "Hey @fragspeak why is my internet down? I pay for 100down\\50up in Washington DC?"
            twit.statuses.update(status=tweet)
        except Exception:
            # best-effort: the network is probably down anyway
            pass
    # tweet if down speed is less than whatever I set
    # float() replaces the original eval() on scraped subprocess text --
    # same result for numeric strings, but no code execution risk.
    elif float(d) < 50:
        print("trying to tweet")
        try:
            tweet = ("Hey @fragspeak why is my internet speed "
                     + str(int(float(d))) + "down\\" + str(int(float(u)))
                     + "up when I pay for 100down\\50up in Washington DC?"
                     " #fios #speedtest")
            twit.statuses.update(status=tweet)
        except Exception:
            pass
    return