def get_tuits_since(since_id, twitter_handle):
    """Fetch up to 200 tweets from *twitter_handle* newer than *since_id*
    and hand each one to upload_my_tweet().

    Connection errors are printed and swallowed (best-effort fetch).
    """
    oauth = api.get_oauth()
    url = "https://api.twitter.com/1.1/statuses/user_timeline.json"
    payload = {
        'screen_name': twitter_handle,
        'count': 200,  # user_timeline maximum per request
        'since_id': since_id,
    }
    print(payload)
    try:
        sleep(4)  # crude throttle to stay under the API rate limit
        r = requests.get(url, auth=oauth, params=payload)
        data = r.json()
        for item in data:
            tweet = {
                'tweet_id': item['id'],
                'screen_name': item['user']['screen_name'].lower(),
                'user_id': item['user']['id'],
                'status': item['text'],
                'created_at': item['created_at'],
                'utc_offset': item['user']['utc_offset'],
            }
            # geo is optional and may be explicitly null; attach
            # coordinates only when actually present
            if item.get('geo') is not None:
                tweet['latitude'] = item['geo']['coordinates'][0]
                tweet['longitude'] = item['geo']['coordinates'][1]
            print(tweet)
            upload_my_tweet(tweet)
    except requests.exceptions.ConnectionError as e:
        print("Error: %s" % e)
def tell_using_twitter(message, twitter_user):
    """Notify *twitter_user* of *message* twice: as a public @mention
    and as a direct message. Failures are printed, never raised.
    """
    oauth = api.get_oauth()

    # send a @mention
    status = "@" + twitter_user + " " + message
    payload = {
        'status': status,
    }
    url = "https://api.twitter.com/1.1/statuses/update.json"
    try:
        r = requests.post(url=url, auth=oauth, params=payload)
    except requests.exceptions.RequestException as e:
        # the original bare except printed r.text, but r is unbound
        # when requests.post itself raises (NameError)
        print("Error: %s" % e)

    # send a DM
    payload = {
        'text': message,
        'screen_name': twitter_user,
    }
    url = "https://api.twitter.com/1.1/direct_messages/new.json"
    try:
        r = requests.post(url=url, auth=oauth, params=payload)
        # Twitter error payloads look like {"errors": [{"message": ...}]};
        # the original indexed r.text (a str) with string keys — TypeError
        body = r.json()
        if 'errors' in body:
            print(body['errors'][0]['message'])
    except requests.exceptions.RequestException as e:
        print("Error: %s" % e)
def fetch_tuits(geocode, carcel):
    """Search recent tweets around *geocode* and page through results
    via do_request(), which is expected to return the next_results
    query string, or the sentinel string "None" when no page follows.
    """
    oauth = api.get_oauth()
    url = "https://api.twitter.com/1.1/search/tweets.json"
    payload = {
        'q': '',  # empty query: match anything inside the geocode radius
        'geocode': geocode,
        'result_type': 'recent',
        'count': 100,  # search API maximum per request
    }
    # The original left this request commented out, so `data` below was
    # always undefined and the bare except silently masked the NameError.
    r = requests.get(url=url, auth=oauth, params=payload)
    data = r.json()
    try:
        next_results = data['search_metadata']['next_results']
        print(next_results)
    except KeyError:
        print("There are not next_results")
        next_results = "None"
    if next_results == "None":
        time.sleep(6)  # stay under the search API rate limit
        next_results = do_request(url, oauth, carcel, payload, geocode)
    while next_results != "None":
        time.sleep(6)
        # next_results already carries the full "?max_id=..." query string
        url = "https://api.twitter.com/1.1/search/tweets.json" + next_results
        payload = None
        next_results = do_request(url, oauth, carcel, payload, geocode)
def get_recent_tweets(user_list):
    """For each (_, handle) pair in *user_list*, page backwards through
    the user's timeline using max_id and append each tweet as one JSON
    line to "<handle>.json".

    Paging stops when get_max_id() returns the same id twice in a row,
    i.e. a request wrote no older tweets to the file.
    """
    oauth = api.get_oauth()
    url = "https://api.twitter.com/1.1/statuses/user_timeline.json"
    for user in user_list:
        max_id = 0  # 0 == "first request, no max_id filter yet"
        new_max_id = None
        twitter_handle = user[1].replace("@", "")
        filename = twitter_handle + ".json"
        while max_id != new_max_id:
            print("\nMax_id %s" % max_id)
            print("New_max_id %s" % new_max_id)
            print(user)
            if max_id == 0:
                # first page: newest tweets
                payload = {
                    'screen_name': twitter_handle,
                    'count': 200,
                }
            elif max_id is None:
                print("yes none")
                new_max_id = None
                payload = {
                    'screen_name': twitter_handle,
                    'count': 200,
                }
            else:
                # later pages: resume from the oldest id already on disk
                new_max_id = get_max_id(filename)
                max_id = new_max_id
                payload = {
                    'screen_name': twitter_handle,
                    'count': 200,
                    'max_id': max_id,
                }
            try:
                sleep(4)  # throttle between API calls
                r = requests.get(url, auth=oauth, params=payload)
                data = r.json()
                for item in data:
                    tweet = {
                        'tweet_id': item['id'],
                        'screen_name': item['user']['screen_name'].lower(),
                        'user_id': item['user']['id'],
                        'status': item['text'],
                        'created_at': item['created_at'],
                        'utc_offset': item['user']['utc_offset'],
                    }
                    if item.get('geo'):
                        tweet['latitude'] = item['geo']['coordinates'][0]
                        tweet['longitude'] = item['geo']['coordinates'][1]
                    # context manager so the handle is closed even if
                    # json.dumps or the write raises
                    with codecs.open(filename, "a+", "utf-8") as f:
                        f.write(json.dumps(tweet) + "\n")
            except requests.exceptions.ConnectionError as e:
                print("Error: %s" % e)
            max_id = get_max_id(filename)
def main():
    """Fetch geolocated tweets for one user and append them, via
    dict_to_csv(), to user_tuits.csv. Resumes from the id stored in the
    first column of the file's last line, when the file exists.
    """
    description = """Get the location of tuits for user"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-u', '--user', action='store',
                        metavar='twitter user handle', required=True,
                        dest='user')
    args = parser.parse_args()
    if args.user:
        oauth = api.get_oauth()
        # resume from the last saved tweet id, if any
        if os.path.isfile('user_tuits.csv'):
            with open('user_tuits.csv', 'r') as f:
                lines = f.readlines()
            if lines:
                max_id = lines[-1].split(",")[0]
            else:
                max_id = ""  # file exists but is empty — IndexError before
            print(max_id)
        else:
            max_id = ""
            print("no file yet")

        # get tuits
        url = "https://api.twitter.com/1.1/statuses/user_timeline.json"
        params = {
            # NOTE(review): user_timeline caps count at 200 per request;
            # 3200 is the overall history limit — confirm intent
            'count': 3200,
            'screen_name': args.user.strip(),
            'include_rts': 'false',
        }
        if max_id != "":
            params['max_id'] = max_id
        r = requests.get(url=url, auth=oauth, params=params)
        data = r.json()
        for i in data:
            tuit = {}
            tuit['id'] = i['id']
            # only geolocated tweets get lat/long and are written out
            if 'geo' in i and i['geo'] is not None:
                tuit['lat'] = i['geo']['coordinates'][0]
                tuit['long'] = i['geo']['coordinates'][1]
            if 'lat' in tuit:
                tuit['datetime'] = i['created_at']
                csv = dict_to_csv(tuit)
                with open('user_tuits.csv', 'a') as f:
                    f.write(csv)
def find_and_retuit():
    """
    For each jail, find tweets inside and retweet.

    Reads jail boundary polygons from carceles_limites.csv, checks each
    not-yet-retweeted tweet in the sqlite 'tuits' table against its
    jail's polygon, retweets the matches, and marks them retuited.
    """
    import codecs
    import requests
    import config
    import os.path
    import dataset
    import lib
    import api

    oauth = api.get_oauth()
    db = dataset.connect("sqlite:///" + os.path.join(config.local_folder, "tuits.db"))

    with codecs.open(os.path.join(config.local_folder, "carceles_limites.csv")) as f:
        data = f.readlines()

    retuits = []
    for line in data:
        line = line.strip()
        if line.startswith("Carcel,"):
            continue  # header row
        fields = line.split(",")
        penal = fields[0]
        # remaining fields are space-separated coordinate pairs that
        # describe the jail's boundary polygon
        poly = []
        for pair in fields[1:]:
            parts = pair.split(" ")
            poly.append((float(parts[0]), float(parts[1])))

        # get tweet ids — parameterized query instead of string
        # concatenation (jail names come from the CSV; avoid injection
        # and breakage on names containing quotes)
        res = db.query(
            "select * from tuits where carcel=:penal and retuited='no'",
            penal=penal)
        # note: the original reused `i` here, shadowing the outer loop var
        for row in res:
            if lib.tuit_inside_jail(row['status_id'], poly):
                url = "https://api.twitter.com/1.1/statuses/retweet/"
                url += str(row['status_id']) + ".json"
                try:
                    requests.post(url=url, auth=oauth)
                    print("Retuited %i" % row['status_id'])
                    retuits.append(row['status_id'])
                except requests.exceptions.RequestException as e:
                    # bare except here previously printed r.text, which
                    # is unbound when the post itself raises
                    print("Error: %s" % e)

    table = db['tuits']
    for status_id in retuits:
        record = dict(status_id=status_id, retuited="yes", in_jail="yes")
        print(record)
        table.update(record, ['status_id'])
def get_profile_image_url(user_list):
    """For every (_, handle) pair in *user_list*, look up the account's
    profile image URL via users/show and pass it, together with the
    cleaned screen name, to download_profile_image().
    """
    oauth = api.get_oauth()
    endpoint = "https://api.twitter.com/1.1/users/show.json"
    for entry in user_list:
        handle = entry[1].replace("@", "")
        response = requests.get(endpoint, auth=oauth,
                                params={'screen_name': handle})
        image_url = response.json()['profile_image_url']
        download_profile_image(image_url, handle)
def main():
    """Fetch geolocated tweets for one user and append them, via
    dict_to_csv(), to user_tuits.csv. Resumes from the id stored in the
    first column of the file's last line, when the file exists.
    """
    description = """Get the location of tuits for user"""
    parser = argparse.ArgumentParser(description=description)
    parser.add_argument('-u', '--user', action='store',
                        metavar='twitter user handle', required=True,
                        dest='user')
    args = parser.parse_args()
    if args.user:
        oauth = api.get_oauth()
        # resume from the last saved tweet id, if any
        if os.path.isfile('user_tuits.csv'):
            with open('user_tuits.csv', 'r') as f:
                lines = f.readlines()
            if lines:
                max_id = lines[-1].split(",")[0]
            else:
                max_id = ""  # file exists but is empty — IndexError before
            print(max_id)
        else:
            max_id = ""
            print("no file yet")

        # get tuits
        url = "https://api.twitter.com/1.1/statuses/user_timeline.json"
        params = {
            # NOTE(review): user_timeline caps count at 200 per request;
            # 3200 is the overall history limit — confirm intent
            'count': 3200,
            'screen_name': args.user.strip(),
            'include_rts': 'false',
        }
        if max_id != "":
            params['max_id'] = max_id
        r = requests.get(url=url, auth=oauth, params=params)
        data = r.json()
        for i in data:
            tuit = {}
            tuit['id'] = i['id']
            # only geolocated tweets get lat/long and are written out
            if 'geo' in i and i['geo'] is not None:
                tuit['lat'] = i['geo']['coordinates'][0]
                tuit['long'] = i['geo']['coordinates'][1]
            if 'lat' in tuit:
                tuit['datetime'] = i['created_at']
                csv = dict_to_csv(tuit)
                with open('user_tuits.csv', 'a') as f:
                    f.write(csv)
def retweet(i):
    """Post a quote-style status for tweet dict *i* (expects the keys
    'screen_name' and 'status'); the quoted text is truncated to 80
    characters. Connection errors are printed, not raised.
    """
    oauth = api.get_oauth()
    header = "#publicidadRestringida? RT " + i['screen_name'].upper()
    quoted = i['status'][0:80]
    status = header + " " + quoted
    url = "https://api.twitter.com/1.1/statuses/update.json"
    payload = {
        'status': status,
    }
    try:
        requests.post(url, auth=oauth, params=payload)
    except requests.exceptions.ConnectionError as e:
        print("Error", e)
def update_status(status, reply_to_tweet):
    """Post *status* to Twitter; when *reply_to_tweet* is a tweet URL
    ("https://twitter.com/<user>/status/<id>"), post it as a reply to
    that tweet, prefixed with "@<user> ".

    As before, a malformed reply_to_tweet URL makes re.search return
    None and raises AttributeError.
    """
    print(status)
    # NOTE(review): .decode('utf-8') assumes a Python 2 byte string
    # argument — confirm callers; on Python 3 str this raises
    status = convert_emoticons(status.decode('utf-8'))
    oauth = api.get_oauth()
    payload = dict()
    if reply_to_tweet:
        # raw strings: "\w" in a plain literal is an invalid escape
        tweet_id = re.search(r"/([0-9]+)", reply_to_tweet).groups()[0]
        payload['in_reply_to_status_id'] = tweet_id
        reply_user = re.search(r".com/(\w+)/status", reply_to_tweet).groups()[0]
        status = "@" + reply_user + " " + status
    url = "https://api.twitter.com/1.1/statuses/update.json"
    payload['status'] = status
    print(payload)
    r = requests.post(url, auth=oauth, params=payload)
    print(r.json()['text'])
#!/usr/bin/env python # -*- coding: utf-8 -*- """ For each jail, find tweets inside and retweet """ import codecs import requests import config import os.path import dataset import lib import api oauth = api.get_oauth() db = dataset.connect("sqlite:///" + os.path.join(config.local_folder, "tuits.db")) f = codecs.open(os.path.join(config.local_folder, "carceles_limites.csv")) data = f.readlines() f.close() retuits = [] for i in data: i = i.strip() if i.startswith("Carcel,"): continue else: i = i.split(",") penal = i[0]
def connect_twitter(self):
    """Return an OAuth handle obtained from the api module."""
    oauth = api.get_oauth()
    return oauth
#!/usr/bin/env python # -*- coding: utf-8 -*- import sys import config import api import requests import lib if len(sys.argv) < 3: print "This script inserts 1 tuit into our database using the status_id value and a string for the jail" print "Usage python insert_tuit.py 123208309281908 'Penal Diroes'" sys.exit() oauth = api.get_oauth() # get tuit data url = "https://api.twitter.com/1.1/statuses/show.json" payload = {'id': sys.argv[1].strip()} r = requests.get(url=url, auth=oauth, params=payload) data = r.json() obj = {} obj['carcel'] = unicode(sys.argv[2].strip(), "utf-8") obj['created_at'] = data['created_at'] obj['screen_name'] = data['user']['screen_name'] obj['status_id'] = data['id'] obj['text'] = data['text'] obj['user_id'] = data['user']['id']