def get_all_tweets(username, keyword):
    """Search recent tweets for *keyword* and return their texts as a list.

    Side effects: ensures ../<username>/<username>_images exists and opens
    (append mode) ../<username>/<username>.txt, as the original pipeline did.
    Falls back to the word list in test.json when the module-level Twitter
    credentials (consumer_key/consumer_secret/access_key/access_secret)
    are empty strings.
    """
    # 1st: build a folder to store the images
    image_dir = '../' + username + '/' + username + '_images'
    if not os.path.exists(image_dir):
        os.makedirs(image_dir)  # makedirs
        print("--- Building new folder... ---")
    else:
        print("--- There is this folder! ---")

    # No credentials configured: serve canned words from test.json instead.
    if consumer_key == "" or consumer_secret == "" or access_key == "" or access_secret == "":
        with open('test.json') as f:
            list_word = list(json.load(f))
        print(list_word)
        return list_word

    # authorize twitter, initialize tweepy
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_key, access_secret)
    api = tweepy.API(auth)

    alltweets = []  # all tweepy Tweets collected so far
    oldest = -1     # max_id used for pagination
    # keep grabbing tweets until it hits the target count
    while len(alltweets) < 5:
        new_tweets = api.search(q=keyword, count=10, max_id=str(oldest), include_entities=False)
        # BUGFIX: stop on an empty page BEFORE indexing new_tweets[-1];
        # the original computed `new_tweets[-1].id` first and raised
        # IndexError whenever the search returned no results.
        if not new_tweets:
            break
        alltweets.extend(new_tweets)
        # update the id of the oldest
        oldest = new_tweets[-1].id - 1

    print("%s tweets downloaded in total." % (len(alltweets)))

    list_word = []
    path = '../' + username + '/' + username + ".txt"
    # BUGFIX: context manager guarantees the handle is closed even on error.
    with open(path, "a+") as f:
        for tweet in alltweets:
            list_word.append(tweet.text)
            # print(tweet.text, "\n", file=f)  # writing tweets to disk is disabled
    return list_word
def retweet_sched(sc):
    """Post a daily ranking of the most-retweeted candidate accounts over the
    last 24 hours (data pulled from a local Elasticsearch index), then
    re-schedule itself on scheduler *sc* in 86400 s (one day).
    """
    writeLog("Running retweeter...")
    es = Elasticsearch([{'host': '127.0.0.1', 'port': 9500}])
    tw_counter = 0
    err_counter = 0
    follow_counter = 0
    # create bot DEVSBOTECU
    now = datetime.datetime.now()
    xminsago = datetime.datetime.now() - datetime.timedelta(hours=24)
    # Shift both stamps by -5 h to match the "-5:00" time_zone in the query
    # below. NOTE(review): bare `timedelta` here vs `datetime.timedelta`
    # above — assumes a `from datetime import timedelta` exists; confirm.
    now = now - timedelta(hours=5)
    xminsago = xminsago - timedelta(hours=5)
    # NOTE(review): `unicode` exists only on Python 2.
    writeLog(unicode(now))
    # SECURITY(review): credentials are hard-coded in source — rotate them
    # and move to configuration/environment.
    auth = tweepy.OAuthHandler(
        "KHhIPqvhqfihoOiQxpg6oSFbL",
        "iXpqm5tkEESkiZAGerEEuiPLlBdXTqDrGfJHhUb2ZNa0KQsnWB")
    auth.set_access_token("765271929657892864-IToT3ieMDz9uI3sIhoPMfqAqBzKD8Hk",
                          "9TBuWycoylX90wPL1rOFblMbSLjx4Hgaq1UZXfGD3D4EW")
    api = tweepy.API(auth)
    # Top-5 screen names by max retweet_count for a fixed set of candidate
    # user ids within the 24-hour window.
    res = es.search(
        index="ecuador-index",
        body={
            "sort": [{
                "retweeted_status.retweet_count": {
                    "order": "desc"
                }
            }],
            "aggs": {
                "2": {
                    "terms": {
                        "field": "retweeted_status.user.screen_name",
                        "size": 5
                    },
                    "aggs": {
                        "1": {
                            "max": {
                                "field": "retweeted_status.retweet_count"
                            }
                        }
                    }
                },
                "3": {
                    "max": {
                        "field": "retweeted_status.retweet_count"
                    }
                }
            },
            "size": 200,
            "query": {
                "filtered": {
                    "query": {
                        "query_string": {
                            "analyze_wildcard": True,
                            "query": "*"
                        }
                    },
                    "filter": {
                        "bool": {
                            "must": [{
                                "query": {
                                    "filtered": {
                                        "query": {
                                            "query_string": {
                                                "analyze_wildcard": True,
                                                "fields": ["retweeted_status.user.id"],
                                                "query": "760145761946497000 OR 375369761 OR 802207195572084000 OR 913131817 OR 315377387 OR 18661588 OR 24974978 OR 300390462"
                                            }
                                        },
                                        "filter": {
                                            "range": {
                                                "retweeted_status.created_at": {
                                                    "gte": xminsago,
                                                    "lte": now,
                                                    "time_zone": "-5:00"
                                                }
                                            }
                                        }
                                    }
                                }
                            }],
                            "must_not": []
                        }
                    }
                }
            }
        })
    writeLog("Got %d Hits:" % res['hits']['total'])
    tweetText = "Bip! Los candidatos más retuiteados en las últimas 24 horas:\n"
    j = 1
    # Append one "N.@screen_name" line per aggregation bucket.
    for hit in res['aggregations']['2']['buckets']:
        try:
            writeLog(hit["key"])
            tweetText += str(j) + ".@" + str(hit["key"]) + "\n"
            tw_counter += 1
            j += 1
        except tweepy.error.TweepError as e:
            # just in case tweet got deleted in the meantime or already retweeted
            err_counter += 1
            writeLog(str(e))
    writeLog(tweetText)
    api.update_status(status=tweetText)
    writeLog("Finished. %d Tweets retweeted, %d errors occured." %
             (tw_counter, err_counter))
    # Run again in one day.
    sc.enter(86400, 1, retweet_sched, (sc, ))
def get_api_handler(cfg):
    """Build an authenticated tweepy API client from a credential mapping.

    *cfg* must provide 'consumer_key', 'consumer_secret', 'access_token'
    and 'access_token_secret'.
    """
    handler = tweepy.OAuthHandler(cfg['consumer_key'], cfg['consumer_secret'])
    handler.set_access_token(cfg['access_token'], cfg['access_token_secret'])
    return tweepy.API(handler)
# question2: resolve well-known hostnames to their IPv4 addresses.
addr1 = socket.gethostbyname('google.com')
addr2 = socket.gethostbyname('yahoo.com')
addr3 = socket.gethostbyname('facebook.com')
print(addr1)
print(addr2)
print(addr3)

#question3
# Authenticate with the credentials stored in keys.py and run a hashtag search.
from keys import consumer_secret, consumer_key, access_secret, access_token
import tweepy

oauth = tweepy.OAuthHandler(consumer_key, consumer_secret)
oauth.set_access_token(access_token, access_secret)
api = tweepy.API(oauth)
# NOTE(review): despite the name, this holds search results, not a user.
user = api.search('#race3')

#question4
#A Library is a chunk of code that you can call from your own code, to help you do things more quickly/easily. For example, a Bitmap Processing library will provide facilities for loading and manipulating bitmap images, saving you having to write all that code for yourself. Typically a library will only offer one area of functionality (processing images or operating on zip files)
#An API (application programming interface) is a term meaning the functions/methods in a library that you can call to ask it to do things for you - the interface to the library.

#question5
# Re-authenticate (same handler construction as question 3).
from keys import consumer_key, consumer_secret, access_secret, access_token
import tweepy

oauth = tweepy.OAuthHandler(consumer_key, consumer_secret)
oauth.set_access_token(access_token, access_secret)
import xlrd
import tweepy

#api keys to access twitter endpoints
CONSUMER_KEY = ""
CONSUMER_SECRET = ""
ACCESS_TOKEN = ""
ACCESS_TOKEN_SECRET = ""

#initializes tweepy
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_TOKEN_SECRET)
api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True)


def convert():
    """Read twitterbot.xlsx and turn its data rows into dicts keyed by the
    header row (first row of the sheet)."""
    #empty list to store all of the @handles
    handles = []
    workbook = xlrd.open_workbook(r'twitterbot.xlsx', on_demand = True)
    worksheet = workbook.sheet_by_index(0)
    first_row = []  # The row where we stock the name of the column
    for col in range(worksheet.ncols):
        first_row.append( worksheet.cell_value(0,col) )
    # transform the workbook to a list of dictionaries
    data =[]
    for row in range(1, worksheet.nrows):
        elm = {}
        for col in range(worksheet.ncols):
            elm[first_row[col]]=worksheet.cell_value(row,col)
        # NOTE(review): in the code visible here `elm` is never appended to
        # `data`, `handles` stays empty and nothing is returned — this
        # function looks unfinished/truncated; confirm against the full file.
def start():
    """Authenticate with Twitter, start the stream listener, and block the
    caller until the stream's worker thread finishes."""
    credentials = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    credentials.set_access_token(TWITTER_ACCESS_TOKEN, TWITTER_ACCESS_TOKEN_SECRET)
    client = tweepy.API(credentials)
    stream = listen(client)
    # Join the stream's internal worker thread so the process stays alive.
    stream._thread.join()
def setUp(self):
    """Create an authenticated tweepy client and remember this test
    module's directory for fixture lookups."""
    handler = tweepy.OAuthHandler(consumer_key, consumer_secret)
    handler.set_access_token(access_token, access_token_secret)
    self.client = tweepy.API(handler)
    self.dirname = os.path.dirname(__file__)
def module(twitter_handle, number_tweets):
    """Download images from a user's recent tweets, stitch them into one
    video with ffmpeg, and label each image with Google Cloud Vision.

    Returns a dict {image_index: [label descriptions]} on success, or an
    error string when the user lookup fails / no images are found.
    """
    # Twitter
    #----------------------------------------------------------------------------------------------------#
    # Consumer Information
    # consumer_key = '*'
    # consumer_secret = '*'
    # access_token = '*'
    # access_secret = '*'
    # Authorization — keys come from module-level names, not the commented
    # block above.
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    api = tweepy.API(auth)
    DIRECTORY = os.getcwd()
    # Checking if there is already an output movie file
    # NOTE(review): this unconditionally deletes it rather than checking.
    os.system('rm output.mp4')
    # Gathering twitter data
    try:
        tweets = api.user_timeline(
            screen_name=twitter_handle,  # Gather first set of tweets
            count=number_tweets,
            include_rts=False,
            exclude_replies=True)
    except:
        # NOTE(review): bare except — every API failure (rate limit, network)
        # is reported as a bad username.
        print('The given username does not exist. \n')
        return 'The given username does not exist.'
    max_id = tweets[-1].id
    # Traversing tweets and finding those with images
    media_files = set()
    for status in tweets:
        if len(media_files) > 10:  # Maxes out a 10 images
            break
        media = status.entities.get('media', [])
        if (len(media) > 0):
            media_files.add(media[0]['media_url'])
    # Downloading images
    if len(media_files) == 0:
        print('There are no images in these tweets')
        return 'There are no images in these tweets'
    for media_file in media_files:
        wget.download(media_file)
    #FFMPEG
    #-----------------------------------------------------------------------------------------------------#
    # Converting all images that were downloaded into a single video file
    os.system('cat *.jpg | ffmpeg -f image2pipe -framerate .5 -i - output.mp4')
    #Google
    #-----------------------------------------------------------------------------------------------------#
    # For Google API authorization, set GOOGLE_APPLICATION_CREDENTIALS within .bash file
    labels_dict = {}
    path = glob.glob('*.jpg')
    client = vision.ImageAnnotatorClient()
    count = 0
    for img in path:
        with io.open(img, 'rb') as image_file:
            content = image_file.read()
        image = vision.types.Image(content=content)
        response = client.label_detection(image=image)
        labels = response.label_annotations
        labels_dict[count] = []
        for label in labels:
            labels_dict[count].append(label.description)
        count += 1
    print(labels_dict)
    return labels_dict
def tweeting(consumer_key, consumer_secret, my_access_token, my_access_token_secret, carrier):
    """Publish the Datos-COVID19 update tweet for the given *carrier*.

    carrier selects the report: 'reportediario', 'mmamp', 'informeepi',
    'vacunacion' or 'testeo'. Reads CSVs from ../output/producto*/ and
    uploads pre-rendered PNGs from ./img/.

    NOTE(review): several tweet strings contain mojibake ("SubsecretarÃa",
    "dÃas", "especÃficamente") — looks like a UTF-8/Latin-1 mix-up upstream;
    preserved here byte-for-byte.
    """
    # Authentication
    my_auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    my_auth.set_access_token(my_access_token, my_access_token_secret)
    my_api = tweepy.API(my_auth)
    # tweet
    if carrier == 'reportediario':
        my_positividad = pd.read_csv(
            '../output/producto49/Positividad_Diaria_Media_T.csv')
        my_mediamovil = pd.read_csv(
            '../output/producto75/MediaMovil_casos_nuevos_T.csv')
        # Column 17 is the national aggregate; week-over-week variation in %.
        mediamovil_nacional = int(
            pd.to_numeric(my_mediamovil.iloc[my_mediamovil.index.max()][17]))
        variacion_nacional = float(
            100 *
            (pd.to_numeric(my_mediamovil.iloc[my_mediamovil.index.max()][17]) -
             pd.to_numeric(
                 my_mediamovil.iloc[my_mediamovil.index.max() - 7][17])) /
            pd.to_numeric(my_mediamovil.iloc[my_mediamovil.index.max()][17]))
        positividad_nacional = float(
            100 *
            pd.to_numeric(my_positividad.iloc[my_positividad.index.max()][4]))
        variacion_positividad = float(
            100 *
            (pd.to_numeric(my_positividad.iloc[my_positividad.index.max()][4]) -
             pd.to_numeric(
                 my_positividad.iloc[my_positividad.index.max() - 7][4])) /
            pd.to_numeric(my_positividad.iloc[my_positividad.index.max()][4]))
        positividad_nacional = ("%.2f" % positividad_nacional)
        positividad = float(
            100 *
            pd.to_numeric(my_positividad.iloc[my_positividad.index.max()][3]))
        positividad_hoy = ("%.2f" % positividad)
        casos_nuevos = str(
            int(my_positividad.iloc[my_positividad.index.max()][2]))
        muestras = str(int(my_positividad.iloc[my_positividad.index.max()][1]))
        # create update elements
        tweet_text = '🤖Actualicé el reporte diario del @ministeriosalud de hoy 💫, gracias a la SubsecretarÃa de Salud Pública y de Redes Asistenciales. Hay ' + str(
            mediamovil_nacional
        ) + ' casos nuevos promedio en los últimos 7 dÃas, con positividad de ' + str(
            positividad_nacional
        ) + '%. Más detalles en los productos en la imagen. https://github.com/MinCiencia/Datos-COVID19'
        reply2_text = '🤖El total de casos nuevos para hoy es ' + casos_nuevos + '. De las ' + muestras + ' muestras que se analizaron en las últimas 24 horas en laboratorios nacionales, un ' + positividad_hoy + '% resultó positivo.'
        # Pick the wording of the first reply according to the sign of both
        # weekly variations.
        if variacion_nacional >= 0 and variacion_positividad >= 0:
            variacion_nacional = ("%.2f" % variacion_nacional)
            variacion_positividad = ("%.2f" % variacion_positividad)
            reply1_text = '🤖 En comparación con la semana anterior, la media móvil de los últimos 7 dÃas para casos nuevos creció en ' + str(
                variacion_nacional
            ) + '% y la positividad en ' + str(
                variacion_positividad
            ) + '% a nivel nacional. Detalles a nivel regional en: https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto75 y https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto49'
        elif variacion_nacional >= 0 and variacion_positividad < 0:
            variacion_nacional = ("%.2f" % variacion_nacional)
            variacion_positividad = ("%.2f" % variacion_positividad)
            reply1_text = '🤖 En comparación con la semana anterior, la media móvil de los últimos 7 dÃas para casos nuevos creció en ' + str(
                variacion_nacional
            ) + '% y la positividad bajó en ' + str(
                variacion_positividad
            ) + '% a nivel nacional. Detalles a nivel regional en: https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto75 y https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto49'
        elif variacion_nacional < 0 and variacion_positividad < 0:
            # NOTE(review): this both-negative branch still says "creció"
            # (grew) even though both variations are negative — confirm the
            # intended wording.
            variacion_nacional = ("%.2f" % variacion_nacional)
            variacion_positividad = ("%.2f" % variacion_positividad)
            reply1_text = '🤖 En comparación con la semana anterior, la media móvil de los últimos 7 dÃas para casos nuevos creció en ' + str(
                variacion_nacional
            ) + '% y la positividad en ' + str(
                variacion_positividad
            ) + '% a nivel nacional. Detalles a nivel regional en: https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto75 y https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto49'
        elif variacion_nacional < 0 and variacion_positividad >= 0:
            variacion_nacional = ("%.2f" % variacion_nacional)
            variacion_positividad = ("%.2f" % variacion_positividad)
            reply1_text = '🤖 En comparación con la semana anterior, la media móvil de los últimos 7 dÃas para casos nuevos bajó en ' + str(
                variacion_nacional
            ) + '% y la positividad aumentó en ' + str(
                variacion_positividad
            ) + '% a nivel nacional. Detalles a nivel regional en: https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto75 y https://github.com/MinCiencia/Datos-COVID19/tree/master/output/producto49'
        # Generate text tweet with media (image)
        media1 = my_api.media_upload('./img/Datos covid_Bot_A_g1.png')
        media2 = my_api.media_upload('./img/Datos covid_Bot_A_g2.png')
        media3 = my_api.media_upload('./img/Datos covid_Bot_A_g3.png')
        media4 = my_api.media_upload('./img/Datos covid_Bot_A_g4.png')
        tweet = my_api.update_status(status=tweet_text,
                                     media_ids=[
                                         media1.media_id, media2.media_id,
                                         media3.media_id, media4.media_id
                                     ])
        # Thread the two replies under the main tweet.
        my_api.update_status(status=reply1_text, in_reply_to_status_id=tweet.id)
        my_api.update_status(status=reply2_text, in_reply_to_status_id=tweet.id)
    elif carrier == 'mmamp':
        # create update elements
        tweet_text = '🤖Actualicé los datos de calidad del aire en todo el territorio nacional, desde las estaciones del SINCA del @MMAChile 💫. Mira especÃficamente qué actualicé en la imagen y clona el GitHub https://github.com/MinCiencia/Datos-COVID19'
        media1 = my_api.media_upload('./img/Datos covid_Bot_G_g1.png')
        # media2= my_api.media_upload('./img/Datos covid_Bot_A_g2.png')
        # media3= my_api.media_upload('./img/Datos covid_Bot_A_g3.png')
        # media4= my_api.media_upload('./img/Datos covid_Bot_A_g4.png')
        # Generate text tweet with media (image)
        my_api.update_status(status=tweet_text, media_ids=[media1.media_id])
    elif carrier == 'informeepi':
        my_epi = pd.read_csv('../output/producto1/Covid-19_T.csv')
        fecha_informe = my_epi.iloc[my_epi.index.max() - 1][0]
        # create update elements
        tweet_text = '🤖Actualicé los datos del Informe Epidemiológico publicado por @ministeriosalud de hoy 💫, con los datos correspondientes al ' + fecha_informe + '. Gracias al equipo de especialistas en epidemiologÃa. Mira qué actualicé en la imagen y clona el GitHub https://github.com/MinCiencia/Datos-COVID19'
        media1 = my_api.media_upload('./img/Datos covid_Bot_B_g1.png')
        media2 = my_api.media_upload('./img/Datos covid_Bot_B_g2.png')
        media3 = my_api.media_upload('./img/Datos covid_Bot_B_g3.png')
        media4 = my_api.media_upload('./img/Datos covid_Bot_B_g4.png')
        # Generate text tweet with media (image)
        my_api.update_status(status=tweet_text,
                             media_ids=[
                                 media1.media_id, media2.media_id,
                                 media3.media_id, media4.media_id
                             ])
    elif carrier == 'vacunacion':
        now = datetime.datetime.now()
        my_vacunacion = pd.read_csv('../output/producto76/vacunacion_t.csv')
        vacunados = int(
            pd.to_numeric(my_vacunacion.iloc[my_vacunacion.index.max()][1]))
        vacunados_pauta_completa = int(
            pd.to_numeric(my_vacunacion.iloc[my_vacunacion.index.max()][2]))
        # NOTE(review): 15000000 is a hard-coded target population figure.
        my_vacunacion_avance = 100 * vacunados / 15000000
        my_vacunacion_avance_pauta_completa = 100 * vacunados_pauta_completa / 15000000
        my_vacunacion_avance = ("%.2f" % my_vacunacion_avance)
        my_vacunacion_avance_pauta_completa = (
            "%.2f" % my_vacunacion_avance_pauta_completa)
        # Doses administered today = today's totals minus yesterday's totals.
        dosis_dia = vacunados + vacunados_pauta_completa - (
            pd.to_numeric(my_vacunacion.iloc[my_vacunacion.index.max() - 1][1])
            + pd.to_numeric(
                my_vacunacion.iloc[my_vacunacion.index.max() - 1][2]))
        # create update elements
        tweet_text = '🤖Actualicé los datos que muestran el avance en la campaña de vacunación #YoMeVacuno de hoy 💫, gracias a APS y DIPLAS, @ministeriosalud. Van ' + str(
            vacunados
        ) + ' vacunados con primera dosis en 🇨🇱. Mira especÃficamente qué actualicé en la imagen y clona el github https://github.com/MinCiencia/Datos-COVID19'
        reply1_text = '🤖Además, un total de ' + str(
            vacunados_pauta_completa
        ) + ' personas tienen pauta completa. En 🇨🇱, un ' + my_vacunacion_avance + '% tiene al menos una dosis, y un ' + my_vacunacion_avance_pauta_completa + '% completó su pauta de vacunación. Detalles en https://github.com/MinCiencia/Datos-COVID19'
        if now.hour > 20:
            time = '7 pm'
        else:
            time = '11 am'
        #time = '7 pm'
        reply2_text = '🤖 A las ' + time + ' del ' + my_vacunacion.iloc[
            my_vacunacion.index.max()][0] + ', un total de ' + str(
                int(dosis_dia)
            ) + ' personas han recibido la vacuna contra COVID-19 hoy. Detalles por comuna, edad y prioridad en https://github.com/MinCiencia/Datos-COVID19'
        media1 = my_api.media_upload('./img/Datos covid_Bot_C_g1.png')
        # media2= my_api.media_upload('./img/Datos covid_Bot_A_g2.png')
        # media3= my_api.media_upload('./img/Datos covid_Bot_A_g3.png')
        # media4= my_api.media_upload('./img/Datos covid_Bot_A_g4.png')
        # Generate text tweet with media (image)
        tweet = my_api.update_status(status=tweet_text,
                                     media_ids=[media1.media_id])
        tweet2 = my_api.update_status(status=reply1_text,
                                      in_reply_to_status_id=tweet.id)
        my_api.update_status(status=reply2_text,
                             in_reply_to_status_id=tweet2.id)
    elif carrier == 'testeo':
        tweet_text = "Actualicé los datos del informe de testeo y trazabilidad del @ministeriosalud de hoy 💫, ¡gracias @FunCienciayVida! Mira especÃficamente qué actualicé en la imagen, y clónate el github https://github.com/MinCiencia/Datos-COVID19"
        media1 = my_api.media_upload('./img/Datos covid_Bot_D_g1.png')
        # media2= my_api.media_upload('./img/Datos covid_Bot_A_g2.png')
        # media3= my_api.media_upload('./img/Datos covid_Bot_A_g3.png')
        # media4= my_api.media_upload('./img/Datos covid_Bot_A_g4.png')
        # Generate text tweet with media (image)
        my_api.update_status(
            status=tweet_text,
            media_ids=[
                media1.media_id
                # media2.media_id,
                # media3.media_id,
                # media4.media_id
            ])
#!/usr/bin/env python3
"""Markov-chain Twitter bot: builds a model from rod.txt and tweets one
short generated sentence with a #TwilightZone hashtag."""
import markovify
import tweepy

# credentials to login to twitter api
consumer_key = ''
consumer_secret = ''
access_token = ''
access_secret = ''

# login to twitter account api
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_secret)
tp = tweepy.API(auth)

# get raw text as string
with open("rod.txt", encoding="utf-8") as f:
    text = f.read()

# build model
text_model = markovify.Text(text)

# markovify sentence + hashtag (together = 140 characters)
# BUGFIX: make_short_sentence() returns None when it cannot produce a
# sentence within the limit; the original then crashed with
# "unsupported operand type(s)" on None + str. Bail out cleanly instead.
sentence = text_model.make_short_sentence(127)
if sentence is None:
    raise SystemExit("markovify could not generate a sentence of <= 127 characters")
tweet = sentence + "\n#TwilightZone"

# send to twitter
tp.update_status(tweet)
def __init__(self):
    """Authenticate against Twitter with the keys from *settings* and
    keep both the handler and the API client on the instance."""
    handler = tweepy.OAuthHandler(settings.CONSUMER_KEY,
                                  settings.CONSUMER_SECRET)
    handler.set_access_token(settings.ACCESS_TOKEN, settings.ACCESS_SECRET)
    self.auth = handler
    self.api = tweepy.API(handler)
import tweepy as tw import time import gspread from oauth2client.service_account import ServiceAccountCredentials # KEYS PARA CONSEGUIR OS DADOS DO TWITTER consumer_key = 'rdbpiuOr01zwgLwzdCb5SltXP' consumer_secret = 'e7hIO2AHYKP3XlVy8BvtkZlj7De9vlnM3ytAN3YibfKwv4ISsA' access_token = '1542145388-8cKUsqQsV3HvvTfHwQ267bLN060y1brXqRtznl0' access_token_secret = 'RG35wzsPMP3FnJjI9kFWIJswi250iDkdVWMSZkn36P0Yd' # AUTENTICAÇÃO TWITTER auth = tw.OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_token, access_token_secret) api = tw.API(auth, wait_on_rate_limit=True) deputados = [ [ "@abou_anni", "@AcacioFavacho", "@AdolfoViana_", "@adrianasounovo", "@adrianodobaldy", "@AecioNeves", "@AfonsoFlorence", "@DepAfonsoHamm", "@afonso_motta", "@depaguinaldo11", "@FaleiroAirton", "@Alan_Rick", "@Alceu_Moreira", "@oficialalesilva", "@AlencarBraga13", "@alessandromolon", "@AlexManentePPS", "@depalexsantana", "@alefrotabrasil", "@lexandreleite", "@padilhando", "@AleSerfiotis", "@AlexisFonteyne", "@Alice_Portugal", "@alielmachado", "@dep_alinegurgel", "@AlineSleutjes", "@Altineu", "@AluisioMendesMA" ], [ "@amaronetotv", "@aferreira2020", "@andrepdt12", "@DepAndreFufuca", "@AndreJanonesAdv", "@DeputadaAngela", "@AntonioBrito_", "@achinaglia", "@ArnaldoJardim", "@aroldomartins", "@ArthurLira_", "@DepArthurMaia", "@assis_carvalho", "@deputadoatila", "@dep_acoutinho",
print("User UTC Offset \t:" + str(tweet.user.utc_offset)) print("User Status count \t:" + str(tweet.user.statuses_count)) print("User Description \t:", tweet.user.description) print("User Follower count \t:" + str(tweet.user.followers_count)) print("User Created at \t:" + str(tweet.user.created_at)) if __name__ == '__main__': # Get access and key from another class consumer_key='VGqIqCx4SzyF3MekfmjOydNdp' consumer_secret='xevr5Auxcs0kAEBB0k8GBLPX97quIgtiKDuLv39hTNhwm3KHkC' access_token_key='841302681708765184-mb3KdzpclXXneJGDi9w60zTe3PaiWDH' access_token_secret='1jWTRLHiZ55nKgiiAgMLewicnNlpzQqVv6sOm0VlhGxkD' # Authentication auth = OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_token_key, access_token_secret) api = tweepy.API(auth, wait_on_rate_limit=True, wait_on_rate_limit_notify=True, retry_count=10, retry_delay=5, retry_errors=5) streamListener = TwitterStreamListener() myStream = tweepy.Stream(auth=api.auth, listener=streamListener) print '1' myStream.filter(track=['michael'], async=True) print '2'
def get_api(self):
    """Return a tweepy client that parses API responses as raw JSON."""
    oauth = tweepy.OAuthHandler(twitter_cred.api_key,
                                twitter_cred.api_key_secret)
    oauth.set_access_token(twitter_cred.access_token,
                           twitter_cred.access_token_secret)
    # JSONParser makes every endpoint return plain dicts/lists instead of
    # tweepy model objects.
    return tweepy.API(oauth, parser=tweepy.parsers.JSONParser())
def __init__(self):
    """Build the instance's tweepy client from the *settings* module."""
    handler = tweepy.OAuthHandler(settings.consumer_key,
                                  settings.consumer_secret)
    handler.set_access_token(settings.access_token,
                             settings.access_token_secret)
    self.api = tweepy.API(handler)
def main():
    # Authenticate with the module-level ckey/csecret/atoken/asecret keys
    # and hand the client to get_db().
    auth = OAuthHandler(ckey, csecret)
    auth.set_access_token(atoken, asecret)
    api = tweepy.API(auth)
    # NOTE(review): Python 2 print statement — this module predates Python 3.
    print "Testes search"
    get_db(api)
def API_response():
    """Return a tweepy API client using the shared auth handler from
    authorize_twitter()."""
    return tweepy.API(authorize_twitter())
def get_all_tweets(screen_name):
    """Fetch a user's timeline with a tweepy Cursor and classify each status
    into tweets / retweets / replies, returned as a dict of lists of
    {'id_str', 'created', 'text'} records."""
    #Twitter only allows access to a users most recent 3240 tweets with this method
    consumer_key = TwitterFriend.app.config['API_KEY']
    consumer_secret = TwitterFriend.app.config['API_SECRET']
    user_token = flask.session['token']
    #authorize twitter, initialize tweepy
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(user_token[0], user_token[1])
    api = tweepy.API(auth)
    #initialize a list to hold all the tweepy Tweets
    alltweets = []
    tweets = tweepy.Cursor(api.user_timeline,
                           screen_name=screen_name,
                           count=200).items()
    while True:
        try:
            tweet = next(tweets)
        except tweepy.TweepError:
            # Rate limited: wait out the 15-minute window, then retry once.
            print("Reached API limit. Sleeping now.")
            time.sleep(60 * 15)
            tweet = next(tweets)
        except StopIteration:
            break
        # Append username/ fullname pair to list:
        alltweets.append(tweet)
    # PROCESSED JSON OUTPUT:
    # transform tweepy tweets into json
    # alltweets is list of type tweepy.models.Status
    tweets = [
    ]  #TODO Still includes some replies (for deleted tweets or private users)
    retweets = []
    replies = []
    # TODO full text of the tweet is not there
    for tweet in alltweets:
        tweet_info = {}
        tweet_info['id_str'] = tweet.id_str
        tweet_info['created'] = tweet.created_at.strftime(
            "%d-%b-%Y (%H:%M:%S.%f)")
        if len(tweet.text) > 0:
            # clean tweet content:
            text = clean_tweet(tweet.text)
            tweet_info['text'] = text
        # NOTE(review): `tweet.retweeted` reports whether the AUTHENTICATED
        # user retweeted this status, not whether the status itself is a
        # retweet — the usual check is hasattr(tweet, 'retweeted_status');
        # confirm intent before changing.
        if tweet.retweeted == True:
            retweets.append(tweet_info)
        elif tweet.in_reply_to_screen_name != None:
            replies.append(tweet_info)
        # Catch replies to a deleted tweet or changed username
        # Need to check original text because @ mentions are cleaned from tweets
        elif len(tweet.text) > 0 and tweet.text[0] == '@':
            replies.append(tweet_info)
        else:
            tweets.append(tweet_info)
    data = {}
    data['tweets'] = tweets
    data['retweets'] = retweets
    data['replies'] = replies
    return data
def get_all_tweets(screen_name):
    """Download ~15 recent tweets for *screen_name*, extract their image
    URLs, convert each image to a short mp4 via ffmpeg, and label the
    images with Google Cloud Vision (labels written to resul.txt)."""
    #Twitter only allows access to a users most recent 3240 tweets with this method
    #authorize twitter, initialize tweepy
    @classmethod
    def parse(cls, api, raw):
        # Wrap the stock parser so every parsed model also keeps its raw JSON.
        status = cls.first_parse(api, raw)
        setattr(status, 'json', json.dumps(raw))
        return status

    # Status() is the data model for a tweet
    tweepy.models.Status.first_parse = tweepy.models.Status.parse
    tweepy.models.Status.parse = parse
    # User() is the data model for a user profil
    tweepy.models.User.first_parse = tweepy.models.User.parse
    tweepy.models.User.parse = parse
    # You need to do it for all the models you need
    auth = OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    api = tweepy.API(auth)
    #initialize a list to hold all the tweepy Tweets
    alltweets = []
    #make initial request for most recent tweets (200 is the maximum allowed count)
    new_tweets = api.user_timeline(screen_name=screen_name, count=10)
    #save most recent tweets
    alltweets.extend(new_tweets)
    #save the id of the oldest tweet less one
    oldest = alltweets[-1].id - 1
    #keep grabbing tweets until there are no tweets left to grab
    while len(new_tweets) > 0:
        #all subsiquent requests use the max_id param to prevent duplicates
        new_tweets = api.user_timeline(screen_name=screen_name,
                                       count=10,
                                       max_id=oldest)
        #save most recent tweets
        alltweets.extend(new_tweets)
        #update the id of the oldest tweet less one
        oldest = alltweets[-1].id - 1
        if (len(alltweets) > 15):
            break
        print("...%s tweets downloaded so far" % (len(alltweets)))
    #write tweet objects to JSON
    #file = open('tweet.json', 'w')
    #print "Writing tweet objects to JSON please wait..."
    #for status in alltweets:
    #    json.dump(status._json,file,sort_keys = True,indent = 4)
    # Collect the first media URL of every tweet that has one.
    media_files = set()
    for status in alltweets:
        media = status.entities.get('media', [])
        if (len(media) > 0):
            media_files.add(media[0]['media_url'])
    #close the file
    print(media_files)
    #download image
    media_names = set()
    for media_file in media_files:
        filename = media_file.split("/")[-1]
        media_names.add(filename)
        wget.download(media_file)
    #file.close()
    print(media_names)
    #convert image to video
    for filename in media_names:
        output = filename.replace(".jpg", ".mp4")
        cmd = "ffmpeg -loop 1 -i " + filename + " -c:a libfdk_aac -ar 44100 -ac 2 -vf \"scale='if(gt(a,16/9),1280,-1)\':\'if(gt(a,16/9),-1,720)\', pad=1280:720:(ow-iw)/2:(oh-ih)/2\" -c:v libx264 -b:v 10M -pix_fmt yuv420p -r 30 -shortest -avoid_negative_ts make_zero -fflags +genpts -t 1 " + output
        os.system(cmd)
    #describe the content of the images
    # Create a Vision client.
    vision_client = google.cloud.vision.ImageAnnotatorClient()
    # NOTE(review): `file` shadows the builtin and is never closed in the
    # code visible here — confirm the full file closes it.
    file = open('resul.txt', 'w')
    #vision_client = vision.Client()
    # TODO (Developer): Replace this with the name of the local image
    # file to analyze.
    for image_file_name in media_names:
        with io.open(image_file_name, 'rb') as image_file:
            content = image_file.read()
        # Use Vision to label the image based on content.
        image = google.cloud.vision.types.Image(content=content)
        response = vision_client.label_detection(image=image)
        file.write("Labels for " + image_file_name + " :\n")
        #print('Labels:')
        for label in response.label_annotations:
            #print(label.description)
            file.write(label.description + "\n")
import tweepy
from config import buildKey
from log import writeLog

# Module-level client and the bot's own account (used to skip self-retweets).
api = tweepy.API(buildKey())
user = api.me()


def retweet(str, num=5):
    """Favorite and retweet up to *num* search results for the query *str*.

    Skips the bot's own tweets; per-tweet API errors are logged and the
    loop continues.

    NOTE(review): the first parameter shadows the builtin `str`; its name
    is kept to preserve the existing keyword-call interface.
    """
    for tweet in tweepy.Cursor(api.search, str).items(num):
        try:
            if tweet.user.screen_name != user.screen_name:
                tweet.favorite()
                tweet.retweet()
                writeLog('Retweeted tweet by: ' + tweet.user.screen_name)
        except tweepy.TweepError as e:
            writeLog(e.reason)
        # BUGFIX: was `except StopInteration:` — a misspelling that would
        # itself raise NameError if this handler were ever reached.
        except StopIteration:
            break


retweet("#GameDev", 5)
#self.logprint("consumer key/secret:", self.cfg.get('twitter_consumer_key'), self.cfg.get('twitter_consumer_secret')) #self.logprint("ouath token/secret:", self.cfg.get('twitter_oauth_token'), self.cfg.get('twitter_oauth_token_secret')) try: self.auth = tweepy.OAuthHandler( self.cfg.get('twitter_consumer_key'), self.cfg.get('twitter_consumer_secret')) self.auth.set_access_token( self.cfg.get('twitter_oauth_token'), self.cfg.get('twitter_oauth_token_secret')) streamtwitter = self.cfg.get_bool('twitter_stream') #username = self.cfg.get('twitter_username') #password = self.cfg.get('twitter_password') except KeyError, ke: print "Couldn't find twitter authentication information in config file:", ke sys.exit(1) self.twit = tweepy.API(self.auth) # Listen to Twitter stream. try: if streamtwitter: self.stream_twitter() else: self.twitter_loop() except KeyboardInterrupt: print "Quitting..." sys.exit(0) def stream_twitter(self): listener = Listener(self.twit) listener.add_handler(self.handle_tweet) stream = tweepy.streaming.Stream(self.auth, listener) #, secure=True)
settings.close()


def OAuth():
    """Build a tweepy OAuth handler from the four entries in the
    module-level `authorisation_information` sequence; returns None if
    anything goes wrong."""
    try:
        auth = tweepy.OAuthHandler(authorisation_information[0].strip(),
                                   authorisation_information[1].strip())
        auth.set_access_token(authorisation_information[2].strip(),
                              authorisation_information[3].strip())
        return auth
    except Exception as e:
        # NOTE(review): swallowing the error and returning None means the
        # tweepy.API(OAuth()) call below silently builds an unauthenticated
        # client — consider logging `e` at least.
        return None


initial_setup()
Api = tweepy.API(OAuth())


def post_video():
    """Upload the most recently muxed video to Twitter.

    NOTE(review): this function appears truncated in the code visible here
    (`uploaded` toggling suggests a retry loop that is cut off).
    """
    uploaded = False
    if not uploaded:
        print("Uploading Video...")
        upload_result = Api.media_upload("completed/{}.mp4".format(
            MuxingClass.song_name[:-4]))
        print("Video uploaded, waiting for processing...")
        uploaded = True
        # Give Twitter time to process the upload before it is used.
        sleep(15)
"""Getting tweets and users from the Twitter DB""" from os import getenv import tweepy import spacy from .models import User, DB, Tweet TWITTER_AUTH = tweepy.OAuthHandler(getenv("TWITTER_API_KEY"), getenv("TWITTER_API_KEY_SECRET")) TWITTER = tweepy.API(TWITTER_AUTH) # loads word2vect Model nlp = spacy.load("my_model") def vectorize_tweet(tweet_text): return nlp(tweet_text).vector def add_or_update_user(username): """ Gets twitter user and tweets from twitter DB Gets user by "username" parameter. """ try: # gets back twitter user object twitter_user = TWITTER.get_user(username) # Either updates or adds user to our DB db_user = (User.query.get(twitter_user.id)) or User(id=twitter_user.id, name=username) DB.session.add(db_user) # Add user if don't exist
def create_API(self):
    """Run the OAuth handshake with the instance's stored keys and attach
    the resulting REST client as self.api."""
    # OAuth process, using the keys and tokens
    oauth = tweepy.OAuthHandler(self.consumer_key, self.consumer_secret)
    oauth.set_access_token(self.access_token, self.access_secret)
    # create rest API
    self.api = tweepy.API(oauth)
class UseTwitter:
    """Twitter helper for the sif_notify bot (Python 2 code: print statements).

    NOTE(review): credentials are hard-coded class attributes — a security
    risk; they should be rotated and moved to a config file or environment.
    """

    # Last-seen tweet timestamp (formatted string), author, and request text.
    time_tweet = ""
    username = ""
    request = ""
    flag = 0
    # Authentication (shared, class-level tweepy client).
    consumer_key = "E7PLfevrHNVI9ozuQlF6xXDgG"
    consumer_secret = "CfobRZOWqXvPytSbn3zUDklsS81bUWXJjd3RIEYcUKnOnd1kQx"
    access_token = "721661385977909249-wtx82HRTTSzm1mJgOnW0vbjR13tWCOr"
    access_secret = "uFeckhWHjU5JHBOGIvPLEHQgSbfRk3SjrxUBMyLNRNh7X"
    auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
    auth.set_access_token(access_token, access_secret)
    api = tweepy.API(auth)

    ################
    ## POST TWEET ##
    ################
    def post_tweet(self, message):
        # Post a plain status update.
        self.api.update_status(message)

    ################
    ## POST TWEET WITH IMAGE ##
    ################
    def post_tweet_image(self, reply_id, message, image_path):
        # Post an image as a reply to the given status id.
        self.api.update_with_media(in_reply_to_status_id=reply_id,
                                   filename=image_path,
                                   status=message)

    ##################
    ## GET TWEET SC ##
    ##################
    def get_tweet_sc(self, loopno):
        """Grab a screenshot image from home-timeline entry `loopno`.

        Downloads the attached media to /var/www/up/tweetsc.jpg and records
        the tweet's (JST-shifted) timestamp and author.
        Returns the filename on success, 0 otherwise.
        NOTE(review): each `self.api.home_timeline()` call hits the API
        again — five calls per invocation; consider fetching once.
        """
        filename = 0
        if self.api.home_timeline()[loopno].text.find('@sif_notify_bot') != -1:
            # ":orig" suffix requests the original-resolution image.
            tweetimage = self.api.home_timeline(
            )[loopno].entities['media'][0]['media_url'] + ":orig"
            self.time_tweet = self.api.home_timeline()[loopno].created_at
            # Shift UTC to JST (+9h); `time_format` is a module-level format string.
            self.time_tweet += datetime.timedelta(hours=9)
            self.time_tweet = self.time_tweet.strftime(time_format)
            self.username = self.api.home_timeline()[loopno].author.screen_name
            print "time_tweet:" + self.time_tweet
            print "author:@" + self.username
            filename = 'tweetsc.jpg'
            urllib.urlretrieve(tweetimage, os.path.join('/var/www/up/', filename))
        else:
            print "no update info"
        if self.time_tweet != '':
            print "get SC from tweet"
            return filename
        else:
            return 0

    ####################
    ## GET GATCHA REQ ##
    ####################
    def get_gatcha_request(self):
        """Check the newest home-timeline tweet for a bot mention.

        On a match, store the tweet text/timestamp/author on the instance.
        Returns 1 if a request was found, else 0.
        """
        if self.api.home_timeline()[0].text.find('@sif_notify_bot') != -1:
            self.request = self.api.home_timeline()[0].text
            self.time_tweet = self.api.home_timeline()[0].created_at
            # Shift UTC to JST (+9h) before formatting.
            self.time_tweet += datetime.timedelta(hours=9)
            self.time_tweet = self.time_tweet.strftime(time_format)
            self.username = self.api.home_timeline()[0].author.screen_name
            print "time_tweet:" + self.time_tweet
            print "author:@" + self.username
            print "request:" + self.request
            flag = 1
        else:
            flag = 0
            print "no update info"
        return flag

    ################
    ## GET BORDER ##
    ################
    def get_border(self):
        """Parse event-border scores from @sifjp_trackbot's latest tweet.

        Returns [time, T1, T2, T3] on success, [0, 0, 0, 0] otherwise.
        NOTE(review): the `if tweet != ''` guard runs after the split/index
        operations, so a malformed tweet raises before it is reached.
        """
        print "get_border"
        tweet = self.api.user_timeline("sifjp_trackbot")[0].text
        #print tweet
        line = tweet.split("\n")
        # Each tier line looks like "Tn: <score> (<delta>)"; keep the score.
        T1 = line[1].split(u': ')[1].split(" (")[0]
        T2 = line[2].split(u': ')[1].split(" (")[0]
        T3 = line[3].split(u': ')[1].split(" (")[0]
        time = line[4].split(u' ')[1] + " " + line[4].split(u' ')[2]
        if tweet != '':
            return [time, T1, T2, T3]
        else:
            return [0, 0, 0, 0]
def get_twitter_auth(self):
    """Return an authenticated tweepy API client.

    The client retries transient failures (3 tries, 5 s apart) and blocks
    instead of raising when the Twitter rate limit is hit.
    """
    oauth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    oauth.set_access_token(TWITTER_ACCESS_KEY, TWITTER_ACCESS_SECRET)
    return tweepy.API(oauth,
                      retry_count=3,
                      retry_delay=5,
                      wait_on_rate_limit=True)
print(*objects, sep=sep, end=end, file=file) else: f = lambda obj: str(obj).encode(enc, errors='backslashreplace').decode( enc) print(*map(f, objects), sep=sep, end=end, file=file) consumer_key = twitter_info.consumer_key ## take access info/keys/tokens from file twitter_info consumer_secret = twitter_info.consumer_secret access_token = twitter_info.access_token access_token_secret = twitter_info.access_token_secret auth = tweepy.OAuthHandler(consumer_key, consumer_secret) auth.set_access_token(access_token, access_token_secret) # Set up library to grab stuff from twitter with your authentication, and return it in a JSON format api = tweepy.API(auth, parser=tweepy.parsers.JSONParser()) # And we've provided the setup for your cache. But we haven't written any functions for you, so you have to be sure that any function that gets data from the internet relies on caching. cache_fname = 'twitterfile.json' # String for your file. We want the JSON file type, bcause that way, we can easily get the information into a Python dictionary! try: cache_file = open(cache_fname, 'r') # Try to read the data from the file cache_contents = cache_file.read() # If it's there, get it into a string cache_contents.close( ) # Close the file, we're good, we got the data in a dictionary cache_diction = json.loads( cache_contents) # And then load it into a dictionary except: cache_diction = { } ##if not in the file still create empty dict to insert cache or tweet scrape
    def DownloadData(self):
        """Search Twitter for the GUI-entered term, classify each tweet's
        sentiment with TextBlob, write per-class CSV files, and show a
        Tk results window.

        Reads `e1` (search term) and `e2` (tweet count) — module-level Tk
        entry widgets — and uses the module-level `sa` object for the
        result-view callbacks.
        """
        # NOTE(review): credentials are placeholders in this chunk.
        consumerKey = '-----------------------------------------------------'
        consumerSecret = '--------------------------------------------------'
        accessToken = '-----------------------------------------------------'
        accessTokenSecret = '-----------------------------------------------'
        auth = tweepy.OAuthHandler(consumerKey, consumerSecret)
        auth.set_access_token(accessToken, accessTokenSecret)
        api = tweepy.API(auth)
        # NOTE(review): duplicate — the three lines below recreate the exact
        # same auth/api objects and could be removed.
        auth = tweepy.OAuthHandler(consumerKey, consumerSecret)
        auth.set_access_token(accessToken, accessTokenSecret)
        api = tweepy.API(auth)
        searchTerm = e1.get()
        NoOfTerms = int(e2.get())
        # Cursor.items() yields up to NoOfTerms English tweets lazily.
        self.tweets = tweepy.Cursor(api.search, q=searchTerm, lang = "en").items(NoOfTerms)
        # Running totals for the sentiment buckets.
        polarity = 0
        positive = 0
        stronglypositive = 0
        negative = 0
        stronglynegative = 0
        neutral = 0
        # One CSV per sentiment class (positive+strongly-positive share one,
        # likewise the negatives).
        csvFile = open('results_neutral.csv', 'w')
        csvFile1 = open('results_pos.csv','w')
        csvFile2 = open('results_neg.csv','w')
        csvWriter = csv.writer(csvFile)
        csvWriter1 = csv.writer(csvFile1)
        csvWriter2 = csv.writer(csvFile2)
        for tweet in self.tweets:
            self.tweetText.append(self.cleanTweet(tweet.text).encode('utf-8'))
            analysis = TextBlob(tweet.text)
            polarity += analysis.sentiment.polarity
            # NOTE(review): each writerow() call writes the *entire*
            # accumulated self.tweetText list as one CSV row, not just the
            # current tweet — probably unintended; confirm desired output.
            if (analysis.sentiment.polarity == 0):
                neutral += 1
                csvWriter.writerow(self.tweetText)
            elif (analysis.sentiment.polarity > 0.0 and analysis.sentiment.polarity <= 0.6):
                positive += 1
                csvWriter1.writerow(self.tweetText)
            elif (analysis.sentiment.polarity > 0.6 and analysis.sentiment.polarity <= 1):
                stronglypositive += 1
                csvWriter1.writerow(self.tweetText)
            elif (analysis.sentiment.polarity > -0.6 and analysis.sentiment.polarity <= 0.00):
                negative += 1
                csvWriter2.writerow(self.tweetText)
            elif (analysis.sentiment.polarity > -1 and analysis.sentiment.polarity <= -0.6):
                stronglynegative += 1
                csvWriter2.writerow(self.tweetText)
        csvFile.close()
        csvFile1.close()
        csvFile2.close()
        csvFile = open('results123.csv', 'w')
        csvWriter = csv.writer(csvFile)
        # NOTE(review): self.tweets is the Cursor.items() iterator consumed
        # by the loop above, so this second loop likely iterates zero times
        # and double-counts nothing — confirm; if the combined CSV is wanted,
        # the tweets should be materialized into a list first.
        for tweet in self.tweets:
            self.tweetText.append(self.cleanTweet(tweet.text).encode('utf-8'))
            analysis = TextBlob(tweet.text)
            polarity += analysis.sentiment.polarity
            if (analysis.sentiment.polarity == 0):
                neutral += 1
            elif (analysis.sentiment.polarity > 0.0 and analysis.sentiment.polarity <= 0.6):
                positive += 1
            elif (analysis.sentiment.polarity > 0.6 and analysis.sentiment.polarity <= 1):
                stronglypositive += 1
            elif (analysis.sentiment.polarity > -0.6 and analysis.sentiment.polarity <= 0.00):
                negative += 1
            elif (analysis.sentiment.polarity > -1 and analysis.sentiment.polarity <= -0.6):
                stronglynegative += 1
            csvWriter.writerow(self.tweetText)
        csvFile.close()
        # Convert raw counts to percentages of the requested tweet count.
        positive = self.percentage(positive, NoOfTerms)
        stronglypositive = self.percentage(stronglypositive, NoOfTerms)
        negative = self.percentage(negative, NoOfTerms)
        stronglynegative = self.percentage(stronglynegative, NoOfTerms)
        neutral = self.percentage(neutral, NoOfTerms)
        # Average polarity across all requested tweets.
        polarity = polarity / NoOfTerms
        # `res` is read by other module-level widgets/callbacks.
        global res
        if (polarity == 0):
            res = "Neutral"
        elif (polarity > 0 and polarity <= 0.6):
            res = "Positive"
        elif (polarity > 0.6 and polarity <= 1):
            res = "Strongly Positive"
        elif (polarity > -0.6 and polarity <= 0):
            res = "Negative"
        elif (polarity > -1 and polarity <= -0.6):
            res = "Strongly Negative"
        self.plotPieChart(positive, stronglypositive, negative, stronglynegative, neutral, searchTerm, NoOfTerms)
        # --- Tk summary window: labels show the percentages, buttons open
        # the per-class views provided by the module-level `sa` object. ---
        root = Tk()
        root.title('general report')
        label = Label(root,text = "general result")
        label.grid(row = 0,column = 0)
        label1 = Label(root, text = res)
        label1.grid(row = 0,column = 3)
        label2 = Label(root,text = "percentages : ")
        label2.grid(row = 1,column= 2)
        label3 = Label(root,text = "extremely positive : ")
        label3.grid(row = 2, column = 0)
        label4 = Label(root,text = stronglypositive )
        label4.grid(row = 2, column = 3)
        label5 = Label(root,text = "positive : ")
        label5.grid(row = 3, column = 0)
        label6 = Label(root,text = positive)
        label6.grid(row = 3, column = 3)
        label7 = Label(root,text = "neutral : ")
        label7.grid(row = 4, column = 0)
        label8 = Label(root,text = neutral)
        label8.grid(row = 4, column = 3)
        label9 = Label(root,text = "negative : ")
        label9.grid(row = 5, column = 0)
        label10 = Label(root,text = negative)
        label10.grid(row = 5, column = 3)
        label11 = Label(root,text = "extremely negative : ")
        label11.grid(row = 6,column = 0)
        label12 = Label(root,text = stronglynegative)
        label12.grid(row = 6, column = 3)
        button = Button(root,text = "close",command = root.destroy)
        button.grid(row = 9,column = 1)
        button1 = Button(root,text = "neutral",command = sa.showneutral)
        button1.grid(row = 7,column =0)
        button2 = Button(root,text = "positive",command = sa.showpositive)
        button2.grid(row = 7, column= 1)
        button3 = Button(root,text = "negative",command = sa.shownegative)
        button3.grid(row = 7,column = 2)
        button4 = Button(root,text = "show piechart",command = sa.showchart)
        button4.grid(row=9,column = 0)
import tweepy
import webbrowser
import time
# NOTE(review): `secrets` here is a project-local credentials module — it
# shadows the stdlib `secrets` module; consider renaming to avoid confusion.
from secrets import api_key, api_secret, access_token_secret, access_token
from setup import topicName

# auth = tweepy.OAuthHandler(api_key, api_secret)
# auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
# Authenticate and build the Twitter REST client used further below.
auth = tweepy.OAuthHandler(api_key, api_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth)

# Import KafkaProducer from Kafka library
from kafka import KafkaProducer

# Define server with port
# bootstrap_servers = 'localhost:9092'
# Define topic name where the message will publish
# topicName = 'pythonTwitter'

# Initialize producer variable
# (keyword arguments later expanded into KafkaProducer(**settings))
settings = {
    "bootstrap_servers": 'localhost:9092',  # broker address
    # "compression_type":'snappy',
    "batch_size": 32 * 1024,  # batch up to 32 KiB per partition before sending
    "linger_ms": 20,          # wait up to 20 ms to fill a batch
    "acks": 'all',            # require all in-sync replicas to ack
    "retries": 1000,
    "max_in_flight_requests_per_connection": 5
}
    # NOTE(review): fragment — the `)` below closes a call whose opening
    # lines are above this chunk; this section appears to be the tail of a
    # function (likely `main_loop`, referenced by the daemon below).
    )
    client = mqtt.Client(client_id=MQTT_CLIENTID)
    client.on_connect = mqtt_on_connect
    client.on_message = mqtt_on_message
    client.connect(MQTT_BROKER_ADDRESS, MQTT_BROKER_PORT, MQTT_KEEPALIVE_SECS)
    # Network loop runs in a background thread; this thread just parks.
    client.loop_start()
    # NOTE(review): `while True: pass` busy-waits and burns a full CPU core;
    # client.loop_forever() (dropping loop_start) or a sleep loop would be
    # equivalent without the spin — confirm before changing.
    while True:
        pass


# entry
if DO_FACEBOOK:
    # Facebook Graph API client (v3.1) for cross-posting.
    graph = facebook.GraphAPI(access_token=FB_TOKEN, version="3.1")
if DO_TWITTER:
    tw_auth = tweepy.OAuthHandler(TW_CON_KEY, TW_CON_SEC)
    tw_auth.set_access_token(TW_ACC_KEY, TW_ACC_SEC)
    tw = tweepy.API(tw_auth)

# daemon
# Daemonize the process; `keep_fds` holds file descriptors (e.g. log files)
# that must survive the double-fork.
daemon = Daemonize(app='spaceprobed',
                   pid=PID_FILE_LOC,
                   user=D_USER,
                   group=D_GROUP,
                   action=main_loop,
                   keep_fds=keep_fds)
daemon.start()