def get(self):
    self.response.out.write('Consolidation started. <br />')
    self.consolidate()
    self.response.out.write('Consolidation finished. <br />')
    if config_values.tweeting_enabled:
        self.response.out.write('Tweeting started. <br />')
        data = self.get_data_to_tweet()
        t = Tweeter(data)
        t.tweet()
        for ca in data:
            ca.date_tweeted = datetime.now()
            db.put(ca)
            self.response.out.write(
                'Tweeted close approach of object %s and saved date tweeted as %s<br />'
                % (ca.object_name, ca.date_tweeted))
        self.response.out.write(
            'Successfully tweeted %d close approaches. <br />' % len(t.data_to_tweet))
        self.response.out.write('Tweeting finished. <br />')
    else:
        self.response.out.write('Tweeting is disabled. <br />')
def main():
    print("Welcome to my nonary game....\n")
    authDict = getAuthOptions()
    if isinstance(authDict, str):
        print("Unable to get authorization tokens, please try again")
        return
    tweeterer = Tweeter(authDict["token"], authDict["secret"])
    pause = input("wait until it says account set please")
    say_what = input("What do we say? ")
    whom = input("tweet at someone? ")
    tweet = "@%s %s" % (whom, say_what)
    to_send = input("do you want to send this tweet?")
    if to_send == "yes":
        tweeterer.send_tweet(tweet)
def cache_article(self, share_url=None, corry_id=None):
    result = requests.get(share_url)
    if result.status_code == 200:
        self.parse_article(result.content)
        if self.title:
            self.corry_id = corry_id
            self.share_url = share_url
            self.insert()
            print("\nNew article: %s - %s" % (self.corry_id, self.title))
            Tweeter().send_tweet(self)
    return self
import time
import os
from os import environ

from card import Card
from querent import Querent
from reading import Reading
from tarotdb import TarotDB
from tweeter import Tweeter

CONSUMER_KEY = os.environ.get('CONSUMER_KEY')
CONSUMER_SECRET = os.environ.get('CONSUMER_SECRET')
ACCESS_KEY = os.environ.get('ACCESS_KEY')
ACCESS_SECRET = os.environ.get('ACCESS_SECRET')

db = TarotDB("diviner.db")
tw = Tweeter(CONSUMER_KEY, CONSUMER_SECRET, ACCESS_KEY, ACCESS_SECRET)

querents = Querent()
for q in querents.get_querents():
    reading = Reading(db, q)
    tweet = reading.get_reading()
    tw.tweet(tweet)
def main(): """ Tweeter object and related credentials """ tweeter = Tweeter() # c_key = '' # c_secret = '' # a_token = '' # a_secret = '' """ Login to twitter using above credentials """ # tweeter.login(c_key, c_secret, a_token, a_secret) try: load = sys.argv[1] except Exception: Utility.error('main', 'Error in passed parameters.') """ Create MarkovModel object to formulate tweets """ model = MarkovModel() try: # Load a already saved model if load in ['-l', '-L']: filename, keyword, prefix, suffix, n_tweets = load_params() Utility.log('main', 'Loading model from file {0}'.format(filename)) model.load('../model/m_blk_{0}'.format(filename)) tweeter.start_tweeting(time=1, keywords=keyword.split(), prefix=prefix, suffix=suffix) tweeter._autoconstruct(model, int(n_tweets)) # Carve up a dictionary from read elif load in ['-r', '-R']: filename, keyword, prefix, suffix, n_tweets = load_params() Utility.log( 'main', 'Training model from file {0}, and saving.'.format(filename)) model.read('../data/{0}.txt'.format(filename)) model.save('../model/m_blk_{0}'.format(filename.split('.')[0])) tweeter.start_tweeting(time=1, keywords=keyword.split(), prefix=prefix, suffix=suffix) tweeter._autoconstruct(model, int(n_tweets)) # Collect tweets and store to a database elif load in ['-c', '-C']: no = sys.argv[2] Utility.log( 'main', 'Collecting {0} tweets and saving them to db.'.format(no)) tweets = tweeter.read_tweets(int(no)) Tweeter.store(tweets) # Load a number of tweets and amplify elif load in ['-a', '-A']: no = sys.argv[2] timeout = sys.argv[3] Utility.log( 'main', 'Tweeting {0} tweets every {1} seconds'.format(no, timeout)) tweeter.amplify_tweets(int(no), int(timeout)) else: Utility.error('main', 'Invalid parameters') Utility.log('main', 'Exiting program ...') except KeyboardInterrupt: Utility.log('main', 'Terminating program ...')
from tweeter import Tweeter
from freep import Freep
from keys import Keys

dalereedsay = Tweeter('dalereedsay', Keys.dalereedsay())
dalereed = Freep('dalereed', dalereedsay.mostRecent())
#print "dalereed comments:{0}".format(dalereed.newComments())
dalereedsay.postList(dalereed.newComments())
#dalereedsay.postList(dalereed.newMentions())

jimrobsay = Tweeter('jimrobsay', Keys.jimrobsay())
jimrob = Freep('jimrobinson', jimrobsay.mostRecent())
#print "jimrob comments:{0}".format(jimrob.newComments())
jimrobsay.postList(jimrob.newComments())
#jimrobsay.postList(jimrob.newMentions())
#!/usr/bin/python3
import handlers
import watchdog
from breath_analyzer import BreathAnalyzer
from camera import Obscura
from tweeter import Tweeter

if __name__ == "__main__":
    with watchdog.get_connection_to_arduino() as serialConn:
        serialConn.read_until('SPS='.encode())
        samples_per_second = float(serialConn.readline())
        handler = handlers.OnThresholdExceededHandler(
            '/common/stream',
            Tweeter(),
            Obscura('/dev/video0', (640, 480)),
            serialConn,
            status_maker=lambda datetime, value, message:
                "{}\nThe value measured was {:.1f} units, at {}.".format(
                    message, value, datetime.strftime("%X on %x")))
        breathAnalyzer = BreathAnalyzer(
            samples_per_second,
            handler,
            on_reset=lambda: serialConn.write('Sensor is ready'.encode()))
        watchdog.keep_reading(serialConn, breathAnalyzer.add_gas_concentration)
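# Illustration only (not part of the original script): an example of the status
# string the status_maker lambda above would produce; the exact time and date
# formatting from %X and %x is locale-dependent.
#
#   status_maker(datetime.datetime(2021, 5, 1, 14, 30), 42.0, "Threshold exceeded")
#   -> "Threshold exceeded\nThe value measured was 42.0 units, at 14:30:00 on 05/01/21."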
#remove stop words
# def create_bi_grams(line):
#     bi_grams = [(line[i], line[i + 1]) for i in range(0, len(line) - 1)]
#     return bi_grams


def remove_stop_words(line):
    #word_list = [[word for word in line.split(' ') if word not in stopwords.words('english')] for line in tweet]
    word_list = [word for word in line if word not in stopwords.words('english')]
    return word_list


if __name__ == '__main__':
    # Run all of these methods on each of the tweets.
    url = ("https://en.wikipedia.org/w/api.php?action=query&format=json"
           "&prop=pageprops&ppprop=disambiguation&redirects&titles=")
    dest_url = "https://en.wikipedia.org/wiki/"
    tw = Tweeter()
    tw.get_one_tweet()
    #tw.text = "Barak Obama in America at Arizona State University"  # hardcoding for time being
    tw.text = "Venezuela blackout stops subway causes traffic jams and interrupts presidential broadcast"  # for now hard code one tweet and proceed
    word_list = remove_stop_words(word_tokenize(tw.text))
    console_log(word_list)
    wn = Wordnet()
    pos_word_list = wn.pos_word_list(word_list)
    bi_grams = wn.get_bi_gram_candidates(pos_word_list)
    #console_log('bi grams: ', bi_grams)
    uni_grams = wn.get_uni_gram_candidates(pos_word_list)
    #console_log('uni grams: ', uni_grams)
    bg_wiki_url_terms = wikipedia.get_wikipedia_urls(url, dest_url, bi_grams)
    console_log('bg wiki terms: ', bg_wiki_url_terms)
    unknown_uni_grams = wikipedia.clean_uni_gram_candidates(uni_grams, bg_wiki_url_terms)
import inflect
from tweeter import Tweeter
from wordnikclient import get_singular_noun
from twitterclient import get_client
from badwords import ContainsBadWordException

tweeter = Tweeter(get_client('mozerablebot'))
inflector = inflect.engine()

# get a random noun that isn't a word of oppression
posted = False
while not posted:
    status = "I was looking for {n}, and then I found {n}\nand Heaven knows I'm miserable now".format(
        n=inflector.a(get_singular_noun()))
    try:
        tweeter.tweet(status)
        posted = True
    except ContainsBadWordException:
        pass
from flask import Flask, render_template
from tweeter import Tweeter
import os

app = Flask(__name__)
tweetObject = Tweeter("#MLHLocalhost", 20)
tweetObject.scrapeTweets()


@app.route('/')
def homepage():
    return render_template('index.html', tweets=tweetObject.getTweets())


port = int(os.environ.get('PORT', 5000))
app.run(host='0.0.0.0', port=port, debug=True)
def main(): """ main the main driver code which logs into using the twitter credintials and executes the script according to the given modifier Options: -l : Load model from file. Use this if using an existing model. Filename : Name of pickle file to load markov model. Keywords : Seedwords which are an intergral part of tweet. Keywords may be single or multiple. Prefix : Word/s to start the tweet with. Prefix may be single or multiple words. Suffix : Word/s to add at the end of tweet. Suffix may be single or multiple words. num_tweets : Number of tweets to be written. -r : Read file to create model. Use this if including your own text file. Filename : Name of text file to construct markov model. Keywords : Seedwords which are an intergral part of tweet. Keywords may be single or multiple. Prefix : Word/s to start the tweet with. Prefix may be single or multiple words. Suffix : Word/s to add at the end of tweet. Suffix may be single or multiple words. num_tweets : Number of tweets to be written. -c : Collect tweets from TwitterStream. no : Number of tweets to collect. -a : Amplify tweets i.e. Retweet tweets stored using -c. no : Number of tweets to amplify. timeout : Time to wait in seconds before retweeting. logs into the twitter using the given credentials. If there is error it catches the exception and returns an exception message """ tweeter = Tweeter() # c_key = '' # c_secret = '' # a_token = '' # a_secret = '' # tweeter.login(c_key, c_secret, a_token, a_secret) try: load = sys.argv[1] except Exception: Utility.error('main', 'Error in passed parameters.') # Create MarkovModel object to formulate tweets model = MarkovModel() try: # Load a already saved model if load in ['-l', '-L']: filename, keyword, prefix, suffix, n_tweets = load_params() Utility.log('main', 'Loading model from file {0}'.format(filename)) model.load('../model/m_blk_{0}'.format(filename)) tweeter.start_tweeting( time=1, keywords=keyword.split(), prefix=prefix, suffix=suffix) tweeter._autoconstruct(model, int(n_tweets)) # Carve up a dictionary from read elif load in ['-r', '-R']: filename, keyword, prefix, suffix, n_tweets = load_params() Utility.log( 'main', 'Training model from file {0}, and saving.'.format(filename)) model.read('../data/{0}.txt'.format(filename)) model.save('../model/m_blk_{0}'.format(filename.split('.')[0])) tweeter.start_tweeting( time=1, keywords=keyword.split(), prefix=prefix, suffix=suffix) tweeter._autoconstruct(model, int(n_tweets)) # Collect tweets and store to a database elif load in ['-c', '-C']: no = sys.argv[2] Utility.log( 'main', 'Collecting {0} tweets and saving them to db.'.format(no)) tweets = tweeter.read_tweets(int(no)) Tweeter.store(tweets) # Load a number of tweets and amplify elif load in ['-a', '-A']: no = sys.argv[2] timeout = sys.argv[3] Utility.log( 'main', 'Tweeting {0} tweets every {1} seconds'.format(no, timeout)) tweeter.amplify_tweets(int(no), int(timeout)) else: Utility.error('main', 'Invalid parameters') Utility.log('main', 'Exiting program ...') except KeyboardInterrupt: Utility.log('main', 'Terminating program ...')
def test_bad_status_raises_exception(mock_client):
    t = Tweeter(mock_client)
    s = 'this status contains the word mustard.'
    with raises(ContainsBadWordException):
        t.tweet(s)
def test_clean_status_is_tweeted(mock_client):
    t = Tweeter(mock_client)
    s = 'this is an ok status!'
    t.tweet(s)
    assert_tweeted(mock_client, s)
    # Camera warm-up time
    time.sleep(preview_time)
    camera.capture(stream, 'jpeg')
    return stream


def watermark(filename, msg):
    img = Image.open(filename)
    draw = ImageDraw.Draw(img)
    font = ImageFont.truetype('roboto/Roboto-Regular.ttf', 36)
    draw.text((10, 10), msg, (0, 0, 0), font=font)
    img.save(filename)


if __name__ == "__main__":
    preview_time = 1
    stream = read_image(preview_time)
    filename = "image.jpg"
    with open(filename, 'wb') as file:
        file.write(stream.getvalue())
    cpu = CpuTemp()
    msg = "CPU temp: " + cpu.read() + "C"
    watermark(filename, msg)
    if config["tweet"] == True:
        tweeter = Tweeter(config, tweepy)
        tweeter.send(
            filename,
            "Internet of Seeds Mark II - https://github.com/alexellis/seeds2")
    camera.hflip = True
    camera.resolution = (1920, 1080)
    # Camera warm-up time
    time.sleep(preview_time)
    camera.capture(stream, 'jpeg')
    return stream


def watermark(filename, msg):
    img = Image.open(filename)
    draw = ImageDraw.Draw(img)
    font = ImageFont.truetype('roboto/Roboto-Regular.ttf', 36)
    draw.text((10, 10), msg, (0, 0, 0), font=font)
    img.save(filename)


if __name__ == "__main__":
    preview_time = 1
    stream = read_image(preview_time)
    filename = "image.jpg"
    with open(filename, 'wb') as file:
        file.write(stream.getvalue())
    cpu = CpuTemp()
    msg = "CPU temp: " + cpu.read() + "C"
    watermark(filename, msg)
    if config["tweet"] == True:
        tweeter = Tweeter(config, tweepy)
        tweeter.send(filename, "Internet of Seeds Mark II")