def post( self, bot_name ):
    creds = twitter.get_twitter_creds( bot_name )
    if not self.authenticate_user( creds ):
        self.render_notloggedin()
    else:
        bot_settings = settings.get_settings( creds )
        bot_settings.learning_style = self.request.get( 'learnfrom' )
        bot_settings.learning_guru = self.request.get( 'guru_name' )
        bot_settings.locquacity_onschedule = self.request.get( 'locquacity_onschedule' ) == "true"
        bot_settings.locquacity_reply = self.request.get( 'locquacity_reply' ) == "true"
        bot_settings.locquacity_speakonnew = self.request.get( 'locquacity_speakonnew' ) == "true"
        bot_settings.learn_retrospectively = self.request.get( 'learn_retrospectively' ) == "true"

        gn = self.request.get( 'locquacity_greetnew' ) == "true"
        logging.debug( 'SettingsHandler.post(): locquacity_greetnew=%s, bot_settings.locquacity_greetnew=%s' % (gn, bot_settings.locquacity_greetnew) )
        if gn and not bot_settings.locquacity_greetnew:
            # greeting was just switched on: snapshot the current follower ids so
            # that only people who follow from now on are greeted as "new"
            logging.debug( '-> fetch follower ids' )
            api = twitter.get_api( creds )
            follower_ids = api.followers_ids()
            creds.follower_ids = follower_ids
            creds.put()
        bot_settings.locquacity_greetnew = gn

        tweet_frequency = self.request.get( 'tweet_frequency' )
        if tweet_frequency is not None and len(tweet_frequency) > 0:
            bot_settings.tweet_frequency = float( tweet_frequency )
        tweet_chance = self.request.get( 'tweet_chance' )
        if tweet_chance is not None and len(tweet_chance) > 0:
            bot_settings.tweet_chance = float( tweet_chance )

        self.render_template( creds, bot_settings, { "saved" : True } )
        bot_settings.creds = creds
        bot_settings.put()
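# --- Hedged sketch: the settings entity the handler above writes to. ---
# The real `settings` module isn't part of this excerpt; the field names below are
# taken from the attributes the handler sets, while the model and query details
# (App Engine db API, one entity per bot) are assumptions.
from google.appengine.ext import db

class BotSettings( db.Model ):
    creds = db.ReferenceProperty()                      # the bot's stored credentials
    learning_style = db.StringProperty()                # one user / following / followers
    learning_guru = db.StringProperty()                 # screen name to learn from
    locquacity_onschedule = db.BooleanProperty( default=False )
    locquacity_reply = db.BooleanProperty( default=False )
    locquacity_speakonnew = db.BooleanProperty( default=False )
    locquacity_greetnew = db.BooleanProperty( default=False )
    learn_retrospectively = db.BooleanProperty( default=False )
    tweet_frequency = db.FloatProperty( default=1.0 )
    tweet_chance = db.FloatProperty( default=1.0 )

def get_settings( creds ):
    """Fetch (or lazily create) the settings entity for this bot."""
    bot_settings = BotSettings.all().filter( 'creds =', creds ).get()
    if bot_settings is None:
        bot_settings = BotSettings( creds=creds )
    return bot_settings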
def main():
    parser = argparse.ArgumentParser(description='A cool twitter bot :)')
    parser.add_argument('--update-header', action='store_true')
    args = parser.parse_args()

    bot = TwitterBot(get_api())
    if args.update_header:
        bot.update_header_image()
    else:
        bot.tweet_random_word()
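# --- Hedged sketch: a get_api() helper like the one main() relies on. ---
# The project's `twitter` module isn't shown in this excerpt; this stand-in uses
# Tweepy and reads credentials from environment variables whose names are
# assumptions, not the project's actual configuration.
import os
import tweepy

def get_api():
    auth = tweepy.OAuthHandler(os.environ['TWITTER_CONSUMER_KEY'],
                               os.environ['TWITTER_CONSUMER_SECRET'])
    auth.set_access_token(os.environ['TWITTER_ACCESS_TOKEN'],
                          os.environ['TWITTER_ACCESS_TOKEN_SECRET'])
    return tweepy.API(auth)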
def run( creds, force_tweet=False, debug=False ):
    if not debug:
        try:
            debug = config.DEBUG_MODE
        except AttributeError:
            pass
    if debug:
        force_tweet = True
    logging.debug( "brains.run(), force_tweet is %s, debug is %s" % (force_tweet, debug) )

    then = datetime.datetime.now()
    bot_settings = settings.get_settings( creds )
    bot_state = state.get_state( creds )
    bot_state.last_run = then
    bot_state.put()
    deadline = then + TIME_LIMIT
    learning_style = bot_settings.learning_style
    api = twitter.get_api( creds )
    statuses_digested = 0
    namespace_manager.set_namespace( creds.screen_name )
    logging.debug( "brains.run(): learning_style is: %s" % learning_style )

    # digest new material according to the configured learning style
    worker = verbivorejr.VerbivoreWorker( api, bot_settings )
    worker.deadline = deadline
    if learning_style == constants.learning_style_oneuser:
        # learn from one user
        guru_name = bot_settings.learning_guru
        guru = twitter.get_user( screen_name=guru_name )
        statuses_digested = worker.digest_user( guru )
    elif learning_style == constants.learning_style_following:
        guru_ids = api.friends_ids( stringify_ids=True )
        statuses_digested = worker.digest_ids( guru_ids )
    elif learning_style == constants.learning_style_followers:
        guru_ids = api.followers_ids( stringify_ids=True )
        statuses_digested = worker.digest_ids( guru_ids )
    worker.put()
    logging.debug( "brains.run(): digested %d new statuses" % statuses_digested )

    # only continue if chance is met
    if bot_settings.tweet_chance < random.random() and force_tweet is False:
        logging.debug( "brains.run(): didn't meet tweet_chance of %2.1f" % bot_settings.tweet_chance )
        return

    do_tweet = False
    if force_tweet:
        logging.debug( "brains.run(): force_tweet is set" )
        do_tweet = True
    elif bot_settings.locquacity_onschedule:
        logging.debug( "brains.run(): will tweet on schedule" )
        do_tweet = True
    elif bot_settings.locquacity_speakonnew and statuses_digested > 0:
        logging.debug( "brains.run(): locquacity_speakonnew, statuses_digested: %s" % statuses_digested )
        do_tweet = True

    # check deadline, defer tweeting if necessary
    if datetime.datetime.now() >= deadline:
        logging.debug( "brains.run(): aborted after put()'ing worker, deadline is looming." )
        taskqueue.add( url="/%s/run" % api.me().screen_name )
        return

    queen = verbivorejr.VerbivoreQueen()
    queen.deadline = deadline

    if do_tweet:
        tweet = None
        safety = 10
        while tweet is None and safety > 0:
            tweet = queen.secrete( 130 )
            safety = safety - 1
        if tweet is not None:
            tweet = verbivorejr.uc_first( tweet )
            post_tweet( api, tweet, debug=debug )

    # reply to mentions received since the last run
    replied_userids = []
    if bot_settings.locquacity_reply:
        last_replied_id = bot_state.last_replied_id
        logging.debug( "brains.run(): last_replied_id is %s" % last_replied_id )
        mentions = api.mentions( since_id=last_replied_id )
        logging.debug( "-> %d mentions" % len(mentions) )
        my_name = "@%s" % creds.screen_name
        last_timestamp = None
        for mention in mentions:
            if datetime.datetime.now() >= deadline:
                break
            # only reply when we've been directly addressed
            #if mention.text[:len(my_name)] != my_name:
            #    break
            logging.debug( "-> reply to %s" % mention.author.screen_name )
            reply = "@%s" % mention.author.screen_name
            tweet = None
            safety = 5
            while tweet is None and safety > 0:
                logging.debug( "--> generate reply, safety=%d" % safety )
                if datetime.datetime.now() >= deadline:
                    break
                tweet = queen.secrete_reply( mention.text, 130 - len(reply) )
                safety = safety - 1
            if tweet is not None:
                reply = "%s %s" % (reply, tweet)
                post_tweet( api, reply, in_reply_to_status_id=mention.id, debug=debug )
                replied_userids.append( mention.author.id )
            this_timestamp = mention.created_at
            if last_timestamp is None or this_timestamp > last_timestamp:
                last_replied_id = mention.id_str
                last_timestamp = this_timestamp
        bot_state.last_replied_id = last_replied_id
        bot_state.put()

    # greet followers gained since the last run
    if bot_settings.locquacity_greetnew:
        if datetime.datetime.now() >= deadline:
            logging.debug( "brains.run(): aborted before greeting new followers, deadline is looming." )
            return
        new_follower_ids = None
        stored_follower_ids = creds.follower_ids
        api_follower_ids = api.followers_ids()
        if stored_follower_ids is None:
            new_follower_ids = api_follower_ids
        else:
            new_follower_ids = []
            for api_follower_id in api_follower_ids:
                if api_follower_id not in stored_follower_ids:
                    new_follower_ids.append( api_follower_id )
        if new_follower_ids is not None and len(new_follower_ids) > 0:
            logging.debug( "brains.run(): new_follower_ids: %s" % new_follower_ids )
            for new_follower_id in new_follower_ids:
                if new_follower_id not in replied_userids:
                    tw_user = api.get_user( user_id=new_follower_id )
                    screen_name = tw_user.screen_name
                    safety = 5
                    greeting = None
                    while greeting is None and safety > 0:
                        greeting = queen.secrete_greeting( screen_name, 130 )
                        safety = safety - 1   # bound the retries so this loop cannot spin forever
                    if greeting is not None:
                        post_tweet( api, greeting, debug=debug )
        else:
            logging.debug( "brains.run(): no new followers" )
        creds.follower_ids = api_follower_ids
        creds.put()

    now = datetime.datetime.now()
    elapsed = now - then
    logging.debug( "brains.run(): completed in %d seconds" % elapsed.seconds )
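# --- Hedged sketch: the post_tweet() helper brains.run() calls. ---
# The real implementation isn't part of this excerpt. A minimal version would
# honour the debug flag by logging instead of posting; the Tweepy-style
# update_status() call is an assumption about the underlying API wrapper.
def post_tweet( api, text, in_reply_to_status_id=None, debug=False ):
    if debug:
        logging.debug( "post_tweet(): debug mode, not tweeting: %s" % text )
        return None
    return api.update_status( status=text, in_reply_to_status_id=in_reply_to_status_id )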
# v0.2
#
#
#
import os
import sys
import json
import pickle

# `tw` is the project's own Tweepy helper module (not shown in this excerpt)

if os.path.isfile("users.pkl"):
    print("Loading tweet database...")
    with open("users.pkl", "rb") as fp:
        users = pickle.load(fp)
else:
    print("Database not found, quitting...")
    sys.exit()

try:
    # Initialise Tweepy
    tweetStuff = tw.tweepy_init()
    api = tw.get_api(tweetStuff)

    # Get descriptions
    count = {}
    users.sort()
    for i in range(len(users)):
        count[users[i]] = users.count(users[i])
    del users
    users = list(count.keys())
    users.sort()

    with open('dataAnalysis.json', 'r') as f:
        k = json.load(f)
        keyWords = k['keywords']

    regex = []
    for i in range(len(keyWords)):
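# --- Equivalent sketch: the counting/de-duplication loop above, in one pass. ---
# collections.Counter produces the same per-user counts without the O(n^2)
# users.count() calls; shown only as an illustration of the idea, not as a
# change to the script above.
from collections import Counter

count = Counter(users)           # screen name -> number of occurrences
users = sorted(count.keys())     # unique names, sorted, as the loop above ends up with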
""" A simple web server to execute the bot functions """ import os from flask import Flask, jsonify from flask_httpauth import HTTPTokenAuth from hashlib import blake2b from twitter import get_api from bot import TwitterBot bot = TwitterBot(get_api()) app = Flask(__name__) auth = HTTPTokenAuth(scheme='Token') @auth.verify_token def verify_token(token): token_hash = blake2b(bytes(token, encoding='utf8')).hexdigest() return token_hash == os.environ['TOKEN_HASH'] @app.route('/') def index(): """ Return total number of tweets bot has tweeted so far """ return jsonify({ 'num_tweets': bot.get_num_tweets(),
import numpy as np

# Tokenizer, tw, get_diclist and add_pnvalue come from elsewhere in the project
# and are not part of this excerpt.

def get_pnmean(diclist):
    pn_list = []
    for word in diclist:
        pn = word['PN']
        if pn != 'notfound':
            pn_list.append(pn)
    if len(pn_list) > 0:
        pnmean = np.mean(pn_list)
    else:
        pnmean = None
    return pnmean


if __name__ == '__main__':
    t = Tokenizer()
    api = tw.get_api()
    search_results = tw.get_search_results(api)
    tweetlist = tw.get_tweets(search_results)

    #pnmean_list = []
    tweet_pnmean_list = []
    for td in tweetlist:
        tweet = td['tweet']
        tweet = tweet.replace('\n', '')
        parsed_tweet = t.tokenize(tweet)
        diclist = get_diclist(parsed_tweet)
        diclist = add_pnvalue(diclist)
        #pp = pprint.PrettyPrinter(indent=4)
        #pp.pprint(diclist)
        pnmean = get_pnmean(diclist)
        #pnmean_list.append(pnmean)
        d = {'pnmean': pnmean, 'tweet': tweet}
        tweet_pnmean_list.append(d)
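# --- Hedged sketch: the get_diclist()/add_pnvalue() helpers used above. ---
# Both are defined elsewhere in the project. Judging from how they are used,
# get_diclist() turns tokenizer output into one dict per word and add_pnvalue()
# attaches a polarity score under 'PN', with the string 'notfound' when a word
# is missing from the polarity dictionary. The 'Surface' key, token.surface and
# the PN_DICT table below are assumptions, not the project's actual code.
PN_DICT = {}   # surface form -> polarity score, loaded from a polarity dictionary

def get_diclist(tokens):
    return [{'Surface': token.surface} for token in tokens]

def add_pnvalue(diclist):
    for word in diclist:
        word['PN'] = PN_DICT.get(word['Surface'], 'notfound')
    return diclist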