def main():
    """Export every user's friend/follower lists from the database to export.json.

    Reads all users via database.get_users(), looks up each user's friends
    and followers (falling back to an empty list when a lookup fails), and
    writes the combined records as JSON to export.json in the working
    directory.
    """
    print("[+] Starting Twitter Crawler Export Tool")
    result = []
    for user in database.get_users():
        print("[+] Processing User: %s" % user)
        # Catch only Exception -- the original bare except also swallowed
        # KeyboardInterrupt/SystemExit, hiding Ctrl-C during long exports.
        try:
            friends = database.get_friends(user)
        except Exception:
            print("[-] Error No Friend Data Found For %s Omitting" % user)
            friends = []
        try:
            followers = database.get_followers(user)
        except Exception:
            print("[-] Error No Follower Data Found For %s Omitting" % user)
            followers = []
        result.append({
            "screen_name": user,
            "friends": friends,
            "followers": followers,
        })
    # 'with' guarantees the handle is closed even if serialization fails;
    # the original bound the builtin name 'file' and never closed on error.
    with open("export.json", "w") as out:
        out.write(json.dumps(result))
    print("[+] Success Data Saved As export.json")
def run():
    """Relay the newest @FSCoffeeBot tweet to every follower, exactly once.

    Returns False when there is nothing new to process (no tweet, first
    run, or already-handled tweet); returns None after queueing messages.
    """
    logging.debug('entering run()')

    # Ask Twitter for the bot's most recent tweet.
    tweet = twitter.get_latest_tweet()
    if not tweet:
        logging.warning('no tweet found')
        return False

    previous_id = int(database.get_last_handled_tweet_id())
    current_id = int(tweet['id'])
    logging.debug('found tweet. tweet id: %s' % current_id)

    if not previous_id:
        # Database has never been initialized -- record the id and start
        # relaying from the next tweet onwards.
        logging.info('No last_tweet_id found. Setting it.')
        database.set_last_handled_tweet_id(current_id)
        return False

    if previous_id >= current_id:
        logging.debug('We already processed this tweet')
        return False

    # Genuinely new tweet: queue one outgoing message per follower, then
    # mark it handled so the next run skips it.
    text = tweet['text']
    logging.info('New tweet found: %s', text)
    for follower in database.get_followers():
        database.push_message_to_send(database.MessageToSend(follower, text))
    database.set_last_handled_tweet_id(current_id)
def main():
    """Export every user's friend/follower lists from the database to export.json.

    NOTE(review): this re-defines main() -- an identical definition appears
    earlier in the file, and this later one wins at import time. Consider
    removing one copy.

    Reads all users via database.get_users(), looks up each user's friends
    and followers (falling back to an empty list when a lookup fails), and
    writes the combined records as JSON to export.json.
    """
    print("[+] Starting Twitter Crawler Export Tool")
    result = []
    for user in database.get_users():
        print("[+] Processing User: %s" % user)
        # Narrowed from a bare except, which also swallowed
        # KeyboardInterrupt/SystemExit during long exports.
        try:
            friends = database.get_friends(user)
        except Exception:
            print("[-] Error No Friend Data Found For %s Omitting" % user)
            friends = []
        try:
            followers = database.get_followers(user)
        except Exception:
            print("[-] Error No Follower Data Found For %s Omitting" % user)
            followers = []
        result.append({"screen_name": user, "friends": friends, "followers": followers})
    # 'with' closes the file even on a write error; also avoids binding the
    # builtin name 'file' as the original did.
    with open("export.json", "w") as out:
        out.write(json.dumps(result))
    print("[+] Success Data Saved As export.json")
def get_following_curators(curator_id):
    """Return a JSON document of the form {"ids": [...]} listing the
    follower ids stored for *curator_id*."""
    ids = database.get_followers(curator_id)
    payload = {"ids": ids}
    return json.dumps(payload)
def import_tweets(client, usernames):
    """Import each user's public timeline from Twitter into Cassandra.

    For every username, fetches the JSON timeline over HTTP, then via the
    Thrift *client* writes:
      - each tweet's fields into the 'tweets' column family, keyed by tweet id;
      - a 'user_tweets' supercolumn of tweet ids under the author's user id;
      - a 'friend_tweets' supercolumn of tweet ids under each follower's id,
        so followers can read their timeline by key.

    client    -- Thrift Cassandra client exposing batch_insert /
                 batch_insert_superColumn (project type; assumed connected).
    usernames -- iterable of Twitter screen names to import.
    """
    for username in usernames:
        print 'Importing tweets for %s' % (username,)
        # Unauthenticated public-timeline endpoint (legacy Twitter REST API).
        url = 'http://twitter.com/statuses/user_timeline/%s.json' % (username,)
        # Per-user accumulators, reset for each username.
        user_tweet_columns = []
        user_id = None
        followers = None
        follower_columns = defaultdict(lambda: [])
        tweets = json.loads(urllib2.urlopen(url).read())
        for tweet in tweets:
            # Strip the embedded user object; only its id is kept on the tweet.
            user = tweet.pop('user', None)
            if user_id is None:
                # First tweet establishes the author id and follower list
                # (get_followers is a sibling helper defined elsewhere in
                # this file; presumably returns numeric ids -- verify).
                user_id = str(user['id'])
                followers = map(str, get_followers(user_id))
            tweet['user_id'] = user['id']
            # Twitter's created_at is an RFC 822 date; convert it to both a
            # datetime and an epoch-seconds column for range queries.
            parsed_date = rfc822.parsedate(tweet['created_at'])
            created_at_in_seconds = calendar.timegm(parsed_date)
            tweet['created_at'] = datetime.datetime.fromtimestamp(
                created_at_in_seconds)
            tweet['created_at_in_seconds'] = created_at_in_seconds
            tweet_id = str(tweet['id'])
            # One column per tweet field; the tweet's own timestamp is used
            # as the Cassandra write timestamp.
            columns = []
            for key, value in tweet.items():
                columns.append(column_t(
                    columnName=key,
                    value=unicode(value).encode('utf-8'),
                    timestamp=created_at_in_seconds
                ))
            if columns:
                client.batch_insert(batch_mutation_t(
                    table='TwitterClone',
                    key=tweet_id,
                    cfmap={'tweets': columns},
                ), True)
            # Record this tweet id under the author's edge list...
            user_tweet_columns.append(column_t(
                columnName=tweet_id,
                value=tweet_id,
                timestamp=created_at_in_seconds
            ))
            # ...and under every follower's edge list.
            for follower in followers:
                follower_columns[follower].append(column_t(
                    columnName=tweet_id,
                    value=tweet_id,
                    timestamp=created_at_in_seconds
                ))
        # Flush the per-follower edges: one supercolumn insert per follower.
        for follower, columns in follower_columns.iteritems():
            supercolumn = superColumn_t(
                name='friend_tweets',
                columns=columns
            )
            client.batch_insert_superColumn(batch_mutation_t(
                table='TwitterClone',
                key=follower,
                cfmap={'tweet_edges': [supercolumn]}
            ), True)
        # Flush the author's own edge list.
        if user_tweet_columns:
            supercolumn = superColumn_t(
                name='user_tweets',
                columns=user_tweet_columns
            )
            client.batch_insert_superColumn(batch_mutation_t(
                table='TwitterClone',
                key=user_id,
                cfmap={'tweet_edges': [supercolumn]}
            ), True)