#!/usr/bin/env python
"""Fetch the CTA Twitter timeline and append the tweets to a CSV file.

Python 2 script: fetches @cta's user timeline via the project's Twitter
wrapper and appends one row per tweet to a local CSV file.
"""
import csv
import logging
import sys

# Configure the ROOT logger directly: timestamped records at DEBUG and
# above go to stderr via a single StreamHandler.
logger = logging.getLogger()
handler = logging.StreamHandler()
formatter = logging.Formatter(
    '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(logging.DEBUG)

# NOTE(review): hard-coded, user-specific virtualenv path -- this breaks on
# any other machine; should come from the environment / proper packaging.
sys.path.append('/Users/lorenamesa/Desktop/pyten/lib/python2.7/site-packages/')

from api.twitter_api import Twitter

twitter = Twitter()
user_timeline = twitter.get_user_timeline(user_id=None, screen_name="cta")

# "ab": append mode, so repeated runs accumulate rows (duplicate tweets are
# possible -- nothing here de-duplicates). Binary mode matches py2's csv docs.
with open("/Users/lorenamesa/Desktop/pytennessee/cta_tweet_data.csv", "ab") as csvdata:
    if user_timeline:
        # Use the configured logger (the original mixed logging.info with
        # the module-level logger; both hit root, but be consistent).
        logger.info("Writing tweets for cta timeline data...")
        wr = csv.writer(csvdata, dialect='excel')
        for tweet in user_timeline:
            # Row column order follows tweet.__dict__ insertion order;
            # per the original's note: ['tweet_id', 'created_at', 'text'].
            # (Leftover debug `print tweet.__dict__.keys()` removed.)
            wr.writerow(tweet.__dict__.values())
# Persist the collected Uber duration predictions, then the CTA tweets,
# into the database via the project's `db` handle.
logger.info("Writing uber duration data...")
for prediction in all_uber_duration_predictions:
    # WARNING(review): SQL built by string formatting. These values come
    # from our own prediction objects (numeric / controlled), but prefer
    # parameterized queries (PEP 249 placeholders) if db.query supports them.
    db.query(
        "INSERT INTO uber_durations "
        "(requested_time, type, duration, surge, low_estimate, high_estimate, lat, long, end_lat, end_long) "
        "VALUES ({0}, '{1}', {2}, {3}, {4}, {5}, {6}, {7}, {8}, {9})".format(
            prediction.requested_time,
            prediction.type,
            prediction.duration,
            prediction.surge,
            # Uber omits estimates sometimes; store 0 instead of None.
            prediction.low_estimate or 0,
            prediction.high_estimate or 0,
            prediction.lat,
            prediction.long,
            prediction.ending_lat,
            prediction.ending_long))

twitter = Twitter()
user_timeline = twitter.get_user_timeline(
    requested_time=requested_time, user_id=None, screen_name="cta")

if user_timeline:
    logger.info("Writing tweets for cta timeline data...")
    for tweet in user_timeline:
        try:
            # FIX: tweet text is untrusted input interpolated into a
            # double-quoted SQL literal -- an embedded `"` previously broke
            # (or could inject into) the statement. Doubling the quote is
            # the SQL escape for a quote inside a quoted literal.
            # Parameterized queries would be the proper fix if db.query
            # accepts placeholder arguments.
            db.query(
                "INSERT INTO tweets (requested_time, tweet_id, text, created_at) "
                'VALUES ({0}, {1}, "{2}", "{3}")'.format(
                    tweet.requested_time,
                    tweet.tweet_id,
                    tweet.text.replace('"', '""'),
                    tweet.created_at))
        except IntegrityError:
            # tweet_id is (presumably) a unique key; duplicates are expected
            # across runs, so just log and move on.
            logger.info("Skipping tweet_id {0} already in DB".format(tweet.tweet_id))