def __init__(self, input_file='SMP.csv', config='config/options.ini'):
    """Set up the tracker: load the input csv into memory, record a
    run timestamp, and read the .ini configuration file.

    :param input_file: path to the csv holding the tracked accounts
    :param config: config file path, relative to this module's directory
    """
    self.input_file = input_file
    self.data_dict = DefaultOrderedDict(list)
    # One timestamp per run; every column appended this run shares it.
    now = time()
    self.stamp = datetime.fromtimestamp(now).strftime('%Y-%m-%d %H:%M:%S')
    self.read_csv()
    # Resolve the config path relative to this file, not the CWD.
    config_path = os.path.join(os.path.dirname(__file__), config)
    self.config = ConfigParser()
    self.config.read(config_path)
def __init__(self, input_file='SMP.csv', config='config/options.ini'):
    """Initialise the tracker.

    Loads *input_file* into ``self.data_dict``, stamps the run with the
    current wall-clock time, and parses the options .ini located
    relative to this module.
    """
    self.input_file = input_file
    self.data_dict = DefaultOrderedDict(list)
    # Timestamp identifying this scraping run.
    self.stamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S')
    self.read_csv()
    options_path = os.path.join(os.path.dirname(__file__), config)
    self.config = ConfigParser()
    self.config.read(options_path)
class Social_Media_Tracker(object):
    """Track a single entity across 3 different types of social media:
    Facebook, Twitter and Youtube.

    As of now you can retrieve:
        - No. of likes from Facebook
        - No. of followers from Twitter
        - No. of subscribers from Youtube

    This class takes a CSV input file with the following header:
    twitter_id, facebook_id, youtube_id, handle, scraping frequency
    (times/hour), likes, followers, subscribers, video_views.

    It also has a configuration file (.ini) for FB and Twitter App keys.
    The Youtube pinger method has dependencies that reside in the
    current directory.
    """

    def __init__(self, input_file='SMP.csv', config='config/options.ini'):
        """Load the csv, record a run timestamp and parse the config file.

        :param input_file: path to the csv holding the tracked accounts
        :param config: config path, relative to this module's directory
        """
        self.input_file = input_file
        self.data_dict = DefaultOrderedDict(list)
        # One timestamp per run; every column appended this run shares it.
        self.stamp = datetime.fromtimestamp(time()).strftime('%Y-%m-%d %H:%M:%S')
        self.read_csv()
        # Resolve the config path relative to this file, not the CWD.
        options = os.path.join(os.path.dirname(__file__), config)
        self.config = ConfigParser()
        self.config.read(options)

    def read_csv(self):
        """Loads the input csv file into memory as a column -> values dict."""
        with open(self.input_file, 'r') as f:
            self.data = csv.DictReader(f)
            for header in self.data.fieldnames:
                self.data_dict[header] = []
            for row in self.data:
                for key, value in row.iteritems():
                    self.data_dict[key].append(value)

    def write_csv(self):
        """Writes the in-memory data back to the csv file.

        Rows are reassembled column-wise; output stops at the shortest
        column so a partially-filled new column cannot abort the write
        half-way through a row.
        """
        with open(self.input_file, 'wb') as f:
            writer = csv.writer(f)
            columns = self.data_dict.values()
            writer.writerow(self.data_dict.keys())
            if columns:
                # Only write complete rows: the shortest column bounds them.
                row_count = min(len(column) for column in columns)
                for idx in range(row_count):
                    writer.writerow([column[idx] for column in columns])

    def log_error(self, origin):
        """Logs the active exception into error.log for debugging.

        Must be called from inside an ``except`` block so that
        ``logging.exception`` can attach the current traceback.

        :param origin: name of the pinger that failed (e.g. 'facebook')
        """
        logging.basicConfig(filename='error.log', level=logging.WARNING,
                            format='%(asctime)s %(message)s',
                            datefmt='%m/%d/%Y %I:%M:%S %p')
        logging.exception('\n ping {} error: '.format(origin))
        return

    def ping_facebook(self, *args, **kwargs):
        """Pings Facebook for the number of likes at the current time.

        Appends one value per ``fb_username`` to a new timestamped
        ``'likes <stamp>'`` column of ``data_dict``. A username that
        cannot be resolved records the placeholder string 'None'
        instead of aborting the whole run.
        """
        # Facebook API authentication procedure.
        try:
            url = 'http://graph.facebook.com/'
            token = self.config.get('facebook', 'token')
            graph = facebook.GraphAPI(token)
            # Look up the number of likes for each username from the csv.
            for username in self.data_dict['fb_username']:
                response = urllib.urlopen(url + username)
                data = json.loads(response.read())
                try:
                    node = graph.get_object(data['id'])
                except Exception:
                    # Lookup failed (missing 'id' or Graph API error):
                    # best-effort placeholder, keep processing the rest.
                    node = {'likes': 'None'}
                self.data_dict['likes ' + self.stamp].append(node['likes'])
        except Exception:
            # Config/auth level failure: log with traceback and give up
            # on this pass without crashing the caller.
            self.log_error('facebook')
class Social_Media_Tracker(object): """ Track a single entity across 3 diferent types of social media: Facebook, Twitter and Youtube. As of now you can retrieve: -No. of likes from facebook -No. of followers from Twitter -No. of Subscribers from Youtube This class takes a CSV input file with the following header: twitter_id, facebook_id, youtube_id, handle, scraping frequency (times/hour), likes, followers, subscribers, video_views. It also has a configuration file (.ini) for FB and Twitter App keys. The Youtube pinger method has dependencies that reside in current directory. """ def __init__(self, input_file='SMP.csv', config='config/options.ini'): self.input_file = input_file self.data_dict = DefaultOrderedDict(list) ts = time() self.stamp = datetime.fromtimestamp(ts).strftime('%Y-%m-%d %H:%M:%S') self.read_csv() options = os.path.join(os.path.dirname(__file__), config) self.config = ConfigParser() self.config.read(options) def read_csv(self): """Loads input csv file into memory as dict""" with open(self.input_file, 'r') as f: self.data = csv.DictReader(f) for header in self.data.fieldnames: self.data_dict[header] = [] for row in self.data: for key, value in row.iteritems(): self.data_dict[key].append(value) def write_csv(self): """Writes back to csv file""" with open(self.input_file, 'wb') as f: writer = csv.writer(f) values = self.data_dict.values() writer.writerow(self.data_dict.keys()) print range(len(values[0])) #print values for vals in range(len(values[0])): try: writer.writerow([row[vals] for row in values]) except: break def log_error(self, origin): """Logs error into txt file for debugging""" logging.basicConfig(filename='error.log', level=logging.WARNING, format='%(asctime)s %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p') #logging.warning('Warning: ') logging.exception('\n ping {} error: '.format(origin)) return def ping_facebook(self, *args, **kwargs): """pings facebook for number of likes in current time""" #Facebook API authentication procedure try: 
url = 'http://graph.facebook.com/' token = self.config.get('facebook', 'token') graph = facebook.GraphAPI(token) #Look up number of likes from facebook_username written in csv for i, username in enumerate(self.data_dict['fb_username']): response = urllib.urlopen(url + username) data = json.loads(response.read()) try: node = graph.get_object(data['id']) except: print data node = {'likes': 'None'} print url + username # print 'facebook likes:', node['likes'] self.data_dict['likes ' + self.stamp].append(node['likes']) except Exception, error: self.log_error('facebook')