def get_items(self, since):
    """Fetch the user's flickr photos uploaded since *since* and
    normalise them into ServiceItems.

    Returns None when no username is available, otherwise a (possibly
    empty) list of ServiceItems.
    """
    # Example call:
    # http://api.flickr.com/services/rest/?method=flickr.people.getPhotos&api_key=...&user_id=...&format=json&nojsoncallback=1
    # API explorer: http://www.flickr.com/services/api/explore/flickr.people.getPhotos
    user_id = self._get_username()
    if not user_id:
        return
    photos = self._get_oauth_v1(
        'http://api.flickr.com/services/rest/?method=flickr.people.getPhotos'
        '&user_id=%s&format=json&nojsoncallback=1&min_upload_date=%s'
        % (user_id, since.strftime('%Y-%m-%d+%H:%M:%S')))
    items = []
    if photos and int(photos['photos']['total']) > 0:
        for photo in photos['photos']['photo']:
            # Per-photo detail lookup.
            pic_json = self._get_oauth_v1(
                'http://api.flickr.com/services/rest/?method=flickr.photos.getInfo'
                '&user_id=%s&format=json&nojsoncallback=1&photo_id=%s'
                % (user_id, photo['id']))
            item = ServiceItem()
            item.title = pic_json['photo']['title']['_content']
            # Use the date the photo was uploaded to flickr, NOT when it was taken.
            item.created = datetime.fromtimestamp(
                float(pic_json['photo']['dates']['posted']))  # e.g. u'1300054696'
            item.link_back = pic_json['photo']['urls']['url'][0]['_content']
            item.tags = pic_json['photo']['tags']['tag']
            item.favorite = pic_json['photo']['isfavorite']
            # Thumb and small URLs differ only in the trailing size suffix.
            url_parts = (pic_json['photo']['farm'], pic_json['photo']['server'],
                         pic_json['photo']['id'], pic_json['photo']['secret'])
            item.url_thumb = "http://farm%s.static.flickr.com/%s/%s_%s_t.jpg" % url_parts
            item.url_small = "http://farm%s.static.flickr.com/%s/%s_%s_m.jpg" % url_parts
            # The body is an inline thumbnail; the old assignment of the photo
            # description to item.body just above this was a dead store and
            # has been removed.
            item.body = '<br/><img src="%s" />' % (item.url_thumb,)
            item.service = self.service
            items.append(item)
    return items
def _convert_likes_feed(self, likes, since):
    """Convert the facebook 'likes' feed into ServiceItems.

    Only likes created on/after *since* (a date) are kept.  Extra detail
    about each like is fetched from the Graph API, e.g.
    https://graph.facebook.com/271847211041
    """
    items = []
    for like in likes['data']:
        # Likes without a timestamp cannot be filtered on date, so skip them
        # (the old code had a dead `else: pass` for this case).
        if 'created_time' not in like:
            continue
        created = self._convert_time_stamp(like['created_time'])
        if created.date() >= since:
            info_on_like = get_url('https://graph.facebook.com/%s' % (like['id']))
            item = ServiceItem()
            item.created = created
            if 'name' in like:
                item.title = like['name']
            if 'category' in like:
                item.body = like['category']
            item.link_back = info_on_like['link']
            item.service = self.service
            items.append(item)
    return items
def _convert_status_feed(self, statuses, since):
    """Take the feed of status updates from facebook and convert it to
    ServiceItems.

    Entries missing a message or an update timestamp are skipped; only
    statuses updated on/after *since* (a date) are returned.
    """
    items = []
    for status in statuses['data']:
        # Both keys are required; the old `if not …: pass / else:` ladder is
        # flattened into a single guard clause.
        if 'message' not in status or 'updated_time' not in status:
            continue
        created = self._convert_time_stamp(status['updated_time'])
        if created.date() >= since:
            item = ServiceItem()
            item.created = created
            item.title = ''
            item.body = status['message']
            item.link_back = \
                "http://www.facebook.com/%s/posts/%s?notif_t=feed_comment" \
                % (status['from']['id'], status['id'])
            item.service = self.service
            items.append(item)
    return items
def get_items(user, since, model_instance=None):
    """Fetch the user's Google Reader shared-item feed and normalise it.

    Returns a list of ServiceItems for entries updated on/after *since*
    (a date).  Raises LoggedServiceError wrapping any fetch/parse failure.
    """
    serv = model_instance or get_model_instance(user, __name__)
    items = []
    at = AccessToken.objects.get(service=serv)
    try:
        feed = feedparser.parse(
            'http://www.google.com/reader/public/atom/user/%s/state/com.google/broadcast'
            % (at.userid,))
        for entry in feed.entries:
            updated = parser.parse(entry.updated)
            # Normalise to naive UTC so dates compare consistently.
            updated = (updated - updated.utcoffset()).replace(tzinfo=None)
            if updated.date() >= since:
                item = ServiceItem()
                item.title = entry.title
                # We don't take the summary as it's huge.
                item.body = ''
                if 'links' in entry:
                    item.link_back = entry['links']
                # 'link' wins over 'links' when both are present.
                if 'link' in entry:
                    item.link_back = entry['link']
                item.created = updated
                item.service = serv
                item.user = user
                # For stats: record which site the shared entry came from.
                o = urlparse(entry.source.link)
                item.source = o.netloc
                # Person making the comment; entry.content[0].value holds
                # the comment body itself.
                item.author = entry.author
                items.append(item)
    except Exception as e:
        raise LoggedServiceError(original_exception=e)
    # BUG FIX: the collected items were previously never returned.
    return items
def get_items(self, since):
    """Fetch the shared-item feed for this service and normalise it.

    Returns a list of ServiceItems updated on/after *since* (a date);
    raises LoggedServiceError wrapping the original failure.
    """
    items = []
    try:
        feed = feedparser.parse(
            'http://www.google.com/reader/public/atom/user/%s/state/com.google/broadcast'
            % (self.service.auth.secret,))
        for entry in feed.entries:
            updated = parser.parse(entry.updated)
            # Normalise to naive UTC before comparing dates.
            updated = (updated - updated.utcoffset()).replace(tzinfo=None)
            if updated.date() >= since:
                item = ServiceItem()
                item.title = entry.title
                # We don't take the summary as it's huge.
                item.body = ''
                if 'links' in entry:
                    item.link_back = entry['links']
                # 'link' wins over 'links' when both are present.
                if 'link' in entry:
                    item.link_back = entry['link']
                item.created = updated
                item.service = self.service
                # For stats: which site the entry came from.
                o = urlparse(entry.source.link)
                item.source = o.netloc
                # Person making the comment.
                item.author = entry.author
                items.append(item)
    except Exception as e:
        raise LoggedServiceError(original_exception=e)
    # BUG FIX: previously the list was built but never returned.
    return items
def _convert_feed(self, feed, since, screen_name):
    """Take the twitter json feed and convert it to ServiceItems.

    Only tweets created on/after *since* (a date) are kept.
    """
    items = []
    for status in feed:
        # We are only interested in tweets since *since*.
        if status.created_at.date() >= since:
            item = ServiceItem()
            # Auto-link @usernames/lists and #hashtags in the tweet text.
            twitter_text = TwitterText(status.text)
            twitter_text.autolink.auto_link_usernames_or_lists()
            twitter_text.autolink.auto_link_hashtags()
            item.body = unicode(twitter_text.text)
            # Flag tweets carrying an image-hosting link.
            if re.search(r"http://yfrog.com/\S*", item.body) \
            or re.search(r"http://twitpic.com/\S*", item.body):
                item.pic_link = True
            item.created = status.created_at
            item.link_back = "http://twitter.com/%s/status/%s" % \
                (screen_name, str(status.id))
            if status.geo:
                # BUG FIX: initialise the location dict before writing to it,
                # as the other feed converters in this file do.
                item.location = {}
                item.location['lat'] = status.geo['coordinates'][0]
                item.location['long'] = status.geo['coordinates'][1]
            item.service = self.service
            items.append(item)
    return items
def _convert_recent_tracks_json(self, json):
    """Convert the json returned from getrecenttracks into ServiceItems.

    Tracks without a 'date' key (e.g. the currently-playing track) are
    skipped.
    """
    items = []
    if 'recenttracks' in json and 'track' in json['recenttracks']:
        for track in json['recenttracks']['track']:
            if 'date' in track:
                item = ServiceItem()
                item.title = track['name']
                item.body = 'by %s' % (track['artist']['#text'],)
                item.link_back = track['url']
                # Date text looks like '13 Mar 2011, 20:24'.
                item.created = datetime.strptime(
                    track['date']['#text'], '%d %b %Y, %H:%M'
                )
                item.service = self.service
                # NOTE: the old tuple-unwrapping guard on item.created was
                # dead code -- strptime always returns a datetime, never a
                # tuple -- so it has been removed.
                items.append(item)
    return items
def _convert_feed(self, json, since):
    """Convert the delicious json feed into ServiceItems, keeping only
    bookmarks created on/after *since* (a date).
    """
    items = []
    for entry in json:
        # The service returns the literal string '404 Not Found' instead of
        # an entry dict for a missing feed; skip that sentinel.
        if entry != '404 Not Found':
            item = ServiceItem()
            # e.g. '2010-11-23T22:03:29Z'
            created = datetime.strptime(entry['dt'], '%Y-%m-%dT%H:%M:%SZ')
            if created.date() >= since:
                try:
                    item.title = entry['d']
                    item.body = entry['n']
                    item.created = created
                    item.link_back = entry['u']
                    item.notes = entry['n']
                    item.tags = entry['t']
                    item.service = self.service
                    items.append(item)
                except KeyError:
                    # Skip malformed entries missing expected keys rather than
                    # the old bare except that swallowed every error.
                    pass
    return items
def _convert_feed(self, json, since):
    """Take the raw json from the foursquare feed and convert it to
    ServiceItems, keeping checkins created on/after *since* (a date).
    """
    items = []
    if json and 'checkins' in json['response']:
        for checkin in json['response']['checkins']['items']:
            created = datetime.fromtimestamp(checkin['createdAt'])
            if created.date() < since:
                continue
            venue = checkin['venue']
            location = venue['location']
            item = ServiceItem()
            item.location = {}
            item.link_back = 'http://foursquare.com/venue/%s' % (venue['id'],)
            item.title = venue['name']
            # Prefer the user's own "shout"; otherwise describe the venue.
            if checkin.get('shout'):
                item.body = checkin['shout']
            elif len(venue['categories']) > 0 and 'city' in location:
                item.body = "A %s in %s" % (venue['categories'][0]['name'],
                                            location['city'])
            elif 'city' in location:
                # BUG FIX: the old code tested venue['city'] (a key that is
                # never set -- city lives under venue['location']) but then
                # read location['city'].
                item.body = "In %s" % (location['city'])
            else:
                item.body = "%s" % (venue['name'])
            # BUG FIX: guard 'lng' with .get(); the old code indexed
            # location['lng'] directly and could raise KeyError.
            if 'lat' in location and location.get('lng'):
                item.location['lat'] = location['lat']
                item.location['long'] = location['lng']
            item.created = created
            item.service = self.service
            if 'isMayor' in checkin:
                item.is_mayor = checkin['isMayor']
            # First category supplies the icon.
            if venue.get('categories'):
                item.icon = venue['categories'][0]['icon']
                item.categories = venue['categories']
            items.append(item)
    return items
def get_items(self, since):
    """Fetch and normalise the updates from the service.

    Returns a (possibly empty) list of ServiceItems, one per photo.
    """
    self.flickr = flickrapi.FlickrAPI(self.service.app.auth_settings['api_key'])
    photos = self._get_service_items(since) or {}
    items = []
    if photos:
        for photo in photos:
            # Info about the pic.
            pic = self.flickr.photos_getInfo(photo_id=photo['id'],
                                             format='json', nojsoncallback='1')
            pic_json = simplejson.loads(pic)
            item = ServiceItem()
            item.title = pic_json['photo']['title']['_content']
            # Use the date the photo was uploaded to flickr NOT when it was
            # taken, e.g. u'posted': u'1300054696'.
            item.created = datetime.fromtimestamp(
                float(pic_json['photo']['dates']['posted']))
            item.link_back = pic_json['photo']['urls']['url'][0]['_content']
            item.tags = pic_json['photo']['tags']['tag']
            item.favorite = pic_json['photo']['isfavorite']
            # Thumb and small URLs differ only in the trailing size suffix.
            url_parts = (pic_json['photo']['farm'], pic_json['photo']['server'],
                         pic_json['photo']['id'], pic_json['photo']['secret'])
            item.url_thumb = "http://farm%s.static.flickr.com/%s/%s_%s_t.jpg" % url_parts
            item.url_small = "http://farm%s.static.flickr.com/%s/%s_%s_m.jpg" % url_parts
            # The body is an inline thumbnail; the old assignment of the
            # photo description to item.body here was a dead store and has
            # been removed.
            item.body = '<br/><img src="%s" />' % (item.url_thumb,)
            item.service = self.service
            items.append(item)
    return items
def _create_checkin(self, checkin):
    """Convert a raw foursquare checkin dict into a ServiceItem."""
    item = ServiceItem()
    created = None
    if 'createdAt' in checkin:
        created = datetime.fromtimestamp(checkin['createdAt'])
    venue = checkin['venue']
    # Some venues have no location block at all; default to an empty dict
    # so the key checks below are safe.
    location = venue.get('location', {})
    item.location = {}
    item.link_back = 'http://foursquare.com/venue/%s' % (venue['id'],)
    item.title = venue['name']
    if 'city' in location:
        item.city = location['city']
    # Prefer the user's own "shout"; otherwise describe the venue.
    if checkin.get('shout'):
        item.body = checkin['shout']
    elif len(venue['categories']) > 0 and 'city' in location:
        item.body = "A %s in %s" % (venue['categories'][0]['name'],
                                    location['city'])
    elif 'city' in location:
        # BUG FIX: the old code tested venue['city'] (never present; city
        # lives under venue['location']) but then read location['city'].
        item.body = "In %s" % (location['city'])
    else:
        item.body = "%s" % (venue['name'])
    # BUG FIX: guard 'lng' with .get(); the old code indexed it directly
    # and could raise KeyError.
    if 'lat' in location and location.get('lng'):
        item.location['lat'] = location['lat']
        item.location['long'] = location['lng']
    if created:
        item.created = created
    item.service = self.service
    if 'isMayor' in checkin:
        item.is_mayor = checkin['isMayor']
    # First category supplies the icon.
    if venue.get('categories'):
        item.icon = venue['categories'][0]['icon']
        item.categories = venue['categories']
    return item
def _convert_feed(serv, user, json, since):
    """Take the raw json from the (v1) foursquare feed and convert it to
    ServiceItems, keeping checkins created on/after *since* (a date).
    """
    items = []
    if json and json.has_key('checkins'):
        for checkin in json['checkins']:
            # grab the +0000 bit on the end of the date and use it make the time right
            # NOTE(review): the sign of the offset is discarded (offset[1:]
            # strips '+'/'-') so negative offsets would be added, and the
            # strptime below hard-codes ' +0000' -- presumably the feed only
            # ever emits +0000; confirm before generalising.
            offset = checkin['created'].rsplit(' ')[-1]
            offset = offset[1:]
            offset = offset[:2]
            time_offset = timedelta(hours=int(offset))
            # Dates look like 'Fri, 04 Feb 11 12:42:38 +0000'.
            created = datetime.strptime(checkin['created'].replace(' +0000', ''), '%a, %d %b %y %H:%M:%S')
            created = created + time_offset
            if created.date() >= since:
                item = ServiceItem()
                item.location = {}
                item.link_back = 'http://foursquare.com/venue/%s' % checkin['venue']['id']
                item.title = checkin['venue']['name']
                # Prefer the user's own "shout"; fall back to the venue city.
                if checkin.has_key('shout') and checkin['shout']:
                    item.body = checkin['shout']
                else:
                    item.body = checkin['venue']['city']
                if checkin['venue'].has_key('geolat') and checkin['venue']['geolat']:
                    item.location['lat'] = checkin['venue']['geolat']
                    item.location['long'] = checkin['venue']['geolong']
                item.created = created
                item.service = serv
                if checkin['venue'].has_key('primarycategory'):
                    item.icon = checkin['venue']['primarycategory']['iconurl']
                item.user = user
                items.append(item)
                # NOTE(review): this del is a no-op -- the name is rebound on
                # the next iteration anyway.
                del(item)
    return items
def get_items(user, since, model_instance):
    """Fetch and normalise the updates from the service.

    Returns a (possibly empty) list of ServiceItems, one per photo.
    """
    serv = model_instance or get_model_instance(user, __name__)
    access_token = AccessToken.objects.get(service=serv)
    flickr = flickrapi.FlickrAPI(access_token.api_token)
    photos = _get_service_items(user, model_instance, flickr, serv, access_token)
    items = []
    if photos and 'photo' in photos['photos']:
        for photo in photos['photos']['photo']:
            # Info about the pic.
            pic = flickr.photos_getInfo(photo_id=photo['id'], format='json',
                                        nojsoncallback='1')
            pic_json = simplejson.loads(pic)
            item = ServiceItem()
            item.title = pic_json['photo']['title']['_content']
            # Use the date the photo was uploaded to flickr NOT when it was
            # taken, e.g. u'posted': u'1300054696'.
            item.created = datetime.fromtimestamp(
                float(pic_json['photo']['dates']['posted']))
            item.service = serv
            item.link_back = pic_json['photo']['urls']['url'][0]['_content']
            item.tags = pic_json['photo']['tags']['tag']
            item.favorite = pic_json['photo']['isfavorite']
            # Thumb and small URLs differ only in the trailing size suffix.
            url_parts = (pic_json['photo']['farm'], pic_json['photo']['server'],
                         pic_json['photo']['id'], pic_json['photo']['secret'])
            item.url_thumb = "http://farm%s.static.flickr.com/%s/%s_%s_t.jpg" % url_parts
            item.url_small = "http://farm%s.static.flickr.com/%s/%s_%s_m.jpg" % url_parts
            item.body = "<br/><img src='" + item.url_thumb + "'/>"
            # TODO: add location.
            item.user = user
            items.append(item)
    return items
def _convert_link_feed(serv, user, stream, since):
    """Convert the facebook link feed into ServiceItems, keeping links
    created on/after *since* (a date).
    """
    items = []
    for entry in stream:
        if 'created_time' in entry:
            created = datetime.fromtimestamp(entry['created_time'])
            if created.date() >= since:
                item = ServiceItem()
                # BUG FIX: the old code mixed dict access
                # (entry['created_time']) with attribute access
                # (entry.created_time, entry.url, ...) on the same object,
                # which fails on plain dicts; use dict access consistently,
                # matching the other link-feed converter in this file.  It
                # also recomputed fromtimestamp a second time for no reason.
                item.created = created
                item.link_back = entry['url']
                item.title = entry['title']
                if entry.get('summary'):
                    item.body = entry['summary']
                else:
                    item.body = entry['owner_comment']
                item.url = entry['url']
                item.service = serv
                item.user = user
                items.append(item)
    return items
def _convert_albums_feed(self, albums, since):
    """Find new facebook albums and build ServiceItems showing their
    cover photos and likes.

    Only albums created on/after *since* (a date) are kept.
    """
    items = []
    for album in albums['data']:
        if 'created_time' in album:
            created = self._convert_time_stamp(album['created_time'])
            if created.date() >= since:
                # Related graph endpoints:
                #   https://graph.facebook.com/<album id>/likes
                #   https://graph.facebook.com/<cover_photo id>
                item = ServiceItem()
                item.created = created
                if 'cover_photo' in album:
                    info_on_photo = self.get_data(
                        'https://graph.facebook.com/%s' % (album['cover_photo']))
                    if info_on_photo and 'images' in info_on_photo:
                        try:
                            item.body = info_on_photo['images'][3]
                        except IndexError:
                            # Fewer than four image sizes -- leave body unset
                            # (the old bare except hid every error).
                            pass
                album_likes = self.get_data(
                    'https://graph.facebook.com/%s/likes' % (album['id']))
                if album_likes:
                    # Map each liker's name to their profile link.
                    item.likes = []
                    for like in album_likes['data']:
                        user = self.get_data(
                            'https://graph.facebook.com/%s' % (like['id']))
                        item.likes.append({like['name']: user['link']})
                if 'name' in album:
                    item.title = album['name']
                item.link_back = album['link']
                item.last_updated = self._convert_time_stamp(album['updated_time'])
                item.service = self.service
                item.number_of_photos = album['count']
                items.append(item)
    return items
def _convert_link_feed(self, links, since):
    """Convert the facebook link feed into ServiceItems, keeping links
    created on/after *since* (a date).
    """
    converted = []
    for entry in links['data']:
        # Links without a timestamp cannot be filtered on date.
        if 'created_time' not in entry:
            continue
        posted = self._convert_time_stamp(entry['created_time'])
        if posted.date() < since:
            continue
        item = ServiceItem()
        item.created = posted
        item.link_back = entry['link']
        if 'message' in entry:
            item.body = entry['message']
        item.url = entry['link']
        item.service = self.service
        converted.append(item)
    return converted
def _convert_photos_feed(self, photos, since):
    """Convert the facebook photo feed into ServiceItems, keeping photos
    created on/after *since* (a date).
    """
    items = []
    for photo in photos['data']:
        if 'created_time' in photo:
            created = self._convert_time_stamp(photo['created_time'])
            if created.date() >= since:
                item = ServiceItem()
                item.created = created
                item.link_back = photo['link']
                item.title = photo['name']
                if 'images' in photo:
                    try:
                        item.body = photo['images'][3]
                    except IndexError:
                        # Fewer than four image sizes -- leave body unset
                        # (the old bare except hid every error).
                        pass
                item.url = photo['link']
                # Names of people tagged in the photo.
                item.tags = []
                if 'tags' in photo:
                    for tag in photo['tags']['data']:
                        item.tags.append(tag['name'])
                # Commenter name/message pairs.
                item.comments = []
                if 'comments' in photo:
                    for comment in photo['comments']['data']:
                        item.comments.append({'name': comment['from']['name'],
                                              'message': comment['message']})
                item.service = self.service
                items.append(item)
    return items
def _convert_status_feed(serv, user, user_stream, uid, since):
    """Take the feed of status updates from facebook and convert it to
    ServiceItems, keeping entries created on/after *since* (a date).
    """
    items = []
    for entry in user_stream:
        if 'message' in entry:
            created = datetime.fromtimestamp(entry['time'])
            if created.date() >= since:
                item = ServiceItem()
                # The old code assigned item.created twice from the same
                # timestamp (once before and once after the other fields);
                # once is enough.
                item.created = created
                item.title = ''
                item.body = entry['message']
                item.link_back = \
                    "http://www.facebook.com/%s/posts/%s?notif_t=feed_comment" \
                    % (uid, entry['status_id'])
                item.service = serv
                item.user = user
                items.append(item)
    return items
def _convert_feed(feed, serv, user):
    """Strip the JSONP wrapper from a tumblr feed and convert the posts
    into ServiceItems.

    Raises LoggedServiceError wrapping any parse failure.
    """
    items = []
    try:
        # The feed arrives as JSONP: callback({...});
        feed = feed.replace('callback(', '')
        feed = feed.rstrip(');\n')
        json = simplejson.loads(feed)
        for entry in json['posts']:
            item = ServiceItem()
            if 'regular-title' in entry:
                item.title = entry['regular-title']
            if 'regular-body' in entry:
                item.body = entry['regular-body']
            # Normalise the GMT timestamp to naive UTC.
            updated = parser.parse(entry['date-gmt'])
            updated = (updated - updated.utcoffset()).replace(tzinfo=None)
            item.created = updated
            item.link_back = entry['url']
            item.service = serv
            item.user = user
            items.append(item)
    except Exception as e:
        raise LoggedServiceError(original_exception=e)
    # BUG FIX: the converted items were previously never returned.
    return items
def _convert_stats_feed(self, feed, since):
    """Take the user's github atom feed and build commit statistics.

    Returns a tuple: (items, avatar, commit_times,
    most_common_commit_time, days_committed, max_commits_on_a_day).
    """
    items = []
    avatar = ""
    # Gravatar for the feed owner, taken from the first entry.
    if feed and feed[0]['actor_attributes'].has_key('gravatar_id'):
        avatar = 'http://www.gravatar.com/avatar/%s' % (feed[0]['actor_attributes']['gravatar_id'],)
    commit_times = {}
    days_committed = generate_days_dict()
    for entry in feed:
        if entry['public']:
            # NOTE(review): this unpack is unused (and shadows the name
            # 'time'); it will raise if created_at does not split into
            # exactly three fields -- confirm it can be dropped.
            dated, time, offset = entry['created_at'].rsplit(' ')
            created = self._convert_date(entry)
            if created.date() >= since:
                # extract commits from push event
                if entry['type'] == 'PushEvent':
                    # fetch and get the stats on commits
                    for commit in entry['payload']['shas']:
                        url = "https://api.github.com/repos/%s/%s/git/commits/%s" % (self.service.auth.username, entry['repository']['name'], commit[0])
                        commit_detail = get_data(self.service, url, disable_oauth=True)
                        item = ServiceItem()
                        item.title = "Commit for %s" % (entry['repository']['name'])
                        item.body = '"%s"' % (commit_detail['message'])
                        item.created = self._convert_commit_date(commit_detail)
                        if commit_detail.has_key('url'):
                            item.link_back = commit_detail['url']
                        item.service = self.service
                        items.append(item)
                        # Commits-per-day histogram (only days pre-seeded by
                        # generate_days_dict are counted).
                        if days_committed.has_key(item.created.date()):
                            days_committed[item.created.date()] = days_committed[item.created.date()] + 1
                        # Commits-per-hour histogram, keyed on '00'..'23'.
                        hour = created.strftime('%H')
                        if commit_times.has_key(hour):
                            # NOTE(review): '+= + 1' is just '+= 1'.
                            commit_times[hour] += + 1
                        else:
                            commit_times[hour] = 1
                else:
                    # Non-push events become plain feed items.
                    item = ServiceItem()
                    self._set_title_body(entry, item)
                    item.created = created
                    if entry.has_key('url'):
                        item.link_back = entry['url']
                    item.service = self.service
                    items.append(item)
    # Order the histograms for display.
    commit_times = SortedDict(sorted(
        commit_times.items(),
        reverse=True,
        key=lambda x: x[1]
    ))
    days_committed = SortedDict(sorted(days_committed.items(), reverse=False, key=lambda x: x[0]))
    max_commits_on_a_day = SortedDict(sorted(days_committed.items(), reverse=True, key=lambda x: x[1]))
    max_commits_on_a_day = max_commits_on_a_day[max_commits_on_a_day.keyOrder[0]] + 1
    return items, avatar, commit_times, self._most_common_commit_time(commit_times), \
        days_committed, max_commits_on_a_day
def get_stats_items(self, since):
    """Fetch and normalise the updates from the service and generate
    stats (EXIF camera info, views, tags, comment counts, location).

    Returns None when no username is available, otherwise a (possibly
    empty) list of ServiceItems.
    """
    user_id = self._get_username()
    if not user_id:
        return
    photos = self._get_oauth_v1(
        'http://api.flickr.com/services/rest/?method=flickr.people.getPhotos'
        '&user_id=%s&format=json&nojsoncallback=1&min_upload_date=%s'
        % (user_id, since.strftime('%Y-%m-%d+%H:%M:%S')))
    items = []
    if photos and int(photos['photos']['total']) > 0:
        for photo in photos['photos']['photo']:
            item = ServiceItem()
            # Info about the pic.
            pic_json = self._get_oauth_v1(
                'http://api.flickr.com/services/rest/?method=flickr.photos.getInfo'
                '&user_id=%s&format=json&nojsoncallback=1&photo_id=%s'
                % (user_id, photo['id']))
            # Info about how the pic was taken.
            exif_json = self._get_oauth_v1(
                'http://api.flickr.com/services/rest/?method=flickr.photos.getExif'
                '&user_id=%s&format=json&nojsoncallback=1&photo_id=%s'
                % (user_id, photo['id']))
            item.camera_make, item.camera_model = self._extract_camera_type(exif_json)
            item.title = pic_json['photo']['title']['_content']
            # Use the date the photo was uploaded to flickr NOT when it was
            # taken, e.g. u'posted': u'1300054696'.
            item.created = datetime.fromtimestamp(
                float(pic_json['photo']['dates']['posted']))
            item.link_back = pic_json['photo']['urls']['url'][0]['_content']
            item.favorite = pic_json['photo']['isfavorite']
            # Views, tags and comment counts feed the stats page.  (The old
            # code assigned item.tags twice; once is enough.)
            item.views = pic_json['photo']['views']
            item.tags = pic_json['photo']['tags']['tag']
            item.number_of_comments = pic_json['photo']['comments']['_content']
            # Thumb and small URLs differ only in the trailing size suffix.
            url_parts = (pic_json['photo']['farm'], pic_json['photo']['server'],
                         pic_json['photo']['id'], pic_json['photo']['secret'])
            item.url_thumb = "http://farm%s.static.flickr.com/%s/%s_%s_t.jpg" % url_parts
            item.url_small = "http://farm%s.static.flickr.com/%s/%s_%s_m.jpg" % url_parts
            # The body is an inline thumbnail (matching the other flickr
            # converters); the old description assignment was a dead store.
            item.body = '<br/><img src="%s" />' % (item.url_thumb,)
            # Add location when flickr supplies one.
            item.location = {}
            if 'location' in pic_json['photo']:
                item.location['lat'] = pic_json['photo']['location']['latitude']
                item.location['long'] = pic_json['photo']['location']['longitude']
            item.service = self.service
            items.append(item)
    return items
def _convert_stats_feed(self, feed, since):
    """Take the user's github atom feed and build commit statistics.

    Returns a dict of events, commits, avatar URL, commit-time
    histograms and per-repo metadata (see the return at the bottom), or
    None when no username/feed is available.
    """
    items = []
    commits = []
    commit_times = {}
    repos = {}
    avatar = ""
    # Used to de-duplicate commits that appear in several push events.
    list_of_commits = {}
    username, name = self._get_details()
    if not username or not feed:
        return
    # Gravatar for the feed owner, taken from the first entry.
    if feed and feed[0]['actor_attributes'].has_key('gravatar_id'):
        avatar = 'http://www.gravatar.com/avatar/%s' % (feed[0]['actor_attributes']['gravatar_id'],)
    days_committed = generate_days_dict()
    for entry in feed:
        if entry['public']:
            # NOTE(review): this unpack is unused (and shadows the name
            # 'time'); it raises if created_at does not split into exactly
            # three fields -- confirm it can be dropped.
            dated, time, offset = entry['created_at'].rsplit(' ')
            if entry.has_key('created_at'):
                created = self._convert_date(entry['created_at'])
                if created.date() >= since:
                    # Look up repo metadata once per entry for the stats page.
                    if entry.has_key('repository') \
                    and entry['repository'].has_key('name') \
                    and entry['repository'].has_key('owner'):
                        url = "https://api.github.com/repos/%s/%s?" % (
                            entry['repository']['owner'],
                            entry['repository']['name'])
                        repo = self.get_data(url)
                        if repo:
                            repos[entry['repository']['owner'] + entry['repository']['name']] = repo
                    # extract commits from push event
                    if entry['type'] == 'PushEvent':
                        # fetch and get the stats on commits
                        for commit in entry['payload']['shas']:
                            # test it we have seen the commit before
                            if not list_of_commits.has_key(commit[0]):
                                list_of_commits[commit[0]] = True
                                item = ServiceItem()
                                # Only count commits authored by this user
                                # (commit[-1] carries the author name).
                                if name == commit[-1]:
                                    commit_detail = None
                                    if entry.has_key('repository'):
                                        url = "https://api.github.com/repos/%s/%s/git/commits/%s?" \
                                            % (entry['repository']['owner'],
                                               entry['repository']['name'],
                                               commit[0])
                                        commit_detail = self.get_data(url)
                                        item.body = '"%s"' % (commit_detail['message'])
                                        item.title = "Commit for %s" % (entry['repository']['name'])
                                    else:
                                        if entry['type'] == 'PushEvent':
                                            item.title = "Push for %s" % (entry['url'].split('/')[4])
                                            item.body = entry['payload']['shas'][0][2]
                                    # Prefer the commit's own author date over
                                    # the event date.
                                    if commit_detail and commit_detail.has_key('author') and commit_detail['author'].has_key('date'):
                                        item.created = self._convert_date(commit_detail['author']['date'])
                                    else:
                                        item.created = self._convert_date(entry['created_at'])
                                    # tag entry as private as the repo is marked private
                                    # NOTE(review): 'repo' may be unbound here
                                    # (NameError) if the repository lookup above
                                    # was skipped for every earlier entry -- confirm.
                                    if repo and repo.has_key('private'):
                                        item.private = repo['private']
                                    if commit_detail and commit_detail.has_key('url'):
                                        # e.g. https://github.com/bassdread/causal/commit/7aef64a152ec28846111612620b6042b21615423
                                        #item.link_back = "https://github.com/%s/%s/commit/%s" %(entry['repository']['owner'], entry['repository']['name'], commit[0])
                                        item.link_back = commit_detail['url']
                                    else:
                                        item.link_back = entry['url']
                                    item.service = self.service
                                    commits.append(item)
                                    # NOTE(review): created_local is presumably
                                    # derived from item.created by ServiceItem
                                    # -- confirm.
                                    if days_committed.has_key(item.created_local.date()):
                                        days_committed[item.created_local.date()] = days_committed[item.created_local.date()] + 1
                                    hour = item.created_local.strftime('%H')
                                    if commit_times.has_key(hour):
                                        commit_times[hour] += 1
                                    else:
                                        commit_times[hour] = 1
                                    del(item)
                    else:
                        # Non-push events become plain feed items.
                        item = ServiceItem()
                        self._set_title_body(entry, item)
                        item.created = created
                        if entry.has_key('url'):
                            item.link_back = entry['url']
                        item.service = self.service
                        items.append(item)
    # Order the histograms for display.
    commit_times = SortedDict(sorted(
        commit_times.items(),
        reverse=True,
        key=lambda x: x[1]
    ))
    days_committed = SortedDict(sorted(days_committed.items(), reverse=False, key=lambda x: x[0]))
    max_commits_on_a_day = SortedDict(sorted(days_committed.items(), reverse=True, key=lambda x: x[1]))
    max_commits_on_a_day = max_commits_on_a_day[max_commits_on_a_day.keyOrder[0]] + 1
    # Newest commits first.
    commits.sort(key=lambda commit: commit.created_local, reverse=True)
    return { 'events' : items,
             'commits' : commits,
             'avatar' : avatar,
             'commit_times' : commit_times,
             'most_common_commit_time' : self._most_common_commit_time(commit_times),
             'days_committed' : days_committed,
             'max_commits_on_a_day' : max_commits_on_a_day,
             'repos' : repos}
def get_stats_items(self, since):
    """Fetch and normalise the updates from the service and generate
    stats (EXIF camera info, views, tags, comment counts, location).

    Returns a (possibly empty) list of ServiceItems.
    """
    self.flickr = flickrapi.FlickrAPI(self.service.app.auth_settings['api_key'])
    photos = self._get_service_items(since)
    items = []
    if photos:
        for photo in photos:
            item = ServiceItem()
            # Info about the pic.
            pic = self.flickr.photos_getInfo(photo_id=photo['id'], format='json', nojsoncallback='1')
            pic_json = simplejson.loads(pic)
            # Info about how the pic was taken.
            exif = self.flickr.photos_getExif(photo_id=photo['id'], format='json', nojsoncallback ='1')
            exif_json = simplejson.loads(exif)
            item.camera_make, item.camera_model = self._extract_camera_type(exif_json)
            item.title = pic_json['photo']['title']['_content']
            item.body = pic_json['photo']['description']['_content']
            # Use date from when the photo was uploaded to flickr NOT when it
            # was taken, e.g. u'posted': u'1300054696'.
            item.created = datetime.fromtimestamp(float(pic_json['photo']['dates']['posted']))
            item.link_back = pic_json['photo']['urls']['url'][0]['_content']
            item.tags = pic_json['photo']['tags']['tag']
            item.favorite = pic_json['photo']['isfavorite']
            # Add views.
            item.views = pic_json['photo']['views']
            # Add tags.  NOTE(review): duplicates the item.tags assignment above.
            item.tags = pic_json['photo']['tags']['tag']
            item.number_of_comments = pic_json['photo']['comments']['_content']
            item.url_thumb = "http://farm%s.static.flickr.com/%s/%s_%s_t.jpg" % (
                pic_json['photo']['farm'],
                pic_json['photo']['server'],
                pic_json['photo']['id'],
                pic_json['photo']['secret']
            )
            item.url_small = "http://farm%s.static.flickr.com/%s/%s_%s_m.jpg" % (
                pic_json['photo']['farm'],
                pic_json['photo']['server'],
                pic_json['photo']['id'],
                pic_json['photo']['secret']
            )
            # NOTE(review): this overwrites the description assigned to
            # item.body above -- the earlier assignment is a dead store.
            item.body = '<br/><img src="%s" />' % (item.url_thumb,)
            # Add location when flickr supplies one.
            item.location = {}
            if pic_json['photo'].has_key('location'):
                item.location['lat'] = pic_json['photo']['location']['latitude']
                item.location['long'] = pic_json['photo']['location']['longitude']
            item.service = self.service
            items.append(item)
    return items
# Fragment of a larger function (its def/locals 'serv', 'access_token',
# 'items', 'user', 'week_ago_epoch' are defined above this view): fetch the
# facebook stream via FQL and collect liked posts with their comments.
try:
    stream_stream = _fetch_feed(serv, access_token, STREAM_FQL % int(week_ago_epoch))
except Exception, exception:
    # NOTE(review): this *returns* the error object instead of raising it --
    # confirm callers expect that.
    return LoggedServiceError(original_exception=exception)
for strm in stream_stream:
    # do we have permission from the user to post entry?
    # ignore if the post is entry
    if strm['comments']['can_post'] and strm.has_key('message'):
        # Only keep posts the user themselves liked.
        if strm.has_key('likes') and strm['likes'].has_key('user_likes'):
            if strm['likes']['user_likes']:
                item = ServiceItem()
                item.liked = strm['likes']['user_likes']
                item.who_else_liked = strm['likes']['href']
                # NOTE(review): mixes dict access (strm['likes']) with
                # attribute access (strm.created_time, strm.message) on the
                # same object -- presumably strm supports both; confirm.
                item.created = datetime.fromtimestamp(strm.created_time)
                item.body = strm.message
                # go off and fetch details about a user
                item.other_peoples_comments = []
                for comment in strm['comments']['comment_list']:
                    users = query(USER_NAME_FETCH % comment['fromid'])
                    for user in users:
                        user_details = {
                            'name' : user['name'],
                            'profile_pic' : user['pic_small']
                        }
                        item.other_peoples_comments.append(user_details)
                # NOTE(review): 'user' here is the innermost loop variable
                # from the comment lookup above, which presumably shadows the
                # enclosing function's user -- confirm which is intended.
                item.service = serv
                item.user = user
                items.append(item)