def get_items(user, since, model_instance=None):
    """Fetch the user's shared Google Reader items and convert them to ServiceItems."""
    serv = model_instance or get_model_instance(user, __name__)
    items = []
    at = AccessToken.objects.get(service=serv)
    try:
        feed = feedparser.parse(
            'http://www.google.com/reader/public/atom/user/%s/state/com.google/broadcast'
            % (at.userid,))
        for entry in feed.entries:
            # normalise the timestamp to naive UTC
            updated = parser.parse(entry.updated)
            updated = (updated - updated.utcoffset()).replace(tzinfo=None)
            if updated.date() >= since:
                item = ServiceItem()
                item.title = entry.title
                # we don't take the summary as it's huge
                item.body = ''
                if 'links' in entry:
                    item.link_back = entry['links']
                if 'link' in entry:
                    item.link_back = entry['link']
                item.created = updated
                item.service = serv
                item.user = user
                # for stats
                o = urlparse(entry.source.link)
                item.source = o.netloc
                item.author = entry.author  # person making the comment
                # entry.content[0].value == comment
                items.append(item)
        return items
    except Exception as e:
        raise LoggedServiceError(original_exception=e)
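# A minimal sketch of a call site for one of these get_items converters,
# assuming a Django User object and a datetime.date for `since`; `some_user`
# and the surrounding code are hypothetical.
from datetime import date, timedelta

week_ago = date.today() - timedelta(days=7)
items = get_items(some_user, week_ago)
for item in items:
    print item.created.isoformat(), item.title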
def _convert_feed(serv, user, json, since):
    """Convert the json feed into ServiceItems, limiting on since."""
    items = []
    for entry in json:
        # check we have a valid feed
        if entry != '404 Not Found':
            created = datetime.strptime(entry['dt'],
                                        '%Y-%m-%dT%H:%M:%SZ')  # e.g. '2010-11-23T22:03:29Z'
            if created.date() >= since:
                try:
                    item = ServiceItem()
                    item.title = entry['d']
                    item.body = entry['n']
                    item.created = created
                    item.link_back = entry['u']
                    item.notes = entry['n']
                    item.tags = entry['t']
                    item.service = serv
                    item.user = user
                    items.append(item)
                except KeyError:
                    # skip entries missing any of the expected fields
                    pass
    return items
def _convert_feed(user, serv, feed, since, screen_name):
    """Take the json and convert to ServiceItems."""
    items = []
    for status in feed:
        # we are only interested in tweets newer than `since`
        if status.created_at.date() > since:
            item = ServiceItem()
            item.location = {}
            twitter_text = TwitterText(status.text)
            twitter_text.autolink.auto_link_usernames_or_lists()
            twitter_text.autolink.auto_link_hashtags()
            item.body = unicode(twitter_text.text)
            if re.search(r"http://yfrog.com/\S*", item.body) \
               or re.search(r"http://twitpic.com/\S*", item.body):
                item.pic_link = True
            item.created = status.created_at
            item.link_back = "http://twitter.com/%s/status/%s" % \
                (screen_name, str(status.id))
            if status.geo:
                item.location['lat'] = status.geo['coordinates'][0]
                item.location['long'] = status.geo['coordinates'][1]
            item.service = serv
            item.user = user
            items.append(item)
    return items
def get_items(user, since, model_instance=None):
    """Fetch and normalise the updates from the service."""
    serv = model_instance or get_model_instance(user, __name__)
    access_token = AccessToken.objects.get(service=serv)
    flickr = flickrapi.FlickrAPI(access_token.api_token)
    photos = _get_service_items(user, model_instance, flickr, serv, access_token)
    items = []
    if photos and 'photo' in photos['photos']:
        for photo in photos['photos']['photo']:
            # info about the pic
            pic = flickr.photos_getInfo(photo_id=photo['id'],
                                        format='json',
                                        nojsoncallback='1')
            pic_json = simplejson.loads(pic)
            item = ServiceItem()
            item.title = pic_json['photo']['title']['_content']
            # use the date the photo was uploaded to flickr, NOT when it was taken
            item.created = datetime.fromtimestamp(
                float(pic_json['photo']['dates']['posted']))  # e.g. u'posted': u'1300054696'
            item.service = serv
            item.link_back = pic_json['photo']['urls']['url'][0]['_content']
            item.tags = pic_json['photo']['tags']['tag']
            item.favorite = pic_json['photo']['isfavorite']
            item.url_thumb = "http://farm%s.static.flickr.com/%s/%s_%s_t.jpg" % \
                (pic_json['photo']['farm'], pic_json['photo']['server'],
                 pic_json['photo']['id'], pic_json['photo']['secret'])
            item.url_small = "http://farm%s.static.flickr.com/%s/%s_%s_m.jpg" % \
                (pic_json['photo']['farm'], pic_json['photo']['server'],
                 pic_json['photo']['id'], pic_json['photo']['secret'])
            item.body = "<br/><img src='" + item.url_thumb + "'/>"
            # add location
            item.user = user
            items.append(item)
    return items
def _convert_feed(serv, user, json, since):
    """Take the raw json from the feed and convert it to ServiceItems."""
    items = []
    if json and 'checkins' in json:
        for checkin in json['checkins']:
            # grab the +0000 bit on the end of the date and use it to make the time right
            offset = checkin['created'].rsplit(' ')[-1]
            time_offset = timedelta(hours=int(offset[1:3]))
            created = datetime.strptime(
                checkin['created'].replace(' +0000', ''),
                '%a, %d %b %y %H:%M:%S')  # e.g. 'Fri, 04 Feb 11 12:42:38 +0000'
            created = created + time_offset
            if created.date() >= since:
                item = ServiceItem()
                item.location = {}
                item.link_back = 'http://foursquare.com/venue/%s' % checkin['venue']['id']
                item.title = checkin['venue']['name']
                if 'shout' in checkin and checkin['shout']:
                    item.body = checkin['shout']
                else:
                    item.body = checkin['venue']['city']
                if 'geolat' in checkin['venue'] and checkin['venue']['geolat']:
                    item.location['lat'] = checkin['venue']['geolat']
                    item.location['long'] = checkin['venue']['geolong']
                item.created = created
                item.service = serv
                if 'primarycategory' in checkin['venue']:
                    item.icon = checkin['venue']['primarycategory']['iconurl']
                item.user = user
                items.append(item)
    return items
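# The slicing above assumes the offset is always '+0000'. A sketch of a more
# general parse using the stdlib's RFC 2822 helpers, assuming
# checkin['created'] keeps the 'Fri, 04 Feb 11 12:42:38 +0000' shape:
from datetime import datetime
from email.utils import mktime_tz, parsedate_tz

def _parse_checkin_date(raw):
    # parsedate_tz copes with the two-digit year and the numeric offset;
    # mktime_tz folds the offset in, giving a UTC epoch timestamp
    return datetime.utcfromtimestamp(mktime_tz(parsedate_tz(raw)))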
def _convert_recent_tracks_json(user, serv, json):
    """Convert the json returned from getrecenttracks into ServiceItems."""
    items = []
    if 'recenttracks' in json and 'track' in json['recenttracks']:
        for track in json['recenttracks']['track']:
            # tracks without a date are "now playing" entries; skip them
            if 'date' in track:
                item = ServiceItem()
                item.title = track['name']
                item.body = 'by %s' % (track['artist']['#text'],)
                item.link_back = track['url']
                item.created = datetime.strptime(track['date']['#text'],
                                                 '%d %b %Y, %H:%M')
                item.service = serv
                item.user = user
                items.append(item)
    return items
def _convert_feed(user, serv, feed, since):
    """Take the user's atom feed and convert it to ServiceItems."""
    items = []
    for entry in feed:
        if 'public' in entry and entry['public']:
            created = _convert_date(entry)
            if created.date() > since:
                item = ServiceItem()
                _set_title_body(entry, item)
                item.created = created
                if entry['type'] == 'GistEvent':
                    item.link_back = entry['payload']['url']
                elif 'url' in entry:
                    item.link_back = entry['url']
                item.service = serv
                item.user = user
                items.append(item)
    return items
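# Neither _convert_date nor _set_title_body appears in this listing. A minimal
# sketch of what _convert_date might look like, assuming the github timeline
# JSON carried created_at strings such as '2011/03/13 16:35:36 -0700' (the
# exact format is an assumption, inferred from the rsplit(' ') unpacking seen
# in _convert_stats_feed below):
def _convert_date(entry):
    stamp, offset = entry['created_at'].rsplit(' ', 1)
    parsed = datetime.strptime(stamp, '%Y/%m/%d %H:%M:%S')
    delta = timedelta(hours=int(offset[1:3]), minutes=int(offset[3:5]))
    # normalise to naive UTC by undoing the local offset
    return parsed + delta if offset[0] == '-' else parsed - delta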
def _convert_status_feed(serv, user, user_stream, uid, since):
    """Take the feed of status updates from facebook and convert it to ServiceItems."""
    items = []
    for entry in user_stream:
        if 'message' in entry:
            created = datetime.fromtimestamp(entry['time'])
            if created.date() >= since:
                item = ServiceItem()
                item.created = created
                item.title = ''
                item.body = entry['message']
                item.link_back = \
                    "http://www.facebook.com/%s/posts/%s?notif_t=feed_comment" % \
                    (uid, entry['status_id'])
                item.service = serv
                item.user = user
                items.append(item)
    return items
def _convert_feed(feed, serv, user):
    """Strip the JSONP wrapper from the tumblr feed and convert it to ServiceItems."""
    items = []
    try:
        feed = feed.replace('callback(', '')
        feed = feed.rstrip(');\n')
        json = simplejson.loads(feed)
        for entry in json['posts']:
            item = ServiceItem()
            if 'regular-title' in entry:
                item.title = entry['regular-title']
            if 'regular-body' in entry:
                item.body = entry['regular-body']
            # normalise the timestamp to naive UTC
            updated = parser.parse(entry['date-gmt'])
            updated = (updated - updated.utcoffset()).replace(tzinfo=None)
            item.created = updated
            item.link_back = entry['url']
            item.service = serv
            item.user = user
            items.append(item)
        return items
    except Exception as e:
        raise LoggedServiceError(original_exception=e)
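# rstrip(');\n') strips any trailing run of those three characters, which works
# for the callback(...) wrapper but would also eat a genuine trailing ')' in
# the payload. A stricter unwrap, assuming the wrapper is always literally
# 'callback(...);':
import re

def _strip_jsonp(feed):
    # remove exactly one leading 'callback(' and one trailing ');'
    return re.sub(r"(?s)^\s*callback\((.*)\);?\s*$", r"\1", feed)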
def _convert_stats_feed(user, serv, feed, since):
    """Take the user's atom feed and build commit-time stats alongside the items."""
    items = []
    avatar = ""
    commit_times = {}
    if feed:
        if 'gravatar_id' in feed[0]['actor_attributes']:
            avatar = 'http://www.gravatar.com/avatar/%s' % \
                feed[0]['actor_attributes']['gravatar_id']
        for entry in feed:
            if entry['public']:
                created = _convert_date(entry)
                if created.date() > since:
                    # bucket commits by hour of the day
                    hour = created.strftime('%H')
                    if hour + ' ish' in commit_times:
                        commit_times[hour + ' ish'] += 1
                    else:
                        commit_times[hour + ' ish'] = 1
                    item = ServiceItem()
                    _set_title_body(entry, item)
                    item.created = created
                    item.link_back = entry['url']
                    item.service = serv
                    item.user = user
                    items.append(item)
    commit_times = SortedDict(sorted(commit_times.items(),
                                     reverse=True, key=lambda x: x[1]))
    return items, avatar, commit_times
def _convert_link_feed(serv, user, stream, since):
    """Convert the facebook link feed to ServiceItems."""
    items = []
    for entry in stream:
        if 'created_time' in entry:
            created = datetime.fromtimestamp(entry['created_time'])
            if created.date() >= since:
                item = ServiceItem()
                item.created = created
                item.link_back = entry['url']
                item.title = entry['title']
                if entry.get('summary'):
                    item.body = entry['summary']
                else:
                    item.body = entry.get('owner_comment', '')
                item.url = entry['url']
                item.service = serv
                item.user = user
                items.append(item)
    return items
        item.who_else_liked = strm['likes']['href']
        item.created = datetime.fromtimestamp(strm['created_time'])
        item.body = strm['message']
        # go off and fetch details about each user who commented
        item.other_peoples_comments = []
        for comment in strm['comments']['comment_list']:
            commenters = query(USER_NAME_FETCH % comment['fromid'])
            for commenter in commenters:
                user_details = {
                    'name': commenter['name'],
                    'profile_pic': commenter['pic_small'],
                }
                item.other_peoples_comments.append(user_details)
        item.service = serv
        item.user = user
        item.link_back = strm['permalink']
        items.append(item)

    # get pics posted: fetch albums
    albums = _fetch_albums_json(serv, access_token.oauth_token)
    for album in albums['data']:
        if 'updated_time' in album:
            updated = datetime.strptime(album['updated_time'].split('+')[0],
                                        '%Y-%m-%dT%H:%M:%S')  # e.g. '2007-06-26T17:55:03+0000'
            if updated.date() > since:
                photo_feed = _fetch_photos_from_album_json(album['id'], serv,
                                                           access_token.oauth_token)
                # skim through each pic to find the new ones