def rss():
    """Serve a public RSS 2.0 feed of announcements for the requested groups.

    Reads ``request.vars.groups`` (comma-separated group ids, defaulting to
    '1'), selects announcements shared with those groups that are flagged
    ``to_rss``, and returns the serialized RSS document.  Returns an empty
    string when the group list is malformed.
    """
    response.headers['Content-Type'] = 'application/rss+xml'
    # Local imports, matching the style of the sibling controllers;
    # `datetime` was previously used here without being imported.
    import datetime
    import gluon.contrib.rss2 as rss2
    requested_groups = request.vars.groups or '1'
    try:
        requested_groups = tuple(int(i) for i in requested_groups.split(','))
    except ValueError:
        # Non-numeric group id in the query string: emit an empty body
        # rather than a server error (was a bare except before).
        return ''
    entries = db(db.announcement.id == db.access.record_id)\
        (db.access.table_name == 'announcement')\
        (db.access.persons_group.belongs(requested_groups))\
        (db.announcement.to_rss == True)\
        (db.auth_user.id == db.announcement.owner)\
        .select(groupby=db.announcement.id)
    items = [rss2.RSSItem(title=entry.announcement.title,
                          link=MAIN,
                          author=entry.auth_user.email,
                          description=entry.announcement.body,
                          pubDate=entry.announcement.posted_on)
             for entry in entries]
    rss = rss2.RSS2(title='public rss for groups ' + str(requested_groups),
                    link=MAIN,
                    description=str(requested_groups),
                    lastBuildDate=datetime.datetime.now(),
                    items=items)
    return rss2.dumps(rss)
def planet():
    """Aggregate the registered feeds into a single "planet" RSS feed.

    Fetches every row of ``db.feed``, keeps entries from "general" feeds
    only when they match FILTER, sorts all entries newest-first, and
    returns them as one RSS 2.0 document.
    """
    FILTER = 'web2py'

    import datetime
    import re
    import gluon.contrib.rss2 as rss2
    import gluon.contrib.feedparser as feedparser

    # filter for general (not categorized) feeds
    regex = re.compile(FILTER, re.I)

    # select all feeds
    feeds = db(db.feed).select()

    now = datetime.datetime.now()
    entries = []
    for feed in feeds:
        # fetch and parse feeds
        d = feedparser.parse(feed.url)
        for entry in d.entries:
            # filter feed entries
            if not feed.general or regex.search(entry.description):
                # prefer the entry's own author; fall back to the feed's.
                # (A conditional expression instead of the old `and/or`
                # idiom, which misbehaved on a falsy author name.)
                if hasattr(entry, 'author_detail'):
                    author = entry.author_detail.name or feed.author
                else:
                    author = feed.author
                # feedparser leaves date_parsed unset/None for unparsable
                # dates; fall back to "now" instead of raising TypeError.
                date_parsed = getattr(entry, 'date_parsed', None)
                date = (datetime.datetime(*date_parsed[:6])
                        if date_parsed else now)
                # extract entry attributes
                entries.append({
                    'feed': {'author': feed.author,
                             'link': feed.link,
                             'url': feed.url,
                             'name': feed.name},
                    'title': entry.title,
                    'link': entry.link,
                    'description': entry.description,
                    'author': author,
                    'date': date
                })

    # sort entries by date, descending
    entries.sort(key=lambda x: x['date'], reverse=True)

    # aggregate rss2 feed with parsed entries
    rss = rss2.RSS2(
        title="Planet web2py",
        link=URL("planet").encode("utf8"),
        description="planet author",
        lastBuildDate=now,
        items=[
            rss2.RSSItem(
                title=entry['title'],
                link=entry['link'],
                description=entry['description'],
                author=entry['author'],
                # guid = rss2.Guid('unknown'),
                pubDate=entry['date'])
            for entry in entries
        ])

    # return new rss feed xml
    response.headers['Content-Type'] = 'application/rss+xml'
    return rss2.dumps(rss)
def rss_aggregator():
    """Fetch the Slashdot feed and re-serve it as an RSS 2.0 document.

    Every item is republished with the current time as its pubDate.
    """
    import datetime
    import gluon.contrib.rss2 as rss2
    import gluon.contrib.feedparser as feedparser

    parsed = feedparser.parse('http://rss.slashdot.org/Slashdot/slashdot/to')
    timestamp = datetime.datetime.now()

    # Re-wrap each upstream entry as an RSSItem of our own.
    items = []
    for entry in parsed.entries:
        items.append(rss2.RSSItem(title=entry.title,
                                  link=entry.link,
                                  description=entry.description,
                                  pubDate=timestamp))

    feed = rss2.RSS2(title=parsed.channel.title,
                     link=parsed.channel.link,
                     description=parsed.channel.description,
                     lastBuildDate=timestamp,
                     items=items)

    response.headers['Content-Type'] = 'application/rss+xml'
    return feed.to_xml(encoding='utf-8')
def rss(feed):
    """Serialize a feed dictionary to RSS 2.0 XML.

    Accepts either an 'entries' or an 'items' key for the entry list.
    Missing string fields default to '' (titles default to '(notitle)');
    missing timestamps default to the current time.

    NOTE(review): relies on module-level `datetime` and `rss2` bindings
    not visible in this chunk — confirm they exist at import time.
    """
    # Normalize: some callers supply 'items' instead of 'entries'.
    if 'entries' not in feed and 'items' in feed:
        feed['entries'] = feed['items']

    def safestr(obj, key, default=''):
        # UTF-8 encode with replacement so odd characters never raise.
        if key not in obj:
            return default
        return str(obj[key]).encode('utf-8', 'replace')

    now = datetime.datetime.now()

    rss_items = []
    for entry in feed.get('entries', []):
        item = rss2.RSSItem(
            title=safestr(entry, 'title', '(notitle)'),
            link=safestr(entry, 'link'),
            description=safestr(entry, 'description'),
            pubDate=entry.get('created_on', now))
        rss_items.append(item)

    document = rss2.RSS2(
        title=safestr(feed, 'title'),
        link=safestr(feed, 'link'),
        description=safestr(feed, 'description'),
        lastBuildDate=feed.get('created_on', now),
        items=rss_items)

    return document.to_xml(encoding='utf-8')
def rss(feed):
    """Serialize a feed dictionary to RSS 2.0 XML.

    Accepts either an 'entries' or an 'items' key for the entry list.
    Missing titles default to '(notitle)', other missing string fields
    to '', and missing timestamps to the current time.

    NOTE(review): relies on module-level `datetime` and `rss2` bindings
    not visible in this chunk — confirm they exist at import time.
    """
    # Normalize: some callers supply 'items' instead of 'entries'.
    if 'entries' not in feed and 'items' in feed:
        feed['entries'] = feed['items']
    now = datetime.datetime.now()
    rss = rss2.RSS2(
        title=str(feed.get('title', '(notitle)').encode('utf-8', 'replace')),
        # BUGFIX: default was None, so a feed without 'link' raised
        # AttributeError on None.encode(); default to '' instead.
        link=str(feed.get('link', '').encode('utf-8', 'replace')),
        description=str(
            feed.get('description', '').encode('utf-8', 'replace')),
        lastBuildDate=feed.get('created_on', now),
        items=[
            rss2.RSSItem(
                title=str(
                    entry.get('title', '(notitle)').encode('utf-8', 'replace')),
                # BUGFIX: same None-default crash for entries without 'link'.
                link=str(
                    entry.get('link', '').encode('utf-8', 'replace')),
                description=str(
                    entry.get('description', '').encode('utf-8', 'replace')),
                pubDate=entry.get('created_on', now))
            for entry in feed.get('entries', [])
        ])
    return rss.to_xml(encoding='utf-8')