def new_articles(feedid, feedblob):
    """Insert articles from *feedblob* that are not yet in the database.

    Articles already present (matched by guid) are skipped.  New articles
    get sequence numbers continuing from the feed's current maximum.
    Prints the feed title and a summary relation of the inserted articles.

    feedid:   id of the feed the articles belong to.
    feedblob: parsed feed object (feedparser result) with .entries/.feed.
    """
    new = rel(db.r.articles.header)()
    with ns(fid=feedid):
        ids = db.r.articles.where('feedid == fid').compute('seqno')
        # Highest existing seqno for this feed (0 when the feed is empty),
        # plus one; next(ids, 0) seeds reduce so an empty iterator works.
        nextid = functools.reduce(max, ids, next(ids, 0)) + 1
    # Entries arrive newest-first; reverse so seqnos follow publication order.
    for a in reversed(feedblob.entries):
        with ns(a=a):
            if db.r.articles.where('guid == a.id'):
                # Do we need to check pubdate in case of update?
                continue
        if 'published_parsed' in a:
            # BUG FIX: struct_time[:6] is (year, month, day, hour, minute,
            # second); the previous [:7] passed tm_wday (0-6) as datetime's
            # microsecond argument, producing subtly wrong timestamps.
            pubdate = datetime(*a.published_parsed[:6])
        else:
            # No publication date in the feed; use a sentinel far in the past.
            pubdate = datetime(1900, 1, 1)
        new_article = row(guid=a.id, feedid=feedid, seqno=nextid,
                          title=a.title, link=a.link, data=a,
                          pubdate=pubdate, read=False)
        db.r.articles.insert(~new_article)
        new = new | ~new_article
        nextid += 1
    print(feedblob.feed.get('title', '**Unknown Title**'))
    print(new >> {'title', 'pubdate'})
def articlelist_content(feedid, showall):
    """Generate the HTML body for one feed's article-list page.

    Yields a navigation link table, then (if there are matching articles)
    a table of articles sorted by seqno, then the navigation links again.

    feedid:  id of the feed to list.
    showall: when true, include already-read articles and offer a
             'Hide Read' toggle; otherwise only unread, with 'Show All'.
    """
    with dinsd.ns(id=feedid):
        articles = syn.db.r.articles.where(
            'feedid == id' + ('' if showall else ' and not read'))

    def navlinks():
        # Built fresh each time in case linktable returns a one-shot iterable.
        return linktable(
            link('Refresh', '/feed/refresh/{}'.format(feedid)),
            link('Hide Read' if showall else 'Show All',
                 # BUG FIX: the conditional must be parenthesized; without
                 # parens the ternary bound looser than '+', so the link was
                 # the bare relative '?showall' when showall was false.
                 '/feed/{}'.format(feedid) + ('' if showall else '?showall')),
            link('Feed List', '/'))

    yield navlinks()
    if articles:
        # Project to (title, seqno, pubdate, author) tuples for sorting.
        articles = [(x.title, x.seqno, x.pubdate,
                     x.data.author_detail.get('name', '')
                         if 'author_detail' in x.data else '')
                    for x in articles]
        articles.sort(key=operator.itemgetter(1))
        articles = [(link(t, '/article/nav/markread/{}/{}'.format(feedid, n)),
                     a,
                     '{:%Y-%m-%d %H:%M}'.format(p),
                     link('X', '/feed/nav/markread/{}/{}'.format(feedid, n)))
                    for (t, n, p, a) in articles]
        yield table(('Title', 'Author', 'Published', ''), articles)
    yield navlinks()
def _article_nav(environ, respond, direction):
    """Redirect to the adjacent article in the feed, or to the feed list.

    direction: +1 for the next article, -1 for the previous one.  If no
    article exists at the adjacent seqno, redirects to '/'.
    Always raises Redirect.
    """
    feedid, seqno, feed, article = _get_article_from_args(environ)
    target = seqno + direction
    with dinsd.ns(fid=feedid, sno=seqno, nextsno=target):
        candidate = syn.db.r.articles.where('feedid==fid and seqno==nextsno')
    if candidate:
        raise Redirect('/article/nav/markread/{}/{}'.format(feedid, target))
    raise Redirect('/')
def listarticles(args):
    """Print the articles of the feed named by args.feedid.

    With args.all, shows the full column set; otherwise a short summary.
    Raises FeedmeError when the feed id is unknown.
    """
    with ns(wanted=args.feedid):
        articles = db.r.articles.where('feedid == wanted')
        # An empty article set is fine as long as the feed itself exists.
        if not articles and not db.r.feedlist.where('id == wanted'):
            raise FeedmeError("Unknown feed id {}".format(args.feedid))
    cols = (('guid', 'title', 'link', 'pubdate', 'read')
            if args.all
            else ('pubdate', 'title', 'read'))
    print((articles >> cols).display(*cols, sort=('pubdate')))
def _get_feed_from_args(environ):
    """Parse PATH_INFO as a feed id and return (feedid, feed_relation).

    Raises NotFound when PATH_INFO is not an integer or no feed with
    that id exists in the database.
    """
    raw = environ['PATH_INFO']
    try:
        feedid = int(raw)
    except ValueError as err:
        raise NotFound('Invalid feed id {}'.format(raw)) from err
    with dinsd.ns(feedid=feedid):
        feed = syn.db.r.feedlist.where('id == feedid')
    if not feed:
        raise NotFound('Feed {} not found in DB'.format(feedid))
    return feedid, feed
def delfeed(args):
    """Interactively delete a feed and all of its articles.

    Shows the feed title and article count, asks for confirmation on
    stdin, and aborts unless the answer is exactly 'y'.
    """
    with ns(todel=args.feedid):
        count = len(db.r.articles.where("feedid == todel"))
        title = (~db.r.feedlist.where("id == todel")).title
        prompt = "Delete {!r} and {} articles? (y/n): ".format(title, count)
        if input(prompt) != 'y':
            print('aborting')
            return
        # Remove the articles first, then the feed record itself.
        db.r.articles.delete("feedid == todel")
        db.r.feedlist.delete("id == todel")
    print('Done.')
def pollfeed(args):
    """Fetch the feed identified by args.feedid and store any new articles.

    Raises FeedmeError for an unknown feed id; network/parse failures are
    reported to stdout and the poll is abandoned (best effort).
    """
    with ns(wanted=args.feedid):
        feed = db.r.feedlist.where("id == wanted")
    if not feed:
        raise FeedmeError("Unknown feed id {}".format(args.feedid))
    url = (~feed).url
    try:
        feedblob = feedparser.parse(url)
    except Exception as err:
        print("Unable to read feed {}: {}".format(url, err))
        return
    new_articles(args.feedid, feedblob)
def _check_row_constraint(self, relname, r, rw):
    """Validate row *rw* against all row constraints registered for *relname*.

    Evaluates the AND of all constraint expressions; on failure, re-checks
    each constraint individually so the error can name the specific one
    that failed.  Raises RowConstraintError on a violation.
    """
    constraints = self.row_constraints[relname]
    row_validator = ' and '.join(
        "({})".format(v) for v in constraints.values())
    if not row_validator:
        # No constraints registered for this relation: nothing to check.
        return
    with _dinsd.ns(self._system_ns.current):
        if eval(row_validator, _expns, rw._as_locals()):
            return
        # The combined expression failed; find the first individual
        # constraint (in sorted name order) that fails, for the message.
        for name, expr in sorted(constraints.items()):
            if not eval(expr, _expns, rw._as_locals()):
                raise RowConstraintError(relname, name, expr, rw)
        raise AssertionError("Expected failure did not happen")
def _get_article_from_args(environ):
    """Resolve PATH_INFO of the form 'feedid/seqno' to DB rows.

    Returns (feedid, seqno, feed_row, article_row).  Raises NotFound for
    malformed ids, an unknown feed, or a missing article.
    """
    args = environ['PATH_INFO']
    try:
        feedid, seqno = (int(part) for part in args.split('/'))
    except ValueError:
        # Covers both non-integer parts and a wrong number of parts.
        raise NotFound('Invalid articleid id {}'.format(args))
    with dinsd.ns(fid=feedid, sno=seqno):
        feed = ~syn.db.r.feedlist.where('id == fid')
        if not feed:
            raise NotFound('Feed {} not found in DB'.format(feedid))
        article = ~syn.db.r.articles.where('feedid==fid and seqno==sno')
        if not article:
            raise NotFound('article {} not found in DB'.format(args))
    return feedid, seqno, feed, article
def _check_row_constraints(self, relname, r):
    """Validate every row of relation *r* against *relname*'s row constraints.

    Selects the rows violating the AND of all constraints in one query;
    if any exist, identifies a single (constraint, row) pair to report.
    Raises RowConstraintError on a violation.
    """
    constraints = self.row_constraints[relname]
    row_validator = ' and '.join(
        "({})".format(v) for v in constraints.values())
    if not row_validator:
        # No constraints registered: trivially valid.
        return
    with _dinsd.ns(self._system_ns.current):
        invalid = r.where("not ({})".format(row_validator))
        if not invalid:
            return
        # Report one constraint and one row: more useful in an error
        # message than all of the constraints and all of the failed rows.
        rw = sorted(invalid)[0]
        for name, expr in sorted(constraints.items()):
            if not eval(expr, _expns, rw._as_locals()):
                raise RowConstraintError(relname, name, expr, rw)
        raise AssertionError("Expected failure did not happen")
def feedlist_content(environ):
    """Generate the HTML body for the feed-list (front) page.

    Shows each feed's unread count, title (linked to its article list),
    and URL.  With ?showall=1 in the query string, feeds with no unread
    articles are included as well.
    """
    settings = parse_qs(environ['QUERY_STRING'])
    # parse_qs values are lists, so any present 'showall' key is truthy.
    showall = settings.get('showall', False)
    with dinsd.ns(articles=syn.db.r.articles):
        feedlist = syn.db.r.feedlist.extend(unread=
            'len(articles.where("feedid=={} and not read".format(id)))')
    # Keep every feed when showing all; otherwise only feeds with unread > 0.
    selectfunc = (lambda x: True) if showall else (lambda x: x)
    feedlist = [(x.unread, x.title, x.id, x.url)
                for x in feedlist if selectfunc(x.unread)]
    feedlist.sort(key=operator.itemgetter(1))
    feedlist = [(u, link(t, '/feed/{}'.format(i)), url)
                for (u, t, i, url) in feedlist]
    yield table(('Unread', 'Title', 'URL'), feedlist)
    yield linktable(
        link('Refresh All', '/refresh'),
        link('Hide Read' if showall else 'Show All',
             # BUG FIX: the conditional must be parenthesized; without
             # parens the ternary bound looser than '+', producing the
             # bare relative '?showall=1' instead of '/?showall=1'.
             '/' + ('' if showall else '?showall=1')))
def addfeed(args):
    """Add a new feed by URL, record its metadata, and fetch its articles.

    Skips (with a message) URLs already in the database.  Records the
    feed's published date when available, otherwise notes it as unknown,
    then prints the new feed's details and pulls in its articles.
    """
    with ns(newurl=args.url):
        if db.r.feedlist.where('url == newurl'):
            print("Feed already exists in database:", args.url)
            return
    ids = db.r.feedlist.compute('id')
    # Highest existing feed id (0 when the table is empty), plus one.
    newid = functools.reduce(max, ids, next(ids, 0)) + 1
    try:
        f = feedparser.parse(args.url)
    except Exception as err:
        print("Unable to read feed {}: {}".format(args.url, err))
        return
    title = f.feed.get('title', '**Unknown Title**')
    subtitle = f.feed.get('subtitle')
    if subtitle is None:
        # Some feed formats use 'description' instead of 'subtitle'.
        subtitle = f.feed.get('description', '')
    db.r.feedlist.insert(~row(id=newid, url=args.url, title=title,
                              subtitle=subtitle))
    pubtuple = f.feed.get('published_parsed')
    if pubtuple:
        # BUG FIX: struct_time[:6] is (year, month, day, hour, minute,
        # second); the previous [:7] passed tm_wday (0-6) as datetime's
        # microsecond argument, producing subtly wrong timestamps.
        published = datetime(*pubtuple[:6])
        db.r.published.insert(~row(id=newid, published=published))
    else:
        db.r.published_unknown.insert(~row(id=newid))
    print('Added new feed:')
    print((~row(id=newid, url=args.url, title=title, subtitle=subtitle,
                published=str(published) if pubtuple else '',
                )
           ).display('id', 'title', 'subtitle', 'published', 'url',
                     )
          )
    new_articles(newid, f)
def article_body(article):
    """Generate the HTML body for a single article page.

    Yields a width-limited div containing a byline (with unread count for
    the article's feed), then the article content or its summary.
    """
    with dinsd.ns(fid=article.feedid):
        unreadcount = len(syn.db.r.articles.where('feedid==fid and not read'))
    unreadlabel = '({} unread)'.format(unreadcount)
    yield '<div style="max-width:8in">'
    # XXX: Do 'today' and 'yesterday' and weekdays
    data = article.data
    if 'author_detail' in data and 'name' in data.author_detail:
        author = ' by ' + data.author_detail.name
    else:
        author = ''
    byline = 'Posted {:%Y-%m-%d %H:%M}{}'.format(article.pubdate, author)
    for line in linktable(byline, '', unreadlabel):
        yield ' ' + line
    if 'content' in data:
        # Lead with a short text-only summary before the full content.
        summary = data.summary
        if len(summary) < 200 and '<img' not in summary:
            yield ' <em>{}</em>'.format(summary)
        for line in data.content[0].value.splitlines():
            yield ' ' + line
    else:
        for line in data.summary.splitlines():
            yield ' ' + line
    yield '</div>'
def _change_article_read(environ, respond, changefunc, successurl):
    """Apply *changefunc* to one article's 'read' flag, then redirect.

    changefunc: callable mapping the current read value to the new one.
    successurl: format string taking {feedid} and {seqno}.
    Always raises Redirect to the formatted success URL.
    """
    feedid, seqno, _, _ = _get_article_from_args(environ)
    target = successurl.format(feedid=feedid, seqno=seqno)
    with dinsd.ns(fid=feedid, sno=seqno, changefunc=changefunc):
        syn.db.r.articles.update('feedid==fid and seqno==sno',
                                 read="changefunc(read)")
    raise Redirect(target)