def test_special_cases(self):
    feed = Feed('samruby', 'http://www.intertwingly.net/blog/index.atom')
    shutil.copy(
        pkg_resources.resource_filename(__name__, 'fixture/samruby.xml'),
        self.tmpdir)
    entries = feed.parse()
    self.assertEquals(
        0, entries[0].resolved_link.find('http://www.intertwingly.net/blog'))
def test_parse_entries(self):
    feed = Feed('ongoing', 'http://www.tbray.org/ongoing/ongoing.atom')
    shutil.copy(
        pkg_resources.resource_filename(__name__, 'fixture/ongoing.xml'),
        self.tmpdir)
    entries = feed.parse()
    self.assertEquals('Tim Bray', feed.author)
    self.assertEquals(20, len(entries))
    self.assertEquals('Moose Camp', entries[0].title)
    msg = entries[0].to_mail()
    # decode_header() yields (value, charset) pairs; charset is None for
    # unencoded headers, so fall back to ASCII when decoding.
    subject = decode_header(msg['Subject'])[0]
    self.assertEquals(
        'Moose Camp', subject[0].decode(subject[1] or 'ascii'))
    self.assertEquals('Tim Bray <rsspull@localhost>', msg['From'])
def test_not_updated_even_if_no_etag(self):
    # heise doesn't do ETag
    feed = Feed(
        'heisec', 'https://www.heise.de/security/rss/news-atom.xml')
    feed.download()
    self.assert_(feed.updated())
    feed.download()
    self.assert_(not feed.updated())
def test_download(self):
    feed = Feed('ongoing', 'http://www.tbray.org/ongoing/ongoing.atom')
    xml = os.path.join(self.tmpdir, 'ongoing.xml')
    feed.download()
    self.assert_(os.path.exists(xml))
    self.assert_(feed.updated())
    before = os.stat(xml).st_mtime
    feed.download()
    after = os.stat(xml).st_mtime
    self.assert_(not feed.updated())
    self.assertEquals(before, after)
def test_parse_opml(self):
    feeds = Feed.parseOPML(
        pkg_resources.resource_filename(__name__, 'fixture/feeds.opml'))
    self.assertEquals(3, len(feeds))
    self.assertEquals('ongoing', feeds[0].name)
    self.assertEquals(
        'http://www.tbray.org/ongoing/ongoing.atom', feeds[0].url)
    self.assertEquals('Trac_Example', feeds[2].name)
    self.assertEquals(('user', 'password'), feeds[2].auth)
    self.assert_(
        os.path.exists(os.path.join(self.tmpdir, 'Trac_Example.cache')))
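# A sketch of what ``fixture/feeds.opml`` plausibly contains, to make the
# assertions above easier to follow. The concrete outline attributes and the
# encoding of the ('user', 'password') credentials (shown here as URL
# userinfo) are assumptions, not taken from the actual fixture:
#
#   <opml version="1.0">
#     <body>
#       <outline text="ongoing"
#                xmlUrl="http://www.tbray.org/ongoing/ongoing.atom"/>
#       <outline text="..." xmlUrl="..."/>
#       <outline text="Trac_Example"
#                xmlUrl="http://user:password@.../timeline?format=rss"/>
#     </body>
#   </opml>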
def rsspull(confdir):
    confdir = os.path.expanduser(confdir)
    config = ConfigParser()
    config.read(os.path.join(confdir, 'config'))
    Feed.workdir = os.path.join(confdir, 'cache')
    Feed.target = os.path.expanduser(config.get('global', 'target'))
    Feed.target_type = os.path.expanduser(config.get('global', 'target_type'))
    ws.rsspull.util.setupLogging(
        os.path.expanduser(config.get('global', 'logfile')))
    log = logging.getLogger(__name__)
    log.info('Reading feed configuration from %s' % confdir)
    feeds = Feed.parseOPML(os.path.join(confdir, 'feeds.opml'))
    worker_count = config.getint('global', 'workers')
    if worker_count > 1:
        rsspull_parallel(feeds, worker_count)
    else:
        rsspull_serial(feeds)
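# A minimal example of the INI-style ``config`` file that rsspull() reads,
# covering only the keys queried above; all values are hypothetical:
#
#   [global]
#   target = ~/Maildir/feeds
#   target_type = maildir
#   logfile = ~/.rsspull/rsspull.log
#   workers = 4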
def create():
    feed = Feed('ongoing', 'http://www.tbray.org/ongoing/ongoing.atom')
    feed.author = None
    entry = Entry(feed, MockItem())
    return feed, entry
def test_download_error(self):
    feed = Feed('ongoing', 'urks://')
    self.assertRaises(RuntimeError, feed.download)