    def eval(self, name):
        # read the test case
        try:
            testcase = open(testfiles % name)
            data = testcase.read()
            description, expect = self.desc_re.search(data).groups()
            testcase.close()
        except Exception:
            raise RuntimeError("can't parse %s" % name)

        # parse and reconstitute to a string
        work = StringIO.StringIO()
        results = feedparser.parse(data)
        scrub(testfiles % name, results)
        reconstitute(results, results.entries[0]).writexml(work)

        # verify the results
        results = feedparser.parse(work.getvalue().encode('utf-8'))
        if 'illegal' not in name:
            self.assertFalse(results.bozo, 'xml is well formed')
        if not self.simple_re.match(expect):
            self.assertTrue(eval(expect, results.entries[0]), expect)
        else:
            lhs, rhs = self.simple_re.match(expect).groups()
            self.assertEqual(eval(rhs), eval(lhs, results.entries[0]))
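
    # The eval() helper above assumes the enclosing test case also defines a
    # `testfiles` path template and two regexes: `desc_re`, which pulls the
    # description and the expected expression out of the test file's contents,
    # and `simple_re`, which recognizes the simple "lhs == rhs" form of that
    # expression. A minimal sketch of such definitions (the values and
    # patterns here are illustrative assumptions, not the project's exact
    # ones):
    #
    #     testfiles = 'tests/data/scrub/%s.xml'
    #     desc_re = re.compile(r'Description:\s*(.*?)\s*Expect:\s*(.*?)\s*-->', re.S)
    #     simple_re = re.compile(r'^(\S+) == (.*)$')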
    def test_scrub_xmlbase(self):
        base = feedparser.parse(feed)
        self.assertEqual('http://example.com/',
             base.entries[0].title_detail.base)

        config.parser.readfp(StringIO.StringIO(configData))
        config.parser.set('testfeed', 'xml_base', 'feed_alternate')
        data = deepcopy(base)
        scrub('testfeed', data)
        self.assertEqual('http://example.com/feed/',
             data.entries[0].title_detail.base)

        config.parser.set('testfeed', 'xml_base', 'entry_alternate')
        data = deepcopy(base)
        scrub('testfeed', data)
        self.assertEqual('http://example.com/entry/1/',
             data.entries[0].title_detail.base)

        config.parser.set('testfeed', 'xml_base', 'base/')
        data = deepcopy(base)
        scrub('testfeed', data)
        self.assertEqual('http://example.com/base/',
             data.entries[0].title_detail.base)

        config.parser.set('testfeed', 'xml_base', 'http://example.org/data/')
        data = deepcopy(base)
        scrub('testfeed', data)
        self.assertEqual('http://example.org/data/',
             data.entries[0].title_detail.base)
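
    # As the assertions above suggest, the per-feed `xml_base` option steers
    # how scrub() rewrites each construct's xml:base: 'feed_alternate' rebases
    # on the feed-level alternate link, 'entry_alternate' on the entry's own
    # alternate link, a relative value such as 'base/' is resolved against the
    # existing base, and an absolute URI replaces it outright.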
    def test_scrub_future(self):
        base = feedparser.parse(feed)
        self.assertEqual(1, len(base.entries))
        self.assertTrue('updated' in base.entries[0])

        config.parser.readfp(StringIO.StringIO(configData))
        config.parser.set('testfeed', 'future_dates', 'ignore_date')
        data = deepcopy(base)
        scrub('testfeed', data)
        self.assertFalse('updated' in data.entries[0])

        config.parser.set('testfeed', 'future_dates', 'ignore_entry')
        data = deepcopy(base)
        scrub('testfeed', data)
        self.assertEqual(0, len(data.entries))
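
    # Inferred from the assertions above: with `future_dates = ignore_date`,
    # scrub() strips the future 'updated' date but keeps the entry, while
    # `future_dates = ignore_entry` drops the future-dated entry altogether.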
    def test_scrub_type(self):
        base = feedparser.parse(feed)

        self.assertEqual('Föo', base.feed.author_detail.name)

        config.parser.readfp(StringIO.StringIO(configData))
        data = deepcopy(base)
        scrub('testfeed', data)

        self.assertEqual('F\xc3\xb6o', data.feed.author_detail.name)
        self.assertEqual('F\xc3\xb6o', data.entries[0].author_detail.name)
        self.assertEqual('F\xc3\xb6o', data.entries[0].source.author_detail.name)

        self.assertEqual('text/html', data.entries[0].title_detail.type)
        self.assertEqual('text/html', data.entries[0].summary_detail.type)
        self.assertEqual('text/html', data.entries[0].content[0].type)
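
    # The behaviour checked above comes from the `configData` fixture and/or
    # scrub()'s defaults, since no per-test option is set here: author names
    # come back as UTF-8 byte strings, and the title, summary and content
    # types are normalized to text/html.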
    def test_scrub_ignore(self):
        base = feedparser.parse(feed)

        self.assertTrue('author' in base.entries[0])
        self.assertTrue('author_detail' in base.entries[0])
        self.assertTrue('id' in base.entries[0])
        self.assertTrue('updated' in base.entries[0])
        self.assertTrue('updated_parsed' in base.entries[0])
        self.assertTrue('language' in base.entries[0].summary_detail)

        config.parser.readfp(StringIO.StringIO(configData))
        config.parser.set('testfeed', 'ignore_in_feed',
                          'author id updated xml:lang')
        data = deepcopy(base)
        scrub('testfeed', data)

        self.assertFalse('author' in data.entries[0])
        self.assertFalse('author_detail' in data.entries[0])
        self.assertFalse('id' in data.entries[0])
        self.assertFalse('updated' in data.entries[0])
        self.assertFalse('updated_parsed' in data.entries[0])
        self.assertFalse('language' in data.entries[0].summary_detail)
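
    # Inferred from the assertions above: `ignore_in_feed` takes a
    # space-separated list of elements for scrub() to strip. Removing 'author'
    # also drops the derived author_detail, removing 'updated' also drops
    # updated_parsed, and 'xml:lang' clears the language from detail
    # constructs such as summary_detail.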
    # Nested helper: relies on `self.url` from the enclosing method's scope.
    # It scrubs the parsed feed data in place and hands the same object back.
    def doScrub(data):
        scrub.scrub(self.url, data)
        return data