def test_parse_page(self):
    """Verify parsing a static HTML Bugzilla page.

    Feeds a saved Wine advanced-query page through parsePage() and
    checks that the product and one of its components are extracted
    into the scraper's products dict.
    """
    self.scraper = BugzillaRemoteComponentScraper(
        base_url="http://bugs.wine.org")
    page_text = read_test_file("bugzilla-wine-advanced-query.html")
    self.scraper.parsePage(page_text)

    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn(u'Wine', self.scraper.products)
    # Renamed from the misleading 'xorg' (copy-paste leftover from a
    # different tracker's test) — this is the Wine product entry.
    wine = self.scraper.products['Wine']
    self.assertIn(u'ole', wine['components'])
def test_url_correction(self):
    """Base URLs are normalized and the query URL is derived from them."""
    scraper = BugzillaRemoteComponentScraper(
        base_url="http://bugzilla.sample.com/")
    expected_base = "http://bugzilla.sample.com"
    expected_query = "http://bugzilla.sample.com/query.cgi?format=advanced"

    # The trailing slash on the supplied base_url must be stripped.
    self.assertEqual(scraper.base_url, expected_base)
    # The advanced-query cgi string is generated from the stripped base.
    self.assertEqual(scraper.url, expected_query)
def test_store(self):
    """Check that already-parsed data gets stored to database.

    Builds a scraper pre-loaded with synthetic products/components
    (bypassing any page parsing), stores them against a fresh
    Launchpad bug tracker, and verifies the stored component groups
    and components round-trip correctly.
    """
    lp_bugtracker = self.factory.makeBugTracker()
    transaction.commit()

    # Set up a remote bug tracker with synthetic data: two product
    # groups holding three and one components respectively.
    bz_bugtracker = BugzillaRemoteComponentScraper(
        base_url="http://bugzilla.example.org")
    bz_bugtracker.products = {
        u'alpha': {
            'name': u'alpha',
            'components': {
                u'1': {'name': u'1'},
                u'2': {'name': u'2'},
                u'3': {'name': u'3'},
            },
            'versions': None,
        },
        u'beta': {
            'name': u'beta',
            'components': {
                u'4': {'name': u'4'},
            },
            'versions': None,
        },
    }

    finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
    finder.storeRemoteProductsAndComponents(bz_bugtracker, lp_bugtracker)

    # Verify the data got stored properly: one group per product,
    # with the expected component counts.
    comp_groups = lp_bugtracker.getAllRemoteComponentGroups()
    self.assertEqual(2, len(list(comp_groups)))
    comp_group = lp_bugtracker.getRemoteComponentGroup(u'alpha')
    self.assertEqual(3, len(list(comp_group.components)))
    comp_group = lp_bugtracker.getRemoteComponentGroup(u'beta')
    self.assertEqual(1, len(list(comp_group.components)))

    # Looking up a component that was never stored returns None.
    # (Lookup key typo fixed: 'non-existant' -> 'nonexistent'; any
    # absent key is equivalent here.)  assertIsNone is the idiomatic
    # form of assertIs(None, ...).
    comp = comp_group.getComponent(u'nonexistent')
    self.assertIsNone(comp)
    comp = comp_group.getComponent(u'4')
    self.assertEqual(u'4', comp.name)