def test_parse_page(self):
    """Verify parsing a static html bugzilla page.

    NOTE(review): this ``def`` sits at module level yet takes ``self``;
    it looks like it was pasted out of its TestCase class — confirm and
    move it back inside the class if so.
    """
    self.scraper = BugzillaRemoteComponentScraper(
        base_url="http://bugs.wine.org")
    page_text = read_test_file("bugzilla-wine-advanced-query.html")
    self.scraper.parsePage(page_text)
    # assertIn gives a clearer failure message than assertTrue(x in y).
    self.assertIn(u'Wine', self.scraper.products)
    wine = self.scraper.products['Wine']
    self.assertIn(u'ole', wine['components'])
class TestBugzillaRemoteComponentScraper(TestCaseWithFactory):
    """Tests for the Bugzilla remote component scraper itself."""

    layer = DatabaseFunctionalLayer

    def test_url_correction(self):
        """The base URL is normalized and the query URL derived from it."""
        scraper = BugzillaRemoteComponentScraper(
            base_url="http://bugzilla.sample.com/")

        # Trailing slashes are stripped from the URL.
        self.assertEqual(
            scraper.base_url,
            "http://bugzilla.sample.com")

        # Query cgi string is generated from the base_url.
        self.assertEqual(
            scraper.url,
            "http://bugzilla.sample.com/query.cgi?format=advanced")

    def test_dict_from_csv(self):
        """Test conversion of various CSV strings parse correctly."""
        data = [
            ("'foo'",        {'foo':     {'name': 'foo'}}),
            ("'B_A_R'",      {'B_A_R':   {'name': 'B_A_R'}}),
            ("'b@z'",        {'b@z':     {'name': 'b@z'}}),
            ("'b\\!ah'",     {'b!ah':    {'name': 'b!ah'}}),
            ("42",           {'42':      {'name': '42'}}),
            ("''",           {'':        {'name': ''}}),
            (u"uni",         {'uni':     {'name': 'uni'}}),
            ("'a', 'b','c'", {'a':       {'name': 'a'},
                              'b':       {'name': 'b'},
                              'c':       {'name': 'c'},
                              }),
            ]
        # Unpack directly in the loop header; no need for a temp tuple.
        for key, truth_dict in data:
            self.assertEqual(dictFromCSV(key), truth_dict)

    def test_parse_page(self):
        """Verify parsing a static html bugzilla page."""
        self.scraper = BugzillaRemoteComponentScraper(
            base_url="http://bugs.wine.org")
        page_text = read_test_file("bugzilla-wine-advanced-query.html")
        self.scraper.parsePage(page_text)
        # assertIn gives a clearer failure message than assertTrue(x in y).
        self.assertIn(u'Wine', self.scraper.products)
        wine = self.scraper.products['Wine']
        self.assertIn(u'ole', wine['components'])
class TestBugzillaRemoteComponentScraper(TestCaseWithFactory):
    """Tests for the Bugzilla remote component scraper.

    NOTE(review): this class name is defined more than once in this file;
    later definitions shadow earlier ones, so only one copy's tests run.
    Deduplicate.
    """

    layer = DatabaseFunctionalLayer

    def test_url_correction(self):
        """The base URL is normalized and the query URL derived from it."""
        scraper = BugzillaRemoteComponentScraper(
            base_url="http://bugzilla.sample.com/")

        # Trailing slashes are stripped from the URL.
        self.assertEqual(
            scraper.base_url,
            "http://bugzilla.sample.com")

        # Query cgi string is generated from the base_url.
        self.assertEqual(
            scraper.url,
            "http://bugzilla.sample.com/query.cgi?format=advanced")

    def test_dict_from_csv(self):
        """Test conversion of various CSV strings parse correctly."""
        data = [
            ("'foo'",        {'foo':     {'name': 'foo'}}),
            ("'B_A_R'",      {'B_A_R':   {'name': 'B_A_R'}}),
            ("'b@z'",        {'b@z':     {'name': 'b@z'}}),
            ("'b\\!ah'",     {'b!ah':    {'name': 'b!ah'}}),
            ("42",           {'42':      {'name': '42'}}),
            ("''",           {'':        {'name': ''}}),
            (u"uni",         {'uni':     {'name': 'uni'}}),
            ("'a', 'b','c'", {'a':       {'name': 'a'},
                              'b':       {'name': 'b'},
                              'c':       {'name': 'c'},
                              }),
            ]
        # Unpack directly in the loop header; no need for a temp tuple.
        for key, truth_dict in data:
            self.assertEqual(dictFromCSV(key), truth_dict)

    def test_parse_page(self):
        """Verify parsing a static html bugzilla page."""
        self.scraper = BugzillaRemoteComponentScraper(
            base_url="http://bugs.wine.org")
        page_text = read_test_file("bugzilla-wine-advanced-query.html")
        self.scraper.parsePage(page_text)
        # assertIn gives a clearer failure message than assertTrue(x in y).
        self.assertIn(u'Wine', self.scraper.products)
        wine = self.scraper.products['Wine']
        self.assertIn(u'ole', wine['components'])
# Beispiel #4
# 0
# (separator text from the code-example site this file was scraped from —
# commented out so the file remains valid Python)
    def test_store(self):
        """Check that already-parsed data gets stored to database."""
        lp_bugtracker = self.factory.makeBugTracker()
        transaction.commit()

        # Set up remote bug tracker with synthetic, already-parsed data:
        # 'alpha' has three components, 'beta' has one.
        bz_bugtracker = BugzillaRemoteComponentScraper(
            base_url="http://bugzilla.example.org")
        bz_bugtracker.products = {
            u'alpha': {
                'name': u'alpha',
                'components': {
                    u'1': {'name': u'1'},
                    u'2': {'name': u'2'},
                    u'3': {'name': u'3'},
                },
                'versions': None,
            },
            u'beta': {
                'name': u'beta',
                'components': {
                    u'4': {'name': u'4'},
                },
                'versions': None,
            },
        }
        finder = BugzillaRemoteComponentFinder(logger=BufferLogger())
        finder.storeRemoteProductsAndComponents(bz_bugtracker, lp_bugtracker)

        # Verify the data got stored properly.
        comp_groups = lp_bugtracker.getAllRemoteComponentGroups()
        self.assertEqual(2, len(list(comp_groups)))
        comp_group = lp_bugtracker.getRemoteComponentGroup(u'alpha')
        self.assertEqual(3, len(list(comp_group.components)))
        comp_group = lp_bugtracker.getRemoteComponentGroup(u'beta')
        self.assertEqual(1, len(list(comp_group.components)))
        # Unknown component names yield None rather than raising.
        comp = comp_group.getComponent(u'non-existant')
        self.assertIsNone(comp)
        comp = comp_group.getComponent(u'4')
        self.assertEqual(u'4', comp.name)
 def test_parse_page(self):
     """Verify parsing a static html bugzilla page."""
     self.scraper = BugzillaRemoteComponentScraper(
         base_url="http://bugs.wine.org")
     page_text = read_test_file("bugzilla-wine-advanced-query.html")
     self.scraper.parsePage(page_text)
     # assertIn gives a clearer failure message than assertTrue(x in y).
     self.assertIn(u'Wine', self.scraper.products)
     wine = self.scraper.products['Wine']
     self.assertIn(u'ole', wine['components'])
    def test_store(self):
        """Check that already-parsed data gets stored to database."""
        lp_bugtracker = self.factory.makeBugTracker()
        transaction.commit()

        # Set up remote bug tracker with synthetic, already-parsed data:
        # 'alpha' has three components, 'beta' has one.
        bz_bugtracker = BugzillaRemoteComponentScraper(
            base_url="http://bugzilla.example.org")
        bz_bugtracker.products = {
            u'alpha': {
                'name': u'alpha',
                'components': {
                    u'1': {'name': u'1'},
                    u'2': {'name': u'2'},
                    u'3': {'name': u'3'},
                    },
                'versions': None,
                },
            u'beta': {
                'name': u'beta',
                'components': {
                    u'4': {'name': u'4'},
                    },
                'versions': None,
                },
            }
        finder = BugzillaRemoteComponentFinder(
            logger=BufferLogger())
        finder.storeRemoteProductsAndComponents(
            bz_bugtracker, lp_bugtracker)

        # Verify the data got stored properly.
        comp_groups = lp_bugtracker.getAllRemoteComponentGroups()
        self.assertEqual(2, len(list(comp_groups)))
        comp_group = lp_bugtracker.getRemoteComponentGroup(u'alpha')
        self.assertEqual(3, len(list(comp_group.components)))
        comp_group = lp_bugtracker.getRemoteComponentGroup(u'beta')
        self.assertEqual(1, len(list(comp_group.components)))
        # Unknown component names yield None rather than raising.
        comp = comp_group.getComponent(u'non-existant')
        self.assertIsNone(comp)
        comp = comp_group.getComponent(u'4')
        self.assertEqual(u'4', comp.name)
# Beispiel #7
# 0
# (separator text from the code-example site this file was scraped from —
# commented out so the file remains valid Python)
    def test_url_correction(self):
        """A trailing slash is stripped and the query URL derived from it."""
        component_scraper = BugzillaRemoteComponentScraper(
            base_url="http://bugzilla.sample.com/")

        # Trailing slashes are stripped from the URL.
        self.assertEqual(
            "http://bugzilla.sample.com", component_scraper.base_url)

        # Query cgi string is generated from the base_url.
        self.assertEqual(
            "http://bugzilla.sample.com/query.cgi?format=advanced",
            component_scraper.url)
 def __init__(self, error=None):
     """Initialize the test double with a fixed example base URL.

     :param error: optional error object stored on the instance;
         presumably raised later by an overridden method of this test
         double — confirm against the subclass body (not visible here).
     """
     # Explicit parent-class call; the enclosing class header is outside
     # this view, so the parent is named directly rather than via super().
     BugzillaRemoteComponentScraper.__init__(
         self, "http://www.example.com")
     self.error = error
 def __init__(self):
     """Initialize the test double with a fixed example base URL."""
     # Explicit parent-class call; the enclosing class header is outside
     # this view, so the parent is named directly rather than via super().
     BugzillaRemoteComponentScraper.__init__(
         self, "http://www.example.com")
# Beispiel #10
# 0
# (separator text from the code-example site this file was scraped from —
# commented out so the file remains valid Python)
 def __init__(self, error=None):
     """Initialize the test double with a fixed example base URL.

     :param error: optional error object stored on the instance;
         presumably raised later by an overridden method of this test
         double — confirm against the subclass body (not visible here).
     """
     # Explicit parent-class call; the enclosing class header is outside
     # this view, so the parent is named directly rather than via super().
     BugzillaRemoteComponentScraper.__init__(self, "http://www.example.com")
     self.error = error
# Beispiel #11
# 0
# (separator text from the code-example site this file was scraped from —
# commented out so the file remains valid Python)
 def __init__(self):
     """Initialize the test double with a fixed example base URL."""
     # Explicit parent-class call; the enclosing class header is outside
     # this view, so the parent is named directly rather than via super().
     BugzillaRemoteComponentScraper.__init__(self, "http://www.example.com")