def test_cookies(self):
     '''
     Test cookie getting and setting.
     '''
     scraper = WebScraper(None, None, TEST_BASE)
     cookies = scraper.get_cookies()
     scraper.set_cookies(cookies)
     self.assertEqual(len(cookies), 0)
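These snippets are test methods pulled out of their module, so the shared scaffolding is not visible here. The sketch below is an assumption rather than the project's actual test module: the import paths, the TEST_BASE value, the class name and the httpretty activation are guesses based only on the names the examples use.

# Hypothetical scaffolding for the snippets on this page (Python 3 imports).
# threading, HTTPServer and suseapi.browser are only needed by the timeout
# examples further down.
import threading
import unittest
from http.server import HTTPServer

import httpretty

import suseapi.browser
from suseapi.browser import WebScraper, WebScraperError

# Guessed base URL; any value works because httpretty intercepts the requests.
TEST_BASE = 'http://webscraper.example.com'


class WebScraperTest(unittest.TestCase):
    # The httpretty-backed tests also need interception switched on, for
    # instance with the @httpretty.activate decorator shown here (whatever
    # decorators the original methods carry are not part of the snippets).
    @httpretty.activate
    def test_basic(self):
        httpretty.register_uri(httpretty.GET,
                               '{0}/{1}'.format(TEST_BASE, 'action'),
                               body='TEST')
        scraper = WebScraper(None, None, TEST_BASE)
        self.assertEqual('TEST', scraper.request('action').read())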
Example #2
 def test_basic(self):
     '''
     Test basic operation.
     '''
     httpretty.register_uri(httpretty.GET,
                            '{0}/{1}'.format(TEST_BASE, 'action'),
                            body='TEST')
     scraper = WebScraper(None, None, TEST_BASE)
     self.assertEqual('TEST', scraper.request('action').read())
Example #3
 def test_basic(self):
     '''
     Test basic operation.
     '''
     httpretty.register_uri(httpretty.GET,
                            '{0}/{1}'.format(TEST_BASE, 'action'),
                            body='TEST')
     scraper = WebScraper(None, None, TEST_BASE, transport='urllib3')
     self.assertEqual('TEST', scraper.request('action').unicode_body())
Example #4
 def test_basic(self):
     httpretty.register_uri(
         httpretty.GET,
         '{0}/{1}'.format(TEST_BASE, 'action'),
         body='TEST'
     )
     scraper = WebScraper(None, None, TEST_BASE)
     self.assertEqual(
         'TEST',
         scraper.request('action').read()
     )
Example #5
 def test_error(self):
     '''
     Test error handling.
     '''
     httpretty.register_uri(httpretty.GET,
                            '{0}/{1}'.format(TEST_BASE, '404'),
                            status=404)
     scraper = WebScraper(None, None, TEST_BASE)
     self.assertRaises(WebScraperError, scraper.request, '404')
Example #6
 def test_error(self):
     '''
     Test error handling.
     '''
     httpretty.register_uri(httpretty.GET,
                            '{0}/{1}'.format(TEST_BASE, '500'),
                            status=500)
     scraper = WebScraper(None, None, TEST_BASE, transport='urllib3')
     self.assertRaises(WebScraperError, scraper.request, '500')
Example #7
 def test_timeout(self):
     '''
     Test timeout handling for stale requests.
     '''
     original_timeout = suseapi.browser.DEFAULT_TIMEOUT
     suseapi.browser.DEFAULT_TIMEOUT = 0.5
     server = HTTPServer(('localhost', 8888), TimeoutHTTPHandler)
     server_thread = threading.Thread(target=server.serve_forever)
     server_thread.daemon = False
     server_thread.start()
     try:
         scraper = WebScraper(None, None, 'http://localhost:8888')
         scraper.request('foo')
         scraper.browser.select_form(nr=0)
         self.assertRaises(WebScraperError, scraper.submit)
         self.assertRaises(WebScraperError, scraper.request, 'bar?')
     finally:
         suseapi.browser.DEFAULT_TIMEOUT = original_timeout
         server.shutdown()
         server_thread.join()
Example #9
 def test_timeout(self):
     '''
     Test timeout handling for stale requests.
     '''
     original_timeout = suseapi.browser.DEFAULT_TIMEOUT
     suseapi.browser.DEFAULT_TIMEOUT = 0.1
     server = HTTPServer(('localhost', 0), TimeoutHTTPHandler)
     port = server.server_address[1]
     server_thread = threading.Thread(target=server.serve_forever)
     server_thread.daemon = False
     server_thread.start()
     try:
         scraper = WebScraper(None,
                              None,
                              'http://localhost:%d' % port,
                              transport='urllib3')
         scraper.request('foo')
         scraper.browser.doc.choose_form(number=0)
         self.assertRaises(WebScraperError, scraper.submit)
         self.assertRaises(WebScraperError, scraper.request, 'bar?')
     finally:
         suseapi.browser.DEFAULT_TIMEOUT = original_timeout
         server.shutdown()
         server_thread.join()
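The timeout examples rely on a TimeoutHTTPHandler that none of the snippets define. Below is a hypothetical stand-in consistent with how those tests flow: the initial request for 'foo' is answered immediately with a small form, while the form submission and the follow-up 'bar?' request stall past the lowered DEFAULT_TIMEOUT, so submit() and request() raise WebScraperError. The real handler may well do this differently.

import time
from http.server import BaseHTTPRequestHandler


class TimeoutHTTPHandler(BaseHTTPRequestHandler):
    # Hypothetical stand-in for the handler used by the timeout tests above:
    # requests whose path contains 'bar' (the form submission and the final
    # request) sleep well past the 0.1-0.5 s DEFAULT_TIMEOUT the tests set,
    # everything else is answered right away with a tiny form.
    def do_GET(self):
        if 'bar' in self.path:
            time.sleep(2)  # longer than the client timeout, so the request fails
        try:
            self.send_response(200)
            self.send_header('Content-Type', 'text/html')
            self.end_headers()
            self.wfile.write(
                b'<html><body>'
                b'<form method="get" action="bar"><input type="submit"/></form>'
                b'</body></html>'
            )
        except (BrokenPipeError, ConnectionResetError):
            pass  # the client already gave up, which is what the tests expect

    do_POST = do_GET

    def log_message(self, format, *args):
        # Silence the default per-request logging to keep test output readable.
        pass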
Example #10
 def test_cookies(self):
     scraper = WebScraper(None, None, TEST_BASE)
     cookies = scraper.get_cookies()
     scraper.set_cookies(cookies)
     self.assertEqual(len(cookies), 0)
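With the scaffolding sketched above, the collected methods can be pasted into the hypothetical WebScraperTest class and driven by the standard unittest runner, either with python -m unittest or through the usual module entry point:

if __name__ == '__main__':
    unittest.main()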