def test_custom_headers(self):
    url = "http://www.example.com"
    with support.transient_internet(url):
        opener = urllib_request.build_opener()
        request = urllib_request.Request(url)
        self.assertFalse(request.header_items())
        opener.open(request)
        self.assertTrue(request.header_items())
        self.assertTrue(request.has_header('User-agent'))
        # Request.add_header() capitalizes header names, so a header added
        # as 'User-Agent' is stored and looked up as 'User-agent'.
        request.add_header('User-Agent', 'Test-Agent')
        opener.open(request)
        self.assertEqual(request.get_header('User-agent'), 'Test-Agent')
def setUp(self):
    super(ProxyAuthTests, self).setUp()
    self.digest_auth_handler = DigestAuthHandler()
    self.digest_auth_handler.set_users({self.USER: self.PASSWD})
    self.digest_auth_handler.set_realm(self.REALM)

    # Bind the digest handler into the request-handler class the loopback
    # server will instantiate for each connection.
    def create_fake_proxy_handler(*args, **kwargs):
        return FakeProxyHandler(self.digest_auth_handler, *args, **kwargs)

    self.server = LoopbackHttpServerThread(create_fake_proxy_handler)
    self.server.start()
    self.server.ready.wait()
    proxy_url = "http://127.0.0.1:%d" % self.server.port
    handler = urllib_request.ProxyHandler({"http": proxy_url})
    self.proxy_digest_handler = urllib_request.ProxyDigestAuthHandler()
    self.opener = urllib_request.build_opener(
        handler, self.proxy_digest_handler)
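# setUp() starts a LoopbackHttpServerThread, so a matching tearDown should
# shut it down to avoid leaking the thread between tests. This is a minimal
# sketch that assumes the server thread exposes a stop() method.
def tearDown(self):
    self.server.stop()
    super(ProxyAuthTests, self).tearDown()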
def _test_urls(self, urls, handlers, retry=True):
    import time
    import logging
    debug = logging.getLogger("test_urllib2").debug

    urlopen = urllib_request.build_opener(*handlers).open
    if retry:
        urlopen = _wrap_with_retry_thrice(urlopen, urllib_error.URLError)

    for url in urls:
        if isinstance(url, tuple):
            url, req, expected_err = url
        else:
            req = expected_err = None
        with support.transient_internet(url):
            debug(url)
            try:
                f = urlopen(url, req, TIMEOUT)
            except EnvironmentError as err:
                debug(err)
                # urllib_error.URLError is a subclass of EnvironmentError,
                # so a separate "except URLError" clause after this one
                # would never run; handle socket timeouts here instead.
                # (Indexing the exception, err[0], also fails on Python 3.)
                if isinstance(getattr(err, 'reason', None), socket.timeout):
                    print("<timeout: %s>" % url, file=sys.stderr)
                    continue
                if expected_err:
                    msg = ("Didn't get expected error(s) %s for %s %s, "
                           "got %s: %s" %
                           (expected_err, url, req, type(err), err))
                    self.assertIsInstance(err, expected_err, msg)
            else:
                try:
                    with support.time_out, \
                         support.socket_peer_reset, \
                         support.ioerror_peer_reset:
                        buf = f.read()
                        debug("read %d bytes" % len(buf))
                except socket.timeout:
                    print("<timeout: %s>" % url, file=sys.stderr)
                f.close()
        debug("******** next url coming up...")
        time.sleep(0.1)
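# _test_urls() assumes a module-level retry wrapper. If the surrounding
# module does not already define one, a minimal sketch looks like this:
# call the wrapped function up to three times, re-raising the last
# exception if every attempt fails with the given exception type.
def _retry_thrice(func, exc, *args, **kwargs):
    for i in range(3):
        try:
            return func(*args, **kwargs)
        except exc as e:
            last_exc = e
    raise last_exc

def _wrap_with_retry_thrice(func, exc):
    def wrapped(*args, **kwargs):
        return _retry_thrice(func, exc, *args, **kwargs)
    return wrapped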