def test_urlwithfrag(self):
    """geturl() must preserve the fragment part of the requested URL."""
    target = "http://docs.python.org/2/glossary.html#glossary"
    with support.transient_internet(target):
        request = urllib_request.Request(target)
        response = urllib_request.urlopen(request)
        # The fragment survives the round trip through urlopen.
        self.assertEqual(response.geturl(),
                         "http://docs.python.org/2/glossary.html#glossary")
def test_ftp_timeout(self):
    """An explicit timeout passed to urlopen must reach the FTP socket."""
    with support.transient_internet(self.FTP_HOST):
        handle = _urlopen_with_retry(self.FTP_HOST, timeout=60)
        self.addCleanup(handle.close)
        # The raw socket sits one wrapper deeper on Python 3.
        raw_sock = handle.fp.fp._sock if utils.PY2 else handle.fp.fp.raw._sock
        self.assertEqual(raw_sock.gettimeout(), 60)
def test_close(self):
    """Calling .close() on a response object must close the underlying socket."""
    url = "http://www.python.org/"
    with support.transient_internet(url):
        response = _urlopen_with_retry(url)
        underlying = response.fp
        # Open before close, closed after.
        self.assertFalse(underlying.closed)
        response.close()
        self.assertTrue(underlying.closed)
def test_ftp_basic(self):
    """With no global default timeout, the FTP socket must have none either."""
    self.assertIsNone(socket.getdefaulttimeout())
    with support.transient_internet(self.FTP_HOST, timeout=None):
        handle = _urlopen_with_retry(self.FTP_HOST)
        self.addCleanup(handle.close)
        # The raw socket sits one wrapper deeper on Python 3.
        raw_sock = handle.fp.fp._sock if utils.PY2 else handle.fp.fp.raw._sock
        self.assertIsNone(raw_sock.gettimeout())
def test_http_timeout(self):
    """An explicit timeout passed to urlopen must reach the HTTP socket."""
    url = "http://www.python.org"
    with support.transient_internet(url):
        handle = _urlopen_with_retry(url, timeout=120)
        self.addCleanup(handle.close)
        # Python 2 exposes the socket directly on fp; Python 3 via fp.raw.
        raw_sock = handle.fp._sock if utils.PY2 else handle.fp.raw._sock
        self.assertEqual(raw_sock.gettimeout(), 120)
def test_http_basic(self):
    """With no global default timeout, the HTTP socket must have none either."""
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.python.org"
    with support.transient_internet(url, timeout=None):
        handle = _urlopen_with_retry(url)
        self.addCleanup(handle.close)
        # Python 2 exposes the socket directly on fp; Python 3 via fp.raw.
        raw_sock = handle.fp._sock if utils.PY2 else handle.fp.raw._sock
        self.assertIsNone(raw_sock.gettimeout())
def test_custom_headers(self):
    """Opening a request adds default headers; custom headers override them."""
    url = "http://www.example.com"
    with support.transient_internet(url):
        opener = urllib_request.build_opener()
        request = urllib_request.Request(url)
        # A fresh request carries no headers yet.
        self.assertFalse(request.header_items())
        opener.open(request)
        # The opener filled in headers, including a default User-agent.
        self.assertTrue(request.header_items())
        self.assertTrue(request.has_header('User-agent'))
        # An explicitly added header survives a second open.
        request.add_header('User-Agent', 'Test-Agent')
        opener.open(request)
        self.assertEqual(request.get_header('User-agent'), 'Test-Agent')
def test_ftp_default_timeout(self):
    """The global default timeout must propagate to the FTP socket."""
    self.assertIsNone(socket.getdefaulttimeout())
    with support.transient_internet(self.FTP_HOST):
        socket.setdefaulttimeout(60)
        try:
            handle = _urlopen_with_retry(self.FTP_HOST)
            self.addCleanup(handle.close)
        finally:
            # Always restore the process-wide default for later tests.
            socket.setdefaulttimeout(None)
        # The raw socket sits one wrapper deeper on Python 3.
        raw_sock = handle.fp.fp._sock if utils.PY2 else handle.fp.fp.raw._sock
        self.assertEqual(raw_sock.gettimeout(), 60)
def test_http_default_timeout(self):
    """The global default timeout must propagate to the HTTP socket."""
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.python.org"
    with support.transient_internet(url):
        socket.setdefaulttimeout(60)
        try:
            handle = _urlopen_with_retry(url)
            self.addCleanup(handle.close)
        finally:
            # Always restore the process-wide default for later tests.
            socket.setdefaulttimeout(None)
        # Python 2 exposes the socket directly on fp; Python 3 via fp.raw.
        raw_sock = handle.fp._sock if utils.PY2 else handle.fp.raw._sock
        self.assertEqual(raw_sock.gettimeout(), 60)
def test_sni(self):
    """Server Name Indication end-to-end check (currently disabled)."""
    self.skipTest("test disabled - test server needed")
    # Everything below is unreachable until a suitable test server exists.
    # It checks that SNI works when the linked OpenSSL supports it; the
    # ssl module has no server-side SNI support, so a third-party site
    # would be needed (placeholder URL left as "XXX").
    expect_sni = ssl.HAS_SNI
    with support.transient_internet("XXX"):
        response = urllib_request.urlopen("XXX")
        contents = response.readall()
        if not expect_sni:
            self.assertNotIn(b"Great", contents)
            self.assertIn(b"Unfortunately", contents)
        else:
            self.assertIn(b"Great", contents)
            self.assertNotIn(b"Unfortunately", contents)
def _test_urls(self, urls, handlers, retry=True):
    """Open every URL in *urls* through an opener built from *handlers*.

    Each entry in *urls* is either a URL string or a
    ``(url, request, expected_err)`` tuple; when *expected_err* is given,
    opening the URL must raise that exception type.  Timeouts are
    reported to stderr and skipped instead of failing.  When *retry* is
    true, each open is retried (up to three times) on URLError.
    """
    import time
    import logging
    debug = logging.getLogger("test_urllib2").debug

    urlopen = urllib_request.build_opener(*handlers).open
    if retry:
        urlopen = _wrap_with_retry_thrice(urlopen, urllib_error.URLError)

    for url in urls:
        if isinstance(url, tuple):
            url, req, expected_err = url
        else:
            req = expected_err = None

        with support.transient_internet(url):
            debug(url)
            try:
                f = urlopen(url, req, TIMEOUT)
            except EnvironmentError as err:
                debug(err)
                if expected_err:
                    msg = ("Didn't get expected error(s) %s for %s %s, "
                           "got %s: %s" %
                           (expected_err, url, req, type(err), err))
                    self.assertIsInstance(err, expected_err, msg)
            except urllib_error.URLError as err:
                # NOTE(review): URLError subclasses EnvironmentError on
                # both Python 2 and 3, so this handler is effectively
                # shadowed by the one above; order kept to preserve the
                # original behavior.
                # Fixed: use err.reason, not err[0] -- exceptions are
                # not indexable on Python 3 (TypeError if ever reached).
                if isinstance(err.reason, socket.timeout):
                    print("<timeout: %s>" % url, file=sys.stderr)
                    continue
                else:
                    raise
            else:
                try:
                    with support.time_out:
                        with support.socket_peer_reset:
                            with support.ioerror_peer_reset:
                                buf = f.read()
                                debug("read %d bytes" % len(buf))
                except socket.timeout:
                    print("<timeout: %s>" % url, file=sys.stderr)
                f.close()
            debug("******** next url coming up...")
            # Brief pause so we don't hammer the remote servers.
            time.sleep(0.1)
def test_sites_no_connection_close(self):
    """Sites that omit the Connection: close header must still work (#12576)."""
    URL = 'http://www.imdb.com'  # mangles Connection:close
    with support.transient_internet(URL):
        try:
            with urllib_request.urlopen(URL) as res:
                pass
        except ValueError:
            # Fixed message: the original literal used a backslash line
            # continuation, embedding stray whitespace in the failure text.
            self.fail("urlopen failed for site not sending "
                      "Connection: close")
        else:
            self.assertTrue(res)

        req = urllib_request.urlopen(URL)
        res = req.read()
        self.assertTrue(res)