def test_connect_using_sslcontext_verified(self):
    with support.transient_internet(self.testServer):
        can_verify = check_ssl_verifiy(self.testServer, self.remotePort)
        if not can_verify:
            self.skipTest("SSL certificate can't be verified")

    support.get_attribute(smtplib, 'SMTP_SSL')
    context = ssl.create_default_context()
    with support.transient_internet(self.testServer):
        server = smtplib.SMTP_SSL(self.testServer, self.remotePort,
                                  context=context)
        server.ehlo()
        server.quit()
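# A minimal sketch of the check_ssl_verifiy() helper the test above assumes
# (the misspelling matches the name used there; CPython's test_smtpnet.py
# defines it the same way). It reports whether the server's certificate
# verifies against the default trust store, so the test can skip rather
# than fail on an unverifiable certificate.
def check_ssl_verifiy(host, port):
    context = ssl.create_default_context()
    with socket.create_connection((host, port)) as sock:
        try:
            sock = context.wrap_socket(sock, server_hostname=host)
        except Exception:
            return False
        else:
            sock.close()
            return True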
def test_http_basic(self):
    self.assertIsNone(socket.getdefaulttimeout())
    url = support.TEST_HTTP_URL
    with support.transient_internet(url, timeout=None):
        u = _urlopen_with_retry(url)
        self.addCleanup(u.close)
        self.assertIsNone(u.fp.raw._sock.gettimeout())
def _test_urls(self, urls, handlers, retry=True):
    import socket
    import time
    import logging
    debug = logging.getLogger("test_urllib2").debug

    urlopen = urllib.request.build_opener(*handlers).open
    if retry:
        urlopen = _wrap_with_retry_thrice(urlopen, urllib.error.URLError)

    for url in urls:
        if isinstance(url, tuple):
            url, req, expected_err = url
        else:
            req = expected_err = None
        debug(url)
        try:
            f = urlopen(url, req)
        except EnvironmentError as err:
            debug(err)
            if expected_err:
                msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s"
                       % (expected_err, url, req, type(err), err))
                self.assertTrue(isinstance(err, expected_err), msg)
        else:
            with support.transient_internet(url):
                buf = f.read()
                f.close()
            debug("read %d bytes" % len(buf))
        debug("******** next url coming up...")
        time.sleep(0.1)
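# A minimal sketch of the retry helpers assumed above, modeled on the
# _retry_thrice/_wrap_with_retry_thrice pair in CPython's test_urllib2net.py:
# transient network failures are retried up to three times before the last
# exception is re-raised.
def _retry_thrice(func, exc, *args, **kwargs):
    for i in range(3):
        try:
            return func(*args, **kwargs)
        except exc as e:
            last_exc = e
            continue
    raise last_exc

def _wrap_with_retry_thrice(func, exc):
    def wrapped(*args, **kwargs):
        return _retry_thrice(func, exc, *args, **kwargs)
    return wrapped

# _urlopen_with_retry, used throughout these tests, is assumed to be
# urlopen wrapped the same way:
# _urlopen_with_retry = _wrap_with_retry_thrice(urllib.request.urlopen,
#                                               urllib.error.URLError)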
def test_http_basic(self):
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.python.org"
    with support.transient_internet(url, timeout=None):
        u = _urlopen_with_retry(url)
        self.addCleanup(u.close)
        self.assertIsNone(u.fp.raw._sock.gettimeout())
def test_redirect_url_withfrag(self): redirect_url_with_frag = "http://www.pythontest.net/redir/with_frag/" with support.transient_internet(redirect_url_with_frag): req = urllib.request.Request(redirect_url_with_frag) res = urllib.request.urlopen(req) self.assertEqual(res.geturl(), "http://www.pythontest.net/elsewhere/#frag")
def test_urlwithfrag(self): urlwith_frag = "http://docs.python.org/2/glossary.html#glossary" with support.transient_internet(urlwith_frag): req = urllib.request.Request(urlwith_frag) res = urllib.request.urlopen(req) self.assertEqual(res.geturl(), "http://docs.python.org/2/glossary.html#glossary")
def test_connect_using_sslcontext(self):
    context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
    support.get_attribute(smtplib, 'SMTP_SSL')
    with support.transient_internet(self.testServer):
        server = smtplib.SMTP_SSL(self.testServer, self.remotePort,
                                  context=context)
        server.ehlo()
        server.quit()
def testPasswordProtectedSite(self):
    support.requires('network')
    with support.transient_internet('mueblesmoraleda.com'):
        url = 'http://mueblesmoraleda.com'
        robots_url = url + "/robots.txt"
        # First check the URL is usable for our purposes, since the
        # test site is a bit flaky.
        try:
            urlopen(robots_url)
        except HTTPError as e:
            if e.code not in {401, 403}:
                self.skipTest(
                    "%r should return a 401 or 403 HTTP error, not %r"
                    % (robots_url, e.code))
        else:
            self.skipTest(
                "%r should return a 401 or 403 HTTP error, not succeed"
                % (robots_url))
        parser = urllib.robotparser.RobotFileParser()
        parser.set_url(url)
        try:
            parser.read()
        except URLError:
            self.skipTest('%s is unavailable' % url)
        self.assertEqual(parser.can_fetch("*", robots_url), False)
def test_redirect_url_withfrag(self): redirect_url_with_frag = "http://bit.ly/1iSHToT" with support.transient_internet(redirect_url_with_frag): req = request.Request(redirect_url_with_frag) res = yield from request.urlopen(req) self.assertEqual(res.geturl(), "https://docs.python.org/3.4/glossary.html#term-global-interpreter-lock")
def test_connect_using_sslcontext_verified(self):
    support.get_attribute(smtplib, 'SMTP_SSL')
    context = ssl.create_default_context()
    with support.transient_internet(self.testServer):
        server = smtplib.SMTP_SSL(self.testServer, self.remotePort,
                                  context=context)
        server.ehlo()
        server.quit()
def test_redirect_url_withfrag(self): redirect_url_with_frag = "http://bitly.com/urllibredirecttest" with support.transient_internet(redirect_url_with_frag): req = urllib.request.Request(redirect_url_with_frag) res = urllib.request.urlopen(req) self.assertEqual(res.geturl(), "http://docs.python.org/3.4/glossary.html#term-global-interpreter-lock")
def test_urlwithfrag(self): urlwith_frag = "http://www.pythontest.net/index.html#frag" with support.transient_internet(urlwith_frag): req = urllib.request.Request(urlwith_frag) res = urllib.request.urlopen(req) self.assertEqual(res.geturl(), "http://www.pythontest.net/index.html#frag")
def test_networked(self): # Default settings: no cert verification is done support.requires("network") with support.transient_internet("svn.python.org"): h = client.HTTPSConnection("svn.python.org", 443) h.request("GET", "/") resp = h.getresponse() self._check_svn_python_org(resp)
def resolve_address(host, port):
    """Resolve a (host, port) pair to an address.

    We must perform name resolution before timeout tests, otherwise it
    will be performed by connect().
    """
    with support.transient_internet(host):
        return socket.getaddrinfo(host, port, socket.AF_INET,
                                  socket.SOCK_STREAM)[0][4]
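# Usage sketch (illustrative host and port, not from the original suite):
# resolving up front keeps DNS time out of the measured interval, so a
# timeout assertion times the TCP handshake alone.
addr_remote = resolve_address('www.python.org', 80)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(0.001)
try:
    sock.connect(addr_remote)  # only connect() is under the timeout now
except socket.timeout:
    pass  # expected: the handshake rarely completes within 1 ms
finally:
    sock.close()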
def test_networked(self):
    # Default settings: no cert verification is done
    support.requires('network')
    with support.transient_internet('svn.python.org'):
        h = client.HTTPSConnection('svn.python.org', 443)
        h.request('GET', '/')
        resp = h.getresponse()
        self._check_svn_python_org(resp)
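# A minimal sketch of the _check_svn_python_org() helper the two tests
# above assume (modeled on the helper that accompanied these tests in
# CPython's test_httplib.py): just a sanity check that the response came
# back from the expected front end.
def _check_svn_python_org(self, resp):
    server_string = resp.getheader('server')
    self.assertIn('Apache', server_string)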
@contextlib.contextmanager
def urlretrieve(self, *args, **kwargs):
    resource = args[0]
    with support.transient_internet(resource):
        file_location, info = urllib.request.urlretrieve(*args, **kwargs)
        try:
            yield file_location, info
        finally:
            support.unlink(file_location)
@contextlib.contextmanager
def urlopen(self, *args, **kwargs):
    resource = args[0]
    with support.transient_internet(resource):
        r = urllib.request.urlopen(*args, **kwargs)
        try:
            yield r
        finally:
            r.close()
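# Usage sketch for the context-manager helpers above (hypothetical test
# body; the URL is illustrative): the transient_internet guard and the
# response cleanup are handled in one place.
def test_read(self):
    with self.urlopen("http://www.example.com/") as response:
        self.assertTrue(response.read())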
def test_logincapa(self):
    with transient_internet(self.host):
        for cap in self.server.capabilities:
            self.assertIsInstance(cap, str)
        self.assertIn('LOGINDISABLED', self.server.capabilities)
        self.assertIn('AUTH=ANONYMOUS', self.server.capabilities)
        rs = self.server.login(self.username, self.password)
        self.assertEqual(rs[0], 'OK')
def testPythonOrg(self):
    support.requires('network')
    with support.transient_internet('www.python.org'):
        parser = urllib.robotparser.RobotFileParser(
            "http://www.python.org/robots.txt")
        parser.read()
        self.assertTrue(
            parser.can_fetch("*", "http://www.python.org/robots.txt"))
def test_ftp_no_timeout(self):
    self.assertIsNone(socket.getdefaulttimeout())
    with support.transient_internet(self.FTP_HOST):
        socket.setdefaulttimeout(60)
        try:
            u = _urlopen_with_retry(self.FTP_HOST, timeout=None)
            self.addCleanup(u.close)
        finally:
            socket.setdefaulttimeout(None)
        self.assertIsNone(u.fp.fp.raw._sock.gettimeout())
def test_networked(self):
    # Default settings: requires a valid cert from a trusted CA
    import ssl
    support.requires('network')
    with support.transient_internet('self-signed.pythontest.net'):
        h = client.HTTPSConnection('self-signed.pythontest.net', 443)
        with self.assertRaises(ssl.SSLError) as exc_info:
            h.request('GET', '/')
        self.assertEqual(exc_info.exception.reason,
                         'CERTIFICATE_VERIFY_FAILED')
def test_networked_trusted_by_default_cert(self):
    # Default settings: requires a valid cert from a trusted CA
    support.requires('network')
    with support.transient_internet('www.python.org'):
        h = client.HTTPSConnection('www.python.org', 443)
        h.request('GET', '/')
        resp = h.getresponse()
        content_type = resp.getheader('content-type')
        self.assertIn('text/html', content_type)
def test_ftp_timeout(self):
    with support.transient_internet(self.FTP_HOST):
        u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
        self.addCleanup(u.close)
        if utils.PY2:
            sock = u.fp.fp._sock
        else:
            sock = u.fp.fp.raw._sock
        self.assertEqual(sock.gettimeout(), 60)
def test_connect_using_sslcontext(self):
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    context.check_hostname = False
    context.verify_mode = ssl.CERT_NONE
    support.get_attribute(smtplib, 'SMTP_SSL')
    with support.transient_internet(self.testServer):
        server = smtplib.SMTP_SSL(self.testServer, self.remotePort,
                                  context=context)
        server.ehlo()
        server.quit()
def test_sites_no_connection_close(self):
    # Some sites do not send Connection: close header.
    # Verify that those work properly. (#issue12576)
    URL = 'http://www.imdb.com'  # No Connection:close
    with support.transient_internet(URL):
        req = urllib.request.urlopen(URL)
        res = req.read()
        self.assertTrue(res)
def testConnectTimeout(self):
    # Choose a private address that is unlikely to exist to prevent
    # failures due to the connect succeeding before the timeout.
    # Use a dotted IP address to avoid including the DNS lookup time
    # with the connect time. This avoids failing the assertion that
    # the timeout occurred fast enough.
    addr = ("10.0.0.0", 12345)
    with support.transient_internet(addr[0]):
        self._sock_operation(1, 0.001, "connect", addr)
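# A minimal sketch of the _sock_operation() helper these timeout tests
# assume, modeled on the one in CPython's test_timeout.py: run a socket
# method `count` times under `timeout` seconds and assert that it raises
# socket.timeout within a fuzz margin. self.sock and self.fuzz are
# assumed to be set up by the test class.
def _sock_operation(self, count, timeout, method, *args):
    self.sock.settimeout(timeout)
    method = getattr(self.sock, method)
    for i in range(count):
        t1 = time.monotonic()
        try:
            method(*args)
        except socket.timeout:
            delta = time.monotonic() - t1
            break
    else:
        self.fail('socket.timeout was not raised')
    # Account for timing imprecision: the operation should take roughly
    # `timeout` seconds, give or take the platform-dependent fuzz factor.
    self.assertLess(delta, timeout + self.fuzz)
    self.assertGreater(delta, timeout - 1.0)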
def test_ftp_default_timeout(self):
    self.assertIsNone(socket.getdefaulttimeout())
    with support.transient_internet(self.FTP_HOST):
        socket.setdefaulttimeout(60)
        try:
            u = _urlopen_with_retry(self.FTP_HOST)
            self.addCleanup(u.close)
        finally:
            socket.setdefaulttimeout(None)
        self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
def test_ssl_context_keyfile_exclusive(self):
    with transient_internet(self.host):
        self.assertRaises(
            ValueError, self.imap_class, self.host, self.port,
            keyfile=CERTFILE, ssl_context=self.create_ssl_context())
def test_getcode(self):
    # Test getcode() with the fancy opener to get 404 error codes.
    URL = "http://www.example.com/XXXinvalidXXX"
    with support.transient_internet(URL):
        open_url = urllib.request.FancyURLopener().open(URL)
        try:
            code = open_url.getcode()
        finally:
            open_url.close()
        self.assertEqual(code, 404)
def test_close(self):
    # Calling .close() on urllib2's response objects should close the
    # underlying socket.
    url = "http://www.python.org/"
    with support.transient_internet(url):
        response = _urlopen_with_retry(url)
        sock = response.fp
        self.assertFalse(sock.closed)
        response.close()
        self.assertTrue(sock.closed)
@classmethod
def setUpClass(cls):
    support.requires('network')
    with support.transient_internet(cls.base_url):
        cls.parser = urllib.robotparser.RobotFileParser(cls.robots_txt)
        cls.parser.read()
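# A sketch of how a concrete test class might supply the base_url and
# robots_txt attributes this setUpClass expects (class name and URLs are
# illustrative; CPython's test_robotparser parameterizes its network
# tests this way):
class NetworkTestCase(unittest.TestCase):
    base_url = 'http://www.pythontest.net/'
    robots_txt = '{}elsewhere/robots.txt'.format(base_url)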
def test_http_timeout(self): url = "http://www.example.com" with support.transient_internet(url): u = _urlopen_with_retry(url, timeout=120) self.addCleanup(u.close) self.assertEqual(u.fp.raw._sock.gettimeout(), 120)
def testConnectTimeout(self):
    # Testing connect timeout is tricky: we need to have IP connectivity
    # to a host that silently drops our packets. We can't simulate this
    # from Python because it's a function of the underlying TCP/IP stack.
    # So, the following Snakebite host has been defined:
    blackhole = resolve_address('blackhole.snakebite.net', 56666)

    # Blackhole has been configured to silently drop any incoming packets.
    # No RSTs (for TCP) or ICMP UNREACH (for UDP/ICMP) will be sent back
    # to hosts that attempt to connect to this address: which is exactly
    # what we need to confidently test connect timeout.

    # However, we want to prevent false positives. It's not unreasonable
    # to expect certain hosts may not be able to reach the blackhole, due
    # to firewalling or general network configuration. In order to improve
    # our confidence in testing the blackhole, a corresponding 'whitehole'
    # has also been set up using one port higher:
    whitehole = resolve_address('whitehole.snakebite.net', 56667)

    # This address has been configured to immediately drop any incoming
    # packets as well, but it does it respectfully with regards to the
    # incoming protocol. RSTs are sent for TCP packets, and ICMP UNREACH
    # is sent for UDP/ICMP packets. This means our attempts to connect to
    # it should be met immediately with ECONNREFUSED. The test case has
    # been structured around this premise: if we get an ECONNREFUSED from
    # the whitehole, we proceed with testing connect timeout against the
    # blackhole. If we don't, we skip the test (with a message about not
    # getting the required RST from the whitehole within the required
    # timeframe).

    # For the records, the whitehole/blackhole configuration has been set
    # up using the 'pf' firewall (available on BSDs), using the following:
    #
    #   ext_if="bge0"
    #
    #   blackhole_ip="35.8.247.6"
    #   whitehole_ip="35.8.247.6"
    #   blackhole_port="56666"
    #   whitehole_port="56667"
    #
    #   block return in log quick on $ext_if proto { tcp udp } \
    #       from any to $whitehole_ip port $whitehole_port
    #   block drop in log quick on $ext_if proto { tcp udp } \
    #       from any to $blackhole_ip port $blackhole_port
    #
    skip = True
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # Use a timeout of 3 seconds. Why 3? Because it's more than 1, and
    # less than 5. i.e. no particular reason. Feel free to tweak it if
    # you feel a different value would be more appropriate.
    timeout = 3
    sock.settimeout(timeout)
    try:
        sock.connect(whitehole)
    except socket.timeout:
        pass
    except OSError as err:
        if err.errno == errno.ECONNREFUSED:
            skip = False
    finally:
        sock.close()
        del sock

    if skip:
        self.skipTest(
            "We didn't receive a connection reset (RST) packet from "
            "{}:{} within {} seconds, so we're unable to test connect "
            "timeout against the corresponding {}:{} (which is "
            "configured to silently drop packets).".format(
                whitehole[0],
                whitehole[1],
                timeout,
                blackhole[0],
                blackhole[1],
            ))

    # All that hard work just to test if connect times out in 0.001s ;-)
    self.addr_remote = blackhole
    with support.transient_internet(self.addr_remote[0]):
        self._sock_operation(1, 0.001, 'connect', self.addr_remote)
def test_http_timeout(self):
    url = support.TEST_HTTP_URL
    with support.transient_internet(url):
        u = _urlopen_with_retry(url, timeout=120)
        self.addCleanup(u.close)
        self.assertEqual(u.fp.raw._sock.gettimeout(), 120)
def test_logincapa_with_client_certfile(self):
    with transient_internet(self.host):
        _server = self.imap_class(self.host, self.port, certfile=CERTFILE)
        self.check_logincapa(_server)
def urlretrieve(self, *args):
    resource = args[0]
    with support.transient_internet(resource):
        return urllib.request.urlretrieve(*args)
def setUp(self):
    with transient_internet(self.host):
        self.server = self.imap_class(self.host, self.port)
def tearDown(self):
    if self.server is not None:
        with transient_internet(self.host):
            self.server.logout()
def test_logout(self):
    with transient_internet(self.host):
        rs = self.server.logout()
        self.server = None
        self.assertEqual(rs[0], 'BYE')
def setUp(self):
    super().setUp()
    with transient_internet(self.host):
        rs = self.server.starttls()
        self.assertEqual(rs[0], 'OK')
def test_logincapa(self):
    with transient_internet(self.host):
        _server = self.imap_class(self.host, self.port)
        self.check_logincapa(_server)
def test_logout(self):
    with transient_internet(self.host):
        _server = self.imap_class(self.host, self.port)
        rs = _server.logout()
        self.assertEqual(rs[0], 'BYE')
def test_connect(self):
    support.get_attribute(smtplib, 'SMTP_SSL')
    with support.transient_internet(self.testServer):
        server = smtplib.SMTP_SSL(self.testServer, self.remotePort)
        server.ehlo()
        server.quit()
def urlopen(self, *args, **kwargs):
    resource = args[0]
    with support.transient_internet(resource):
        return urllib.request.urlopen(*args, **kwargs)
def test_ssl_context_keyfile_exclusive(self):
    with transient_internet(self.host):
        self.assertRaises(
            ValueError, self.imap_class, self.host, self.port,
            keyfile=CERTFILE, ssl_context=self.create_ssl_context())
def test_logincapa_with_client_ssl_context(self):
    with transient_internet(self.host):
        _server = self.imap_class(self.host, self.port,
                                  ssl_context=self.create_ssl_context())
        self.check_logincapa(_server)
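# A minimal sketch of the create_ssl_context() helper the IMAP tests
# above assume (modeled on the one in CPython's test_imaplib.py): a
# client context carrying the test certificate/key pair in CERTFILE,
# with verification relaxed since the remote test server's certificate
# is not expected to chain to a public CA.
def create_ssl_context(self):
    ssl_context = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
    ssl_context.check_hostname = False
    ssl_context.verify_mode = ssl.CERT_NONE
    ssl_context.load_cert_chain(CERTFILE)
    return ssl_context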
def test_ftp_timeout(self):
    with support.transient_internet(self.FTP_HOST):
        u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
        self.addCleanup(u.close)
        self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
def test_logincapa_with_client_certfile(self):
    with transient_internet(self.host):
        with support.check_warnings(('', DeprecationWarning)):
            _server = self.imap_class(self.host, self.port,
                                      certfile=CERTFILE)
        self.check_logincapa(_server)
def test_ftp_basic(self):
    self.assertIsNone(socket.getdefaulttimeout())
    with support.transient_internet(self.FTP_HOST, timeout=None):
        u = _urlopen_with_retry(self.FTP_HOST)
        self.addCleanup(u.close)
        self.assertIsNone(u.fp.fp.raw._sock.gettimeout())
def testURLread(self): with support.transient_internet("www.example.com"): f = urllib.request.urlopen("http://www.example.com/") x = f.read()
def testRecvTimeout(self):
    # Test recv() timeout.
    with support.transient_internet(self.addr_remote[0]):
        self.sock.connect(self.addr_remote)
        self._sock_operation(1, 1.5, 'recv', 1024)
def testURLread(self): with support.transient_internet("www.python.org"): f = urllib.request.urlopen("http://www.python.org/") x = f.read()
def testURLread(self):
    domain = urllib.parse.urlparse(support.TEST_HTTP_URL).netloc
    with support.transient_internet(domain):
        f = urllib.request.urlopen(support.TEST_HTTP_URL)
        f.read()