def test_urlwithfrag(self):
    """geturl() must preserve the #fragment of the requested URL."""
    frag_url = "http://www.pythontest.net/index.html#frag"
    with test_support.transient_internet(frag_url):
        response = urllib2.urlopen(urllib2.Request(frag_url))
        self.assertEqual(
            response.geturl(),
            "http://www.pythontest.net/index.html#frag")
def testPasswordProtectedSite(self):
    """A password-protected site must be treated as disallowed."""
    test_support.requires('network')
    with test_support.transient_internet('mueblesmoraleda.com'):
        url = 'http://mueblesmoraleda.com'
        robots_url = url + "/robots.txt"
        # First check the URL is usable for our purposes, since the
        # test site is a bit flaky.
        try:
            urlopen(robots_url)
        except HTTPError as err:
            if err.code not in {401, 403}:
                self.skipTest(
                    "%r should return a 401 or 403 HTTP error, not %r"
                    % (robots_url, err.code))
        else:
            self.skipTest(
                "%r should return a 401 or 403 HTTP error, not succeed"
                % (robots_url))
        parser = robotparser.RobotFileParser()
        parser.set_url(url)
        try:
            parser.read()
        except IOError:
            self.skipTest('%s is unavailable' % url)
        self.assertFalse(parser.can_fetch("*", robots_url))
def test_http_basic(self):
    """No default timeout -> the innermost HTTP socket has no timeout."""
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.example.com"
    with test_support.transient_internet(url, timeout=None):
        resp = _urlopen_with_retry(url)
        # Reach through the buffered layers to the real socket object.
        self.assertIsNone(resp.fp._sock.fp._sock.gettimeout())
        resp.close()
def test_urlwithfrag(self):
    """geturl() must keep the #fragment of the original request URL."""
    glossary_url = "https://docs.python.org/2/glossary.html#glossary"
    with test_support.transient_internet(glossary_url):
        response = urllib2.urlopen(urllib2.Request(glossary_url))
        self.assertEqual(response.geturl(),
                         "https://docs.python.org/2/glossary.html#glossary")
def _test_urls(self, urls, handlers, retry=True):
    """Open each url via the given handlers, checking expected errors.

    Items in `urls` are plain URLs or (url, request, expected_err)
    tuples; when expected_err is set, opening must raise it.
    """
    import time
    import logging
    debug = logging.getLogger("test_urllib2").debug

    # The unused `import socket` was dropped.
    urlopen = urllib2.build_opener(*handlers).open
    if retry:
        # Retry transient URLErrors up to three times (flaky network).
        urlopen = _wrap_with_retry_thrice(urlopen, urllib2.URLError)

    for url in urls:
        if isinstance(url, tuple):
            url, req, expected_err = url
        else:
            req = expected_err = None
        debug(url)
        try:
            f = urlopen(url, req)
        except EnvironmentError as err:
            # `as err` and assertIsInstance replace the old comma syntax
            # and the deprecated assert_(isinstance(...)), matching the
            # file's other _test_urls variants.
            debug(err)
            if expected_err:
                msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s"
                       % (expected_err, url, req, type(err), err))
                self.assertIsInstance(err, expected_err, msg)
        else:
            with test_support.transient_internet():
                buf = f.read()
                f.close()
            debug("read %d bytes" % len(buf))
        debug("******** next url coming up...")
        time.sleep(0.1)
def test_ftp_timeout(self):
    """An explicit timeout must be propagated to the FTP socket."""
    with test_support.transient_internet(self.FTP_HOST):
        # The bare `try: ... except: raise` wrapper added nothing
        # (it re-raised everything unchanged) and was removed.
        u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
        self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
        u.close()
def _test_urls(self, urls, handlers, retry=True):
    # Drive the opener built from `handlers` over every url in `urls`.
    # Each entry is either a plain URL or a (url, request, expected_err)
    # tuple; when expected_err is given, opening must raise it.
    import time
    import logging
    debug = logging.getLogger("test_urllib2").debug
    urlopen = urllib2.build_opener(*handlers).open
    if retry:
        # Network flakiness: retry transient URLErrors up to three times.
        urlopen = _wrap_with_retry_thrice(urlopen, urllib2.URLError)
    for url in urls:
        if isinstance(url, tuple):
            url, req, expected_err = url
        else:
            req = expected_err = None
        with test_support.transient_internet(url):
            debug(url)
            try:
                f = urlopen(url, req, TIMEOUT)
            except EnvironmentError as err:
                debug(err)
                if expected_err:
                    msg = "Didn't get expected error(s) %s for %s %s, got %s: %s" % (
                        expected_err,
                        url,
                        req,
                        type(err),
                        err,
                    )
                    self.assertIsInstance(err, expected_err, msg)
            except urllib2.URLError as err:
                # Remote timeouts are tolerated; anything else is a real
                # failure and propagates.
                if isinstance(err[0], socket.timeout):
                    print >>sys.stderr, "<timeout: %s>" % url
                    continue
                else:
                    raise
            else:
                try:
                    with test_support.transient_internet(url):
                        buf = f.read()
                        debug("read %d bytes" % len(buf))
                except socket.timeout:
                    print >>sys.stderr, "<timeout: %s>" % url
                f.close()
            debug("******** next url coming up...")
            time.sleep(0.1)
def testPythonOrg(self):
    """python.org's robots.txt must allow fetching robots.txt itself."""
    support.requires('network')
    with support.transient_internet('www.python.org'):
        rp = robotparser.RobotFileParser(
            "http://www.python.org/robots.txt")
        rp.read()
        self.assertTrue(
            rp.can_fetch("*", "http://www.python.org/robots.txt"))
def test_ftp_no_timeout(self):
    """timeout=None must override the global default for FTP opens."""
    # Stray trailing comma in the original assert call removed.
    self.assertIsNone(socket.getdefaulttimeout())
    with test_support.transient_internet(self.FTP_HOST):
        socket.setdefaulttimeout(60)
        try:
            u = _urlopen_with_retry(self.FTP_HOST, timeout=None)
        finally:
            # Always restore the process-wide default, even on failure.
            socket.setdefaulttimeout(None)
        self.assertIsNone(u.fp.fp._sock.gettimeout())
        u.close()  # was leaked before
def test_networked(self):
    # Default settings: requires a valid cert from a trusted CA
    import ssl
    test_support.requires('network')
    with test_support.transient_internet('self-signed.pythontest.net'):
        conn = httplib.HTTPSConnection('self-signed.pythontest.net', 443)
        with self.assertRaises(ssl.SSLError) as exc_info:
            conn.request('GET', '/')
        self.assertEqual(exc_info.exception.reason,
                         'CERTIFICATE_VERIFY_FAILED')
def test_ftp_default_timeout(self):
    """The global default timeout must propagate to the FTP socket."""
    # assertIsNone replaces the weaker assertTrue(... is None) idiom.
    self.assertIsNone(socket.getdefaulttimeout())
    with test_support.transient_internet(self.FTP_HOST):
        socket.setdefaulttimeout(60)
        try:
            u = _urlopen_with_retry(self.FTP_HOST)
        finally:
            # Always restore the process-wide default.
            socket.setdefaulttimeout(None)
        self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
        u.close()  # was leaked before
def test_networked_trusted_by_default_cert(self):
    """A site with a CA-signed cert must verify under default settings."""
    test_support.requires('network')
    with test_support.transient_internet('www.python.org'):
        conn = httplib.HTTPSConnection('www.python.org', 443)
        conn.request('GET', '/')
        response = conn.getresponse()
        self.assertIn('text/html', response.getheader('content-type'))
def test_sites_no_connection_close(self):
    # Some sites do not send Connection: close header.
    # Verify that those work properly. (#issue12576)
    URL = 'http://www.imdb.com'  # No Connection:close
    with test_support.transient_internet(URL):
        # Renamed: the object returned by urlopen is a response, not a
        # request; it is now also closed instead of leaked.
        response = urllib2.urlopen(URL)
        body = response.read()
        response.close()
        self.assertTrue(body)
def test_http_default_timeout(self):
    """The global default timeout must propagate to the HTTP socket."""
    self.assertIsNone(socket.getdefaulttimeout())
    url = test_support.TEST_HTTP_URL
    with test_support.transient_internet(url):
        socket.setdefaulttimeout(60)
        try:
            u = _urlopen_with_retry(url)
        finally:
            # Restore the process-wide default even if the open fails.
            socket.setdefaulttimeout(None)
        self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 60)
        u.close()  # was leaked before
def test_http_default_timeout(self):
    """The global default timeout must propagate to the HTTP socket."""
    # assertIsNone replaces the weaker assertTrue(... is None) idiom.
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.python.org"
    with test_support.transient_internet(url):
        socket.setdefaulttimeout(60)
        try:
            u = _urlopen_with_retry(url)
        finally:
            # Restore the process-wide default even if the open fails.
            socket.setdefaulttimeout(None)
        self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 60)
        u.close()  # was leaked before
def test_http_no_timeout(self):
    """An explicit timeout=None must override the global default."""
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.example.com"
    with test_support.transient_internet(url):
        socket.setdefaulttimeout(60)
        try:
            u = _urlopen_with_retry(url, timeout=None)
        finally:
            # Restore the process-wide default even if the open fails.
            socket.setdefaulttimeout(None)
        self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
        u.close()  # was leaked before
def testPasswordProtectedSite(self):
    """robots.txt on a password-protected host must deny everything."""
    test_support.requires('network')
    with test_support.transient_internet('mueblesmoraleda.com'):
        url = 'http://mueblesmoraleda.com'
        rp = robotparser.RobotFileParser()
        rp.set_url(url)
        try:
            rp.read()
        except IOError:
            self.skipTest('%s is unavailable' % url)
        self.assertFalse(rp.can_fetch("*", url + "/robots.txt"))
def testPasswordProtectedSite(self):
    """A 401/403-protected site is treated as fully disallowed."""
    test_support.requires('network')
    with test_support.transient_internet('mueblesmoraleda.com'):
        site = 'http://mueblesmoraleda.com'
        parser = robotparser.RobotFileParser()
        parser.set_url(site)
        try:
            parser.read()
        except IOError:
            self.skipTest('%s is unavailable' % site)
        robots = site + "/robots.txt"
        self.assertEqual(parser.can_fetch("*", robots), False)
def test_multiple_ftp_retrieves(self):
    """Repeated FTP retrieves of the same file must all succeed."""
    with test_support.transient_internet(self.FTP_TEST_FILE):
        try:
            for file_num in range(self.NUM_FTP_RETRIEVES):
                with test_support.temp_dir() as td:
                    target = os.path.join(td, str(file_num))
                    urllib.FancyURLopener().retrieve(
                        self.FTP_TEST_FILE, target)
        except IOError as e:
            self.fail("Failed FTP retrieve while accessing ftp url "
                      "multiple times.\n Error message was : %s" % e)
def test_networked_noverification(self):
    # Switch off cert verification
    import ssl
    test_support.requires('network')
    with test_support.transient_internet('self-signed.pythontest.net'):
        unverified = ssl._create_stdlib_context()
        conn = httplib.HTTPSConnection('self-signed.pythontest.net', 443,
                                       context=unverified)
        conn.request('GET', '/')
        self.assertIn('nginx', conn.getresponse().getheader('server'))
def _test_urls(self, urls, handlers, retry=True):
    # Drive the opener built from `handlers` over every url in `urls`.
    # Each entry is either a plain URL or a (url, request, expected_err)
    # tuple; when expected_err is given, opening must raise it.
    import time
    import logging
    debug = logging.getLogger("test_urllib2").debug
    urlopen = urllib2.build_opener(*handlers).open
    if retry:
        # Network flakiness: retry transient URLErrors up to three times.
        urlopen = _wrap_with_retry_thrice(urlopen, urllib2.URLError)
    for url in urls:
        if isinstance(url, tuple):
            url, req, expected_err = url
        else:
            req = expected_err = None
        with test_support.transient_internet(url):
            debug(url)
            try:
                f = urlopen(url, req, TIMEOUT)
            except EnvironmentError as err:
                debug(err)
                if expected_err:
                    msg = (
                        "Didn't get expected error(s) %s for %s %s, got %s: %s"
                        % (expected_err, url, req, type(err), err))
                    self.assertIsInstance(err, expected_err, msg)
            except urllib2.URLError as err:
                # Remote timeouts are tolerated; anything else propagates.
                if isinstance(err[0], socket.timeout):
                    print >> sys.stderr, "<timeout: %s>" % url
                    continue
                else:
                    raise
            else:
                try:
                    with test_support.transient_internet(url):
                        buf = f.read()
                        debug("read %d bytes" % len(buf))
                except socket.timeout:
                    print >> sys.stderr, "<timeout: %s>" % url
                f.close()
            debug("******** next url coming up...")
            time.sleep(0.1)
def test_networked_bad_cert(self):
    # We feed a "CA" cert that is unrelated to the server's cert
    import ssl
    test_support.requires('network')
    with test_support.transient_internet('self-signed.pythontest.net'):
        ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        ctx.verify_mode = ssl.CERT_REQUIRED
        ctx.load_verify_locations(CERT_localhost)
        conn = httplib.HTTPSConnection('self-signed.pythontest.net', 443,
                                       context=ctx)
        with self.assertRaises(ssl.SSLError) as exc_info:
            conn.request('GET', '/')
        self.assertEqual(exc_info.exception.reason,
                         'CERTIFICATE_VERIFY_FAILED')
def test_custom_headers(self):
    """Request objects gain headers on open, and custom ones are kept."""
    url = "http://www.example.com"
    with test_support.transient_internet(url):
        opener = urllib2.build_opener()
        request = urllib2.Request(url)
        self.assertFalse(request.header_items())
        opener.open(request).close()  # responses were leaked before
        self.assertTrue(request.header_items())
        self.assertTrue(request.has_header('User-agent'))
        request.add_header('User-Agent', 'Test-Agent')
        opener.open(request).close()
        self.assertEqual(request.get_header('User-agent'), 'Test-Agent')
def test_custom_headers(self):
    """Opening a Request adds default headers; custom ones survive."""
    url = "http://www.example.com"
    with test_support.transient_internet(url):
        opener = urllib2.build_opener()
        request = urllib2.Request(url)
        self.assertFalse(request.header_items())
        opener.open(request).close()  # responses were leaked before
        self.assertTrue(request.header_items())
        self.assertTrue(request.has_header('User-agent'))
        request.add_header('User-Agent', 'Test-Agent')
        opener.open(request).close()
        self.assertEqual(request.get_header('User-agent'), 'Test-Agent')
def test_networked_good_cert(self):
    # We feed the server's cert as a validating cert
    import ssl
    test_support.requires('network')
    with test_support.transient_internet('self-signed.pythontest.net'):
        ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1)
        ctx.verify_mode = ssl.CERT_REQUIRED
        ctx.load_verify_locations(CERT_selfsigned_pythontestdotnet)
        conn = httplib.HTTPSConnection('self-signed.pythontest.net', 443,
                                       context=ctx)
        conn.request('GET', '/')
        served_by = conn.getresponse().getheader('server')
        self.assertIn('nginx', served_by)
def test_multiple_ftp_urlopen_same_host(self):
    """Several unclosed FTP opens against one host must be supported."""
    with test_support.transient_internet(self.FTP_TEST_FILE):
        open_fds = []
        try:
            for _ in range(self.NUM_FTP_RETRIEVES):
                # test ftp open without closing fd as a supported scenario.
                open_fds.append(urllib.urlopen(self.FTP_TEST_FILE))
        except IOError as e:
            self.fail("Failed FTP binary file open. "
                      "Error message was: %s" % e)
        finally:
            # close the open fds
            for fd in open_fds:
                fd.close()
def testRecvTimeout(self):
    # Test recv() timeout
    _timeout = 0.02
    with test_support.transient_internet(self.addr_remote[0]):
        self.sock.connect(self.addr_remote)
        self.sock.settimeout(_timeout)
        _t1 = time.time()
        self.assertRaises(socket.timeout, self.sock.recv, 1024)
        _t2 = time.time()
        _delta = abs(_t1 - _t2)
        # The old message "timeout (%g) is %g seconds more than expected
        # (%g)" misstated the measurement (the middle value is the fuzz
        # allowance, not the overshoot); reworded to match what is checked.
        self.assertTrue(_delta < _timeout + self.fuzz,
                        "timeout (%g) is more than %g seconds more than "
                        "expected (%g)" % (_delta, self.fuzz, _timeout))
def test_algorithms(self):
    # Issue #8484: all hash algorithms should be available when verifying
    # a certificate.  The test is currently DISABLED: the unconditional
    # `return` below makes everything after it unreachable; the body is
    # kept for when old-OpenSSL breakage is resolved.
    if test_support.verbose:
        sys.stdout.write("test_algorithms disabled, "
                         "as it fails on some old OpenSSL versions")
    return
    # --- unreachable below this point (deliberately disabled) ---
    # Issue #8484: all algorithms should be available when verifying a
    # certificate.
    # NOTE: https://sha256.tbs-internet.com is another possible test host
    remote = ("sha2.hboeck.de", 443)
    sha256_cert = os.path.join(os.path.dirname(__file__), "sha256.pem")
    s = ssl.wrap_socket(socket.socket(socket.AF_INET),
                        cert_reqs=ssl.CERT_REQUIRED,
                        ca_certs=sha256_cert)
    with test_support.transient_internet():
        try:
            s.connect(remote)
            if test_support.verbose:
                sys.stdout.write("\nCipher with %r is %r\n" %
                                 (remote, s.cipher()))
                sys.stdout.write("Certificate is:\n%s\n" %
                                 pprint.pformat(s.getpeercert()))
        finally:
            s.close()
def test_http_timeout(self):
    """An explicit timeout must reach the innermost HTTP socket."""
    url = "http://www.example.com"
    with test_support.transient_internet(url):
        u = _urlopen_with_retry(url, timeout=120)
        self.assertEqual(u.fp._sock.fp._sock.gettimeout(), 120)
        u.close()  # was leaked before
def setUp(self):
    # Connect to the remote IMAP server; transient_internet turns
    # flaky-network errors into skips rather than hard failures.
    with transient_internet(self.host):
        self.server = self.imap_class(self.host, self.port)
def test_http_basic(self):
    """With no default timeout, the underlying socket has none either."""
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.example.com"
    with test_support.transient_internet(url, timeout=None):
        u = _urlopen_with_retry(url)
        self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
        u.close()  # was leaked before
def test_http_basic(self):
    """With no default timeout, the underlying socket has none either."""
    # assertIsNone replaces the weaker assertTrue(... is None) idiom.
    self.assertIsNone(socket.getdefaulttimeout())
    url = "http://www.python.org"
    with test_support.transient_internet(url, timeout=None):
        u = _urlopen_with_retry(url)
        self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
        u.close()  # was leaked before
def test_http_timeout(self):
    """An explicit timeout must reach the innermost HTTP socket."""
    url = test_support.TEST_HTTP_URL
    with test_support.transient_internet(url):
        resp = _urlopen_with_retry(url, timeout=120)
        self.assertEqual(resp.fp._sock.fp._sock.gettimeout(), 120)
        resp.close()
def test_connect_default_port(self):
    """SMTP_SSL must connect on its default port when none is given."""
    test_support.get_attribute(smtplib, 'SMTP_SSL')
    with test_support.transient_internet(self.testServer):
        conn = smtplib.SMTP_SSL(self.testServer)
        conn.ehlo()
        conn.quit()
# NOTE(review): this chunk begins mid-function — the enclosing `try:`
# and loop header are outside the visible region, so the except clauses
# below have no visible try.
except EnvironmentError, err:
    debug(err)
    if expected_err:
        msg = (
            "Didn't get expected error(s) %s for %s %s, got %s: %s"
            % (expected_err, url, req, type(err), err))
        # assert_ is the deprecated alias of assertTrue (kept verbatim).
        self.assert_(isinstance(err, expected_err), msg)
except urllib2.URLError as err:
    # Remote timeouts are tolerated; anything else propagates.
    if isinstance(err[0], socket.timeout):
        print >> sys.stderr, "<timeout: %s>" % url
        continue
    else:
        raise
else:
    try:
        with test_support.transient_internet():
            buf = f.read()
            debug("read %d bytes" % len(buf))
    except socket.timeout:
        print >> sys.stderr, "<timeout: %s>" % url
    f.close()
debug("******** next url coming up...")
time.sleep(0.1)

def _extra_handlers(self):
    # Build the extra handler list; a CacheFTPHandler with a short
    # timeout exercises FTP connection caching.
    handlers = []
    cfh = urllib2.CacheFTPHandler()
    cfh.setTimeout(1)
    handlers.append(cfh)
    # NOTE(review): a trailing `return handlers` appears to be cut off
    # at the chunk boundary — confirm against the full file.
def test_connect(self):
    """SMTP_SSL must connect to the remote server on an explicit port."""
    test_support.get_attribute(smtplib, "SMTP_SSL")
    with test_support.transient_internet(self.testServer):
        conn = smtplib.SMTP_SSL(self.testServer, self.remotePort)
        conn.ehlo()
        conn.quit()
def test_http_basic(self):
    """With no default timeout, the underlying socket has none either."""
    self.assertIsNone(socket.getdefaulttimeout())
    url = test_support.TEST_HTTP_URL
    with test_support.transient_internet(url, timeout=None):
        u = _urlopen_with_retry(url)
        self.assertIsNone(u.fp._sock.fp._sock.gettimeout())
        u.close()  # was leaked before
def test_ftp_basic(self):
    """With no default timeout, the FTP socket must have none either."""
    self.assertIsNone(socket.getdefaulttimeout())
    with test_support.transient_internet(self.FTP_HOST, timeout=None):
        u = _urlopen_with_retry(self.FTP_HOST)
        self.assertIsNone(u.fp.fp._sock.gettimeout())
        u.close()  # was leaked before
# NOTE(review): this chunk begins mid-`try` — the `try:` line and loop
# header are outside the visible region.
f = urlopen(url, req, TIMEOUT)
except EnvironmentError, err:
    debug(err)
    if expected_err:
        msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s"
               % (expected_err, url, req, type(err), err))
        # assert_ is the deprecated alias of assertTrue (kept verbatim).
        self.assert_(isinstance(err, expected_err), msg)
except urllib2.URLError as err:
    # Remote timeouts are tolerated; anything else propagates.
    if isinstance(err[0], socket.timeout):
        print >>sys.stderr, "<timeout: %s>" % url
        continue
    else:
        raise
else:
    try:
        with test_support.transient_internet():
            buf = f.read()
            debug("read %d bytes" % len(buf))
    except socket.timeout:
        print >>sys.stderr, "<timeout: %s>" % url
    f.close()
debug("******** next url coming up...")
time.sleep(0.1)

def _extra_handlers(self):
    # Build the extra handler list; a CacheFTPHandler with a short
    # timeout exercises FTP connection caching.
    handlers = []
    cfh = urllib2.CacheFTPHandler()
    cfh.setTimeout(1)
    handlers.append(cfh)
    # NOTE(review): a trailing `return handlers` appears to be cut off
    # at the chunk boundary — confirm against the full file.
def test_ftp_timeout(self):
    """An explicit timeout must be propagated to the FTP socket."""
    with test_support.transient_internet(self.FTP_HOST):
        u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
        self.assertEqual(u.fp.fp._sock.gettimeout(), 60)
        u.close()  # was leaked before