def test_download_file(self):
    """Test download_file function with local file:// URLs and (broken) proxies."""
    fn = 'toy-0.0.tar.gz'
    target_location = os.path.join(self.test_buildpath, 'some', 'subdir', fn)

    # a local file path can be used as the source URL
    test_dir = os.path.abspath(os.path.dirname(__file__))
    source_url = 'file://%s/sandbox/sources/toy/%s' % (test_dir, fn)
    res = ft.download_file(fn, source_url, target_location)
    self.assertEqual(res, target_location, "'download' of local file works")

    # a non-existing file yields a None return value
    self.assertEqual(ft.download_file(fn, 'file://%s/nosuchfile' % test_dir, target_location), None)

    # install a deliberately broken proxy handler for file:// URLs;
    # urllib2.urlopen should then route file:// downloads through it
    broken_proxy = urllib2.ProxyHandler({'file': 'file://%s/nosuchfile' % test_dir})
    urllib2.install_opener(urllib2.build_opener(broken_proxy))

    # a download over the broken proxy must fail (None return value),
    # which proves that download_file takes installed proxies into account
    self.assertEqual(ft.download_file(fn, source_url, target_location), None,
                     "download over broken proxy fails")

    # put a working file handler back, then retry the local 'download'
    urllib2.install_opener(urllib2.build_opener(urllib2.FileHandler()))
    res = ft.download_file(fn, source_url, target_location)
    self.assertEqual(res, target_location, "'download' of local file works after removing broken proxy")
def __init__(self, *args, **kargs):
    """Initialize the opener with the standard urllib2 handlers plus cookie support."""
    urllib2.OpenerDirector.__init__(self, *args, **kargs)
    # register basic protocol support, in the same order urllib2 normally would
    for handler_cls in (urllib2.ProxyHandler,
                        urllib2.UnknownHandler,
                        urllib2.HTTPHandler,
                        urllib2.HTTPDefaultErrorHandler,
                        urllib2.HTTPRedirectHandler,
                        urllib2.FTPHandler,
                        urllib2.FileHandler,
                        urllib2.HTTPErrorProcessor):
        self.add_handler(handler_cls())
    # add cookie support (not strictly needed right now,
    # but you never know when it might come in handy)
    self.cj = cookielib.CookieJar()
    self.add_handler(urllib2.HTTPCookieProcessor(self.cj))
def test_download_file(self): """Test download_file function.""" fn = 'toy-0.0.tar.gz' target_location = os.path.join(self.test_buildpath, 'some', 'subdir', fn) # provide local file path as source URL test_dir = os.path.abspath(os.path.dirname(__file__)) source_url = 'file://%s/sandbox/sources/toy/%s' % (test_dir, fn) res = ft.download_file(fn, source_url, target_location) self.assertEqual(res, target_location, "'download' of local file works") # non-existing files result in None return value self.assertEqual( ft.download_file(fn, 'file://%s/nosuchfile' % test_dir, target_location), None) # install broken proxy handler for opening local files # this should make urllib2.urlopen use this broken proxy for downloading from a file:// URL proxy_handler = urllib2.ProxyHandler( {'file': 'file://%s/nosuchfile' % test_dir}) urllib2.install_opener(urllib2.build_opener(proxy_handler)) # downloading over a broken proxy results in None return value (failed download) # this tests whether proxies are taken into account by download_file self.assertEqual(ft.download_file(fn, source_url, target_location), None, "download over broken proxy fails") # restore a working file handler, and retest download of local file urllib2.install_opener(urllib2.build_opener(urllib2.FileHandler())) res = ft.download_file(fn, source_url, target_location) self.assertEqual( res, target_location, "'download' of local file works after removing broken proxy") # make sure specified timeout is parsed correctly (as a float, not a string) opts = init_config(args=['--download-timeout=5.3']) init_config(build_options={'download_timeout': opts.download_timeout}) target_location = os.path.join(self.test_prefix, 'jenkins_robots.txt') url = 'https://jenkins1.ugent.be/robots.txt' try: urllib2.urlopen(url) res = ft.download_file(fn, url, target_location) self.assertEqual(res, target_location, "download with specified timeout works") except urllib2.URLError: print "Skipping timeout test in test_download_file (working 
offline)"
def test_file(self):
    """Exercise urllib2.FileHandler on local file:// URLs.

    Covers: successful opens (returned data, headers, URL), URLError for
    unsupported host:port URL forms, and the file-vs-ftp dispatch quirk
    for URLs with a non-local host part.
    """
    import time, rfc822, socket
    h = urllib2.FileHandler()
    o = h.parent = MockOpener()

    TESTFN = test_support.TESTFN
    urlpath = sanepathname2url(os.path.abspath(TESTFN))
    towrite = "hello, world\n"
    # all of these URL forms should be served directly by FileHandler
    for url in [
        "file://localhost%s" % urlpath,
        "file://%s" % urlpath,
        "file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
        "file://%s%s" % (socket.gethostbyname(socket.gethostname()), urlpath),
        ]:
        f = open(TESTFN, "wb")
        try:
            try:
                f.write(towrite)
            finally:
                f.close()

            r = h.file_open(Request(url))
            try:
                data = r.read()
                headers = r.info()
                newurl = r.geturl()
            finally:
                r.close()
            stats = os.stat(TESTFN)
            modified = rfc822.formatdate(stats.st_mtime)
        finally:
            # always clean up the temp file, even if the open/read failed
            os.remove(TESTFN)
        self.assertEqual(data, towrite)
        self.assertEqual(headers["Content-type"], "text/plain")
        self.assertEqual(headers["Content-length"], "13")
        self.assertEqual(headers["Last-modified"], modified)

    # URL forms FileHandler should reject with URLError
    for url in [
        "file://localhost:80%s" % urlpath,
        # XXXX bug: these fail with socket.gaierror, should be URLError
##             "file://%s:80%s/%s" % (socket.gethostbyname('localhost'),
##                                    os.getcwd(), TESTFN),
##             "file://somerandomhost.ontheinternet.com%s/%s" %
##             (os.getcwd(), TESTFN),
        ]:
        try:
            f = open(TESTFN, "wb")
            try:
                f.write(towrite)
            finally:
                f.close()

            self.assertRaises(urllib2.URLError,
                              h.file_open, Request(url))
        finally:
            os.remove(TESTFN)

    h = urllib2.FileHandler()
    o = h.parent = MockOpener()
    # XXXX why does // mean ftp (and /// mean not ftp!), and where
    #  is file: scheme specified?  I think this is really a bug, and
    #  what was intended was to distinguish between URLs like:
    # file:/blah.txt (a file)
    # file://localhost/blah.txt (a file)
    # file:///blah.txt (a file)
    # file://ftp.example.com/blah.txt (an ftp URL)
    for url, ftp in [
        ("file://ftp.example.com//foo.txt", True),
        ("file://ftp.example.com///foo.txt", False),
# XXXX bug: fails with OSError, should be URLError
        ("file://ftp.example.com/foo.txt", False),
        ]:
        req = Request(url)
        try:
            h.file_open(req)
        # XXXX remove OSError when bug fixed
        except (urllib2.URLError, OSError):
            # non-local host with one or three slashes: not dispatched to ftp
            self.assert_(not ftp)
        else:
            # two slashes after the host: handed off to the ftp machinery
            self.assert_(o.req is req)
            self.assertEqual(req.type, "ftp")
#! -*- encoding:utf-8 -*- import urllib import urllib2 handler = urllib2.FileHandler() request = urllib2.Request(url='file:/D:\Py_exam\python_ex\url_open.py') opener = urllib2.build_opener(handler) f = opener.open(request) print f.read() f = urllib.urlopen(url='file:/D:\Py_exam\python_ex\url_open.py') print f.read()