def fake_urlopen(self, url):
    """Fake urlopen using test client"""
    # Canned (marker, body, headers) fixtures checked in order, mirroring
    # the original if/elif chain.
    canned = [
        ("example", "",
         {"X-Pingback": "/xmlrpc.php",
          "Content-Type": "text/html; charset=utf-8"}),
        ("localhost", '<link rel="pingback" href="/xmlrpc/">',
         {"Content-Type": "application/xhtml+xml"}),
        ("google", "PNG CONTENT",
         {"content-type": "image/png"}),
    ]
    for marker, body, hdrs in canned:
        if marker in url:
            return addinfourl(StringIO(body), hdrs, url)
    if "error" in url:
        raise URLError("Invalid ressource")
def make_request(self, connection, request):
    """Send `request` over an already-open `connection` and wrap the raw
    httplib/http.client response in an addinfourl.

    Bridges Python 2/3 API differences by probing (AttributeError for the
    request-object API, TypeError for the `buffering` kwarg).  Returns
    None when the socket or HTTP protocol layer fails.
    """
    connection.timeout = request.timeout
    try:
        try:
            # Python 3: Request exposes .selector
            connection.request(
                request.get_method(),
                request.selector,
                request.data,
                request.headers
            )
        except AttributeError:
            # Python 2: Request exposes .get_selector()
            connection.request(
                request.get_method(),
                request.get_selector(),
                request.data,
                request.headers
            )
        try:
            # Older httplib accepts buffering=True; fall back when not.
            raw_response = connection.getresponse(buffering=True)
        except TypeError:
            raw_response = connection.getresponse()
    except (socket.error, HTTPException):
        # Transport-level failure: caller is expected to handle None.
        return None
    # Back-references so the handler can later reclaim/close the connection.
    raw_response._handler = self
    raw_response._connection = connection
    try:
        # Python 3: the HTTPResponse itself is file-like.
        response = addinfourl(
            raw_response,
            raw_response.msg,
            request.get_full_url(),
            raw_response.status
        )
    except AttributeError:
        # Python 2: wrap in socket._fileobject to get readline()/readlines().
        response = addinfourl(
            socket._fileobject(raw_response, close=True),
            raw_response.msg,
            request.get_full_url(),
            raw_response.status
        )
    # _fileobject reads via recv(); delegate to read().  Harmless no-op on
    # the Python 3 path -- presumably only needed for Py2; confirm.
    raw_response.recv = raw_response.read
    response.msg = raw_response.reason
    response.raw = raw_response
    return response
def fake_urlopen(self, url):
    """Fake urlopen using test client"""
    # (needle, payload, headers) fixtures, tried in the original order.
    fixtures = (
        ('example', '',
         {'X-Pingback': '/xmlrpc.php', 'Content-Type': 'text/html'}),
        ('localhost', '<link rel="pingback" href="/xmlrpc/">',
         {'Content-Type': 'text/xhtml'}),
        ('google', 'PNG CONTENT',
         {'content-type': 'image/png'}),
    )
    for needle, payload, hdrs in fixtures:
        if needle in url:
            return addinfourl(StringIO(payload), hdrs, url)
    if 'error' in url:
        raise URLError('Invalid ressource')
def retry_using_http_NTLM_auth(self, req, auth_header_field, realm, headers):
    """Perform the 3-message NTLM handshake for `req` on a single socket.

    NTLM authenticates the *connection*, so the negotiate, challenge and
    authenticate messages must travel over the same HTTP(S) connection.
    Returns the final response wrapped in addinfourl, or None if no
    password is known or the negotiate header was already sent.
    """
    user, pw = self.passwd.find_user_password(realm, req.get_full_url())
    if pw is not None:
        # 'DOMAIN\\user' or bare 'user' -- domain changes the type-1 flags.
        user_parts = user.split('\\', 1)
        if len(user_parts) == 1:
            UserName = user_parts[0]
            DomainName = ''
            type1_flags = ntlm.NTLM_TYPE1_FLAGS & ~ntlm.NTLM_NegotiateOemDomainSupplied
        else:
            DomainName = user_parts[0].upper()
            UserName = user_parts[1]
            type1_flags = ntlm.NTLM_TYPE1_FLAGS
        # ntlm secures a socket, so we must use the same socket for the complete handshake
        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        auth = 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(user, type1_flags)
        # Avoid an infinite retry loop if we already sent this negotiate.
        if req.headers.get(self.auth_header, None) == auth:
            return None
        headers[self.auth_header] = auth
        host = req.get_host()
        if not host:
            raise urllib.request.URLError('no host given')
        h = None
        if req.get_full_url().startswith('https://'):
            h = http.client.HTTPSConnection(host)  # will parse host:port
        else:
            h = http.client.HTTPConnection(host)  # will parse host:port
        # we must keep the connection because NTLM authenticates the
        # connection, not single requests
        headers["Connection"] = "Keep-Alive"
        headers = dict((name.title(), val) for name, val in list(headers.items()))
        h.request(req.get_method(), req.get_selector(), req.data, headers)
        r = h.getresponse()
        r.begin()
        # Drain the 401 body so the connection can be reused.
        r._safe_read(int(r.getheader('content-length')))
        try:
            if r.getheader('set-cookie'):
                # this is important for some web applications that store
                # authentication-related info in cookies (it took a long
                # time to figure out)
                headers['Cookie'] = r.getheader('set-cookie')
        except TypeError:
            pass
        # remove the reference to the socket, so that it can not be closed
        # by the response object (we want to keep the socket open)
        r.fp = None
        auth_header_value = r.getheader(auth_header_field, None)
        # Strip the leading 'NTLM ' (5 chars) before parsing the challenge.
        (ServerChallenge, NegotiateFlags) = ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value[5:])
        auth = 'NTLM %s' % ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, UserName, DomainName, pw,
                                                                 NegotiateFlags)
        headers[self.auth_header] = auth
        headers["Connection"] = "Close"
        headers = dict((name.title(), val) for name, val in list(headers.items()))
        try:
            h.request(req.get_method(), req.get_selector(), req.data, headers)
            # none of the configured handlers are triggered, for example
            # redirect-responses are not handled!
            response = h.getresponse()

            def notimplemented():
                raise NotImplementedError
            # addinfourl consumers must not readline() a live NTLM socket.
            response.readline = notimplemented
            return addinfourl(response, response.msg, req.get_full_url())
        except socket.error as err:
            raise urllib.request.URLError(err)
    else:
        return None
def data_open(self, req):
    """Open a data: URL as specified in RFC 2397 (POSTed data is ignored).

    Syntax::

        dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
        mediatype := [ type "/" subtype ] *( ";" parameter )
        data      := *urlchar
        parameter := attribute "=" value
    """
    full_url = req.get_full_url()
    _scheme, rest = full_url.split(':', 1)
    mediatype, payload = rest.split(',', 1)
    # even base64 encoded data URLs might be quoted so unquote in any case:
    payload = compat_urllib_parse_unquote_to_bytes(payload)
    if mediatype.endswith(';base64'):
        mediatype = mediatype[:-7]
        payload = binascii.a2b_base64(payload)
    headers = email.message_from_string(
        'Content-type: %s\nContent-length: %d\n'
        % (mediatype or 'text/plain;charset=US-ASCII', len(payload)))
    return compat_urllib_response.addinfourl(io.BytesIO(payload), headers, full_url)
def http_response(self, req, resp):
    """Transparently decompress gzip- and deflate-encoded response bodies,
    preserving the original headers, URL, status code and message."""
    original = resp
    if resp.headers.get("content-encoding") == "gzip":
        unzipped = GzipFile(fileobj=StringIO(resp.read()), mode='r')
        resp = response.addinfourl(unzipped, original.headers,
                                   original.url, original.code)
        resp.msg = original.msg
    if resp.headers.get("content-encoding") == "deflate":
        inflated = StringIO(deflate(resp.read()))
        resp = response.addinfourl(inflated, original.headers,
                                   original.url, original.code)
        resp.msg = original.msg
    return resp
def _ExpectResponseFromFile(filename, status_code=200):
    """Build a fake HTTP response whose body is testdata/<filename>.json."""
    testdata_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'testdata', filename + '.json')
    with open(testdata_path) as f:
        body = f.read().encode('utf-8')
    return response.addinfourl(
        io.BytesIO(body), {'Content-Encoding': 'utf-8'}, '', status_code)
def command_open(self, req):
    """Run the command named by the URL's netloc and return its stdout.

    The executable is looked up under DATADIR/bin, invoked with a single
    '-' argument, and fed the request body on stdin.
    """
    headers = {'Content-Encoding': 'application/json'}
    command_name = urlparse(req.full_url).netloc
    executable = pathlib.Path(DATADIR).joinpath('bin').joinpath(command_name)
    stdout = subprocess.check_output([str(executable), '-'], input=req.data)
    return addinfourl(io.BytesIO(stdout), headers, req.full_url, code=200)
def mailman_open(self, req): list_manager = getUtility(IListManager) # Parse urls of the form: # # mailman:///<fqdn_listname|list_id>/<language>/<template_name> # # where only the template name is required. mlist = code = template = None # Parse the full requested URL and be sure it's something we handle. original_url = req.get_full_url() parsed = urlparse(original_url) assert parsed.scheme == 'mailman' # The path can contain one, two, or three components. Since no empty # path components are legal, filter them out. parts = [p for p in parsed.path.split('/') if p] if len(parts) == 0: raise URLError('No template specified') elif len(parts) == 1: template = parts[0] elif len(parts) == 2: part0, template = parts # Is part0 a language code or a mailing list? This is rather # tricky because if it's a mailing list, it could be a list-id and # that will contain dots, as could the language code. language = getUtility(ILanguageManager).get(part0) if language is None: # part0 must be a fqdn-listname or list-id. mlist = (list_manager.get(part0) if '@' in part0 else list_manager.get_by_list_id(part0)) if mlist is None: raise URLError('Bad language or list name') else: code = language.code elif len(parts) == 3: part0, code, template = parts # part0 could be an fqdn-listname or a list-id. mlist = (getUtility(IListManager).get(part0) if '@' in part0 else getUtility(IListManager).get_by_list_id(part0)) if mlist is None: raise URLError('Missing list') language = getUtility(ILanguageManager).get(code) if language is None: raise URLError('No such language') code = language.code else: raise URLError('No such file') # Find the template, mutating any missing template exception. try: path, fp = find(template, mlist, code) except TemplateNotFoundError: raise URLError('No such file') return addinfourl(fp, {}, original_url)
def _prepare_mocks(mock_open, mock_urlopen, mock_tar_open):
    """Wire up the urlopen/tarfile/open mocks used by the rule-download tests.

    Simulates downloading a tar.gz whose members include a non-file entry,
    two path-traversal names, and one legitimate rules file.  Returns
    (out, mock_tar): the mocked output file handle and the mocked TarFile.
    """
    mock_urlopen.return_value = response.addinfourl(
        io.BytesIO('some raw tar.gz'.encode('utf-8')),
        {'content-encoding': 'utf-8'}, '', 200)
    mock_tar = mock.create_autospec(tarfile.TarFile)
    mock_tar_open.return_value.__enter__.return_value = mock_tar
    # First file, not a file...
    tar_info_not_file = mock.create_autospec(tarfile.TarInfo)
    tar_info_not_file.isfile.return_value = False
    # Second file is dangerous.
    tar_info_dot_dot = mock.create_autospec(tarfile.TarInfo)
    tar_info_dot_dot.isfile.return_value = True
    # `name` must be set on the class: autospec instances mock attribute
    # access through the spec'd class.
    type(tar_info_dot_dot).name = '../../etc/shadow'
    # Third file is dangerous too.
    tar_info_slash = mock.create_autospec(tarfile.TarInfo)
    tar_info_slash.isfile.return_value = True
    type(tar_info_slash).name = '/etc/hosts'
    # Next file contains rules!
    tar_info_rule = mock.create_autospec(tarfile.TarInfo)
    tar_info_rule.isfile.return_value = True
    type(tar_info_rule).name = 'I.contain.rules'
    mock_tar.getmembers.return_value = [
        tar_info_not_file,
        tar_info_dot_dot,
        tar_info_slash,
        tar_info_rule,
    ]
    # Simulate the tar member file that is going to be open.
    infile = mock.create_autospec(io.FileIO)
    infile.readlines.return_value = [
        b'# hey that\'s a nice file you have here\n',
        b'# alert udp $HOME_NET any -> $EXTERNAL_NET 53 (msg:"foobar" sid:0;)',
        b'# drop udp $HOME_NET any -> $EXTERNAL_NET 53 (msg:"foobar" sid:1;)',
        b'alert udp $HOME_NET any -> $EXTERNAL_NET 53 (msg:"foobar" sid:2;)',
        b'drop udp $HOME_NET any -> $EXTERNAL_NET 53 (msg:"foobar" sid:3;)',
        b'\n',
        b'alert udp $HOME_NET any -> $EXTERNAL_NET 53 (msg:"foobar" sid:4;)',
        b'drop udp $HOME_NET any -> $EXTERNAL_NET 53 (msg:"ddos" sid:5;)',
    ]
    mock_tar.extractfile.return_value = infile
    # Simulate the output file that is going to be open.
    out = mock.create_autospec(io.FileIO)
    mock_open.return_value.__enter__.return_value = out
    return out, mock_tar
def wrapper(client, method, url, body=None, headers=None):
    """Issue a request while persisting cookies across calls.

    Cookies are stored in client.credential[field] in LWP cookie-string
    format.  `field` and `request` are free variables from the enclosing
    scope -- TODO confirm their definitions.
    """
    cookiejar = LWPCookieJar()
    # _really_load is a private cookiejar API; prime the jar from the
    # stored LWP-format string (the header line is mandatory).
    cookiejar._really_load(
        StringIO("#LWP-Cookies-2.0\n" + client.credential.get(field, '')),
        "cookies.txt", True, True)
    req = Request(url, body, headers or {}, method=method)
    cookiejar.clear_expired_cookies()
    cookiejar.add_cookie_header(req)
    status, headers, body = request(
        client, req.method, req.full_url, req.data, dict(req.header_items()))
    # No body file is needed just to harvest Set-Cookie headers, hence
    # fp=None in the synthetic response.
    response = addinfourl(None, headers, req.full_url, status)
    cookiejar.extract_cookies(response, req)
    client.credential[field] = cookiejar.as_lwp_str()
    return (status, headers, body)
def _make_response(self, result, url):
    """Convert an internal fetch `result` into an addinfourl response.

    `result` must expose .header_items (pairs), .data (bytes) and
    .status ("<code> <message>").
    """
    header_text = "\r\n".join("%s: %s" % (k, v)
                              for k, v in result.header_items)
    if PY2:
        parsed_headers = HTTPMessage(BytesIO(header_text))
    else:
        import email
        parsed_headers = email.message_from_string(header_text)
    resp = addinfourl(BytesIO(result.data), parsed_headers, url)
    status_code, status_msg = result.status.split(None, 1)
    resp.code = int(status_code)
    resp.msg = status_msg
    return resp
def mailman_open(self, req):
    """Resolve a mailman:// template URL to an open file wrapped in addinfourl.

    Handles mailman:///<fqdn_listname>/<language>/<template_name>, where
    only the template name is required.  Raises URLError for unknown
    lists, languages or templates.
    """
    # Parse urls of the form:
    #
    # mailman:///<fqdn_listname>/<language>/<template_name>
    #
    # where only the template name is required.
    mlist = code = template = None
    # Parse the full requested URL and be sure it's something we handle.
    original_url = req.get_full_url()
    parsed = urlparse(original_url)
    assert parsed.scheme == 'mailman'
    # The path can contain one, two, or three components.  Since no empty
    # path components are legal, filter them out.
    parts = [p for p in parsed.path.split('/') if p]
    if len(parts) == 0:
        raise URLError('No template specified')
    elif len(parts) == 1:
        template = parts[0]
    elif len(parts) == 2:
        part0, template = parts
        # Is part0 a language code or a mailing list?  It better be one or
        # the other, and there's no possibility of namespace collisions
        # because language codes don't contain @ and mailing list names
        # MUST contain @.
        language = getUtility(ILanguageManager).get(part0)
        mlist = getUtility(IListManager).get(part0)
        if language is None and mlist is None:
            raise URLError('Bad language or list name')
        elif mlist is None:
            code = language.code
    elif len(parts) == 3:
        fqdn_listname, code, template = parts
        mlist = getUtility(IListManager).get(fqdn_listname)
        if mlist is None:
            raise URLError('Missing list')
        language = getUtility(ILanguageManager).get(code)
        if language is None:
            raise URLError('No such language')
        code = language.code
    else:
        raise URLError('No such file')
    # Find the template, mutating any missing template exception.
    try:
        path, fp = find(template, mlist, code)
    except TemplateNotFoundError:
        raise URLError('No such file')
    return addinfourl(fp, {}, original_url)
def nfs_open(self, req):
    """Simulate mounting an NFS export and serve the file from `tempdir`."""
    remote_dir, file_name = os.path.split(req.full_url)
    print('Simulazione montaggio FauxNFSHandler:')
    print(' Percorso remoto : {}'.format(remote_dir))
    print(' Server : {}'.format(req.host))
    print(' Percorso locale : {}'.format(os.path.basename(tempdir)))
    print(' Nome file : {}'.format(file_name))
    fp = NFSFile(tempdir, file_name)
    guessed_type = mimetypes.guess_type(file_name)[0]
    local_file = os.path.join(tempdir, file_name)
    headers = {
        'Content-type': guessed_type or 'application/octet-stream',
        'Content-length': os.stat(local_file).st_size,
    }
    return response.addinfourl(fp, headers, req.get_full_url())
def do_open(self, http_class, req):
    """Return an addinfourl object for the request, using http_class.

    http_class must implement the HTTPConnection API from httplib.
    The addinfourl return value is a file-like object.  It also
    has methods and attributes including:

        - info(): return a mimetools.Message object for the headers
        - geturl(): return the original request URL
        - code: HTTP status code
    """
    host = req.host
    if not host:
        # BUG FIX: the original raised `urlllib2.URLError` -- a misspelled
        # module name that would itself fail with NameError at runtime.
        # Use the same URLError type raised elsewhere in this method.
        raise urllib.error.URLError('no host given')

    h = http_class(host, timeout=req.timeout)  # will parse host:port
    h.set_debuglevel(self._debuglevel)

    headers = dict(req.unredirected_hdrs)
    headers.update(dict((k, v) for k, v in list(req.headers.items())
                        if k not in headers))

    # We want to make an HTTP/1.1 request, but the addinfourl
    # class isn't prepared to deal with a persistent connection.
    # It will try to read all remaining data from the socket,
    # which will block while the server waits for the next request.
    # So make sure the connection gets closed after the (only)
    # request.
    # Don't close connection when connection_cache is enabled,
    if fetch.connection_cache is None:
        headers["Connection"] = "close"
    else:
        headers["Connection"] = "Keep-Alive"  # Works for HTTP/1.0

    headers = dict(
        (name.title(), val) for name, val in list(headers.items()))

    if req._tunnel_host:
        tunnel_headers = {}
        proxy_auth_hdr = "Proxy-Authorization"
        if proxy_auth_hdr in headers:
            tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
            # Proxy-Authorization should not be sent to origin server.
            del headers[proxy_auth_hdr]
        h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

    try:
        h.request(req.get_method(), req.selector, req.data, headers)
    except socket.error as err:  # XXX what error?
        # Don't close connection when cache is enabled.
        if fetch.connection_cache is None:
            h.close()
        raise urllib.error.URLError(err)
    else:
        try:
            r = h.getresponse(buffering=True)
        except TypeError:  # buffering kw not supported
            r = h.getresponse()

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.
        #
        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.
        #
        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.
        r.recv = r.read

        # no data, just have to read r.read()
        class fp_dummy(object):
            def read(self):
                return ""

            def readline(self):
                return ""

            def close(self):
                pass

        resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason

        # Close connection when server request it.
        if fetch.connection_cache is not None:
            if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                fetch.connection_cache.remove_connection(h.host, h.port)

        return resp
def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
    """Treat a 303 as a normal result: hand back the body as an addinfourl."""
    redirect_response = addinfourl(fp, headers, url, errcode)
    return redirect_response
def smb_open(self, req):
    """Open an smb:// URL via pysmb's SMBConnection.

    Uploads req data to the share when present, otherwise retrieves the
    remote file into a temp file.  Wraps any failure in URLError.
    """
    global USE_NTLM, MACHINE_NAME
    host = req.get_host()
    if not host:
        raise urllib.error.URLError('SMB error: no host given')
    host, port = splitport(host)
    if port is None:
        # Default NetBIOS session service port.
        port = 139
    else:
        port = int(port)

    # username/password handling
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = user or ''
    passwd = passwd or ''
    myname = MACHINE_NAME or self.generateClientMachineName()

    # SMBConnection needs the server's NetBIOS machine name; resolve it.
    n = NetBIOS()
    names = n.queryIPForName(host)
    if names:
        server_name = names[0]
    else:
        raise urllib.error.URLError('SMB error: Hostname does not reply back with its machine name')

    path, attrs = splitattr(req.get_selector())
    if path.startswith('/'):
        path = path[1:]
    dirs = path.split('/')
    dirs = list(map(unquote, dirs))
    # First path component is the share name; the rest is the file path.
    service, path = dirs[0], '/'.join(dirs[1:])

    try:
        conn = SMBConnection(user, passwd, myname, server_name, use_ntlm_v2 = USE_NTLM)
        conn.connect(host, port)
        headers = email.message.Message()
        if req.has_data():
            data_fp = req.get_data()
            filelen = conn.storeFile(service, path, data_fp)
            headers.add_header('Content-length', '0')
            fp = BytesIO(b"")
        else:
            fp = self.createTempFile()
            file_attrs, retrlen = conn.retrieveFile(service, path, fp)
            fp.seek(0)
        mtype = mimetypes.guess_type(req.get_full_url())[0]
        if mtype:
            headers.add_header('Content-type', mtype)
        # NOTE(review): on the upload path `retrlen` is never bound, so
        # this check would raise NameError there -- confirm intended flow.
        if retrlen is not None and retrlen >= 0:
            headers.add_header('Content-length', '%d' % retrlen)
        return addinfourl(fp, headers, req.get_full_url())
    except Exception as ex:
        raise urllib.error.URLError('smb error: %s' % ex).with_traceback(sys.exc_info()[2])
def http_error_302(self, reqst, fp, code, msg, head):
    """Do not follow the redirect; return the 302 response itself.

    BUG FIX: since Python 3.9 `addinfourl.status` is a read-only property
    mirroring `.code`, so the original `infourl.status = code` raised
    AttributeError.  Passing `code` to the constructor sets `.code` and
    thereby `.status`; older Pythons get the attribute set explicitly.
    """
    infourl = addinfourl(fp, head, reqst.get_full_url(), code)
    if not hasattr(infourl, 'status'):
        # Python < 3.9 compatibility: mirror the old explicit attribute.
        infourl.status = code
    return infourl
def ftp_open(self, req):
    """Open an FTP URL with HTTP Range-header support (byte ranges).

    A Range header is translated into an FTP REST offset, and the
    returned file object is range-limited when an upper bound is given.
    """
    host = req.get_host()
    if not host:
        raise IOError('ftp error', 'no host given')
    parsed = urlparse(host)
    port = parsed.port or ftplib.FTP_PORT
    user = unquote(parsed.username or '')
    # NOTE(review): urlparse results expose `.password`, not `.passwd`;
    # this line likely raises AttributeError -- confirm (fixed in the
    # sibling implementation of this handler).
    passwd = unquote(parsed.passwd or '')
    host = unquote(host)
    try:
        host = socket.gethostbyname(host)
    except socket.error as msg:
        raise urllib.error.URLError(msg)
    path, attrs = splitattr(req.get_selector())
    dirs = path.split('/')
    dirs = list(map(unquote, dirs))
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]:
        dirs = dirs[1:]
    try:
        fw = self.connect_ftp(user, passwd, host, port, dirs)
        # 'I' (binary) for files, 'D' (directory listing) otherwise.
        type = file and 'I' or 'D'
        for attr in attrs:
            attr, value = splitattr(attr)
            if attr.lower() == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        # -- range support modifications start here
        rest = None
        range_tup = range_header_to_tuple(req.headers.get('Range', None))
        assert range_tup != ()
        if range_tup:
            (fb, lb) = range_tup
            if fb > 0:
                rest = fb
        # -- range support modifications end here
        fp, retrlen = fw.retrfile(file, type, rest)
        # -- range support modifications start here
        if range_tup:
            (fb, lb) = range_tup
            if lb == '':
                # Open-ended range: derive the end from the full length.
                if retrlen is None or retrlen == 0:
                    raise RangeError(9, 'Requested Range Not Satisfiable due to unobtainable file length.')
                lb = retrlen
                retrlen = lb - fb
                if retrlen < 0:
                    # beginning of range is larger than file
                    raise RangeError(9, 'Requested Range Not Satisfiable')
            else:
                retrlen = lb - fb
            fp = RangeableFileObject(fp, (0, retrlen))
        # -- range support modifications end here
        headers = b""
        mtype = mimetypes.guess_type(req.get_full_url())[0]
        if mtype:
            # NOTE(review): bytes %s with a str mtype raises TypeError on
            # Python 3 -- confirm intended str/bytes handling.
            headers += b"Content-Type: %s\n" % mtype
        if retrlen is not None and retrlen >= 0:
            headers += b"Content-Length: %d\n" % retrlen
        sf = BytesIO(headers)
        # NOTE(review): email.message.Message() does not parse a file
        # object; email.message_from_bytes is probably intended here.
        headers = email.message.Message(sf)
        return addinfourl(fp, headers, req.get_full_url())
    except ftplib.all_errors as msg:
        error = IOError('ftp error', msg)
        six.reraise(error.__class__, error, sys.exc_info()[2])
def do_open(self, http_class, req):
    """Return an addinfourl object for the request, using http_class.

    http_class must implement the HTTPConnection API from httplib.
    The addinfourl return value is a file-like object.  It also
    has methods and attributes including:

        - info(): return a mimetools.Message object for the headers
        - geturl(): return the original request URL
        - code: HTTP status code
    """
    host = req.host
    if not host:
        # BUG FIX: the original raised `urlllib2.URLError` -- a misspelled
        # module name that would itself fail with NameError at runtime.
        # Use the same URLError type raised elsewhere in this method.
        raise urllib.error.URLError('no host given')

    h = http_class(host, timeout=req.timeout)  # will parse host:port
    h.set_debuglevel(self._debuglevel)

    headers = dict(req.unredirected_hdrs)
    headers.update(dict((k, v) for k, v in list(req.headers.items())
                        if k not in headers))

    # We want to make an HTTP/1.1 request, but the addinfourl
    # class isn't prepared to deal with a persistent connection.
    # It will try to read all remaining data from the socket,
    # which will block while the server waits for the next request.
    # So make sure the connection gets closed after the (only)
    # request.
    # Don't close connection when connection_cache is enabled,
    if fetch.connection_cache is None:
        headers["Connection"] = "close"
    else:
        headers["Connection"] = "Keep-Alive"  # Works for HTTP/1.0

    headers = dict(
        (name.title(), val) for name, val in list(headers.items()))

    if req._tunnel_host:
        tunnel_headers = {}
        proxy_auth_hdr = "Proxy-Authorization"
        if proxy_auth_hdr in headers:
            tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
            # Proxy-Authorization should not be sent to origin server.
            del headers[proxy_auth_hdr]
        h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

    try:
        h.request(req.get_method(), req.selector, req.data, headers)
    except socket.error as err:  # XXX what error?
        # Don't close connection when cache is enabled.
        # Instead, try to detect connections that are no longer
        # usable (for example, closed unexpectedly) and remove
        # them from the cache.
        if fetch.connection_cache is None:
            h.close()
        elif isinstance(err, OSError) and err.errno == errno.EBADF:
            # This happens when the server closes the connection despite
            # the Keep-Alive.  Apparently urllib then uses the file
            # descriptor, expecting it to be connected, when in reality
            # the connection is already gone.  We let the request fail
            # and expect it to be tried once more ("try_again" in
            # check_status()), with the dead connection removed from the
            # cache.  If it still fails, we give up, which can happend
            # for bad HTTP proxy settings.
            fetch.connection_cache.remove_connection(h.host, h.port)
        raise urllib.error.URLError(err)
    else:
        try:
            r = h.getresponse(buffering=True)
        except TypeError:  # buffering kw not supported
            r = h.getresponse()

        # Pick apart the HTTPResponse object to get the addinfourl
        # object initialized properly.
        #
        # Wrap the HTTPResponse object in socket's file object adapter
        # for Windows.  That adapter calls recv(), so delegate recv()
        # to read().  This weird wrapping allows the returned object to
        # have readline() and readlines() methods.
        #
        # XXX It might be better to extract the read buffering code
        # out of socket._fileobject() and into a base class.
        r.recv = r.read

        # no data, just have to read r.read()
        class fp_dummy(object):
            def read(self):
                return ""

            def readline(self):
                return ""

            def close(self):
                pass

        resp = addinfourl(fp_dummy(), r.msg, req.get_full_url())
        resp.code = r.status
        resp.msg = r.reason

        # Close connection when server request it.
        if fetch.connection_cache is not None:
            if 'Connection' in r.msg and r.msg['Connection'] == 'close':
                fetch.connection_cache.remove_connection(h.host, h.port)

        return resp
def retry_using_http_NTLM_auth(self, req, auth_header_field, realm, headers):
    """Perform the 3-message NTLM handshake for `req` on a single socket.

    NTLM authenticates the *connection*, so the negotiate, challenge and
    authenticate messages must travel over the same HTTP(S) connection.
    Returns the final response wrapped in addinfourl, or None if no
    password is known or the negotiate header was already sent.
    """
    user, pw = self.passwd.find_user_password(realm, req.get_full_url())
    if pw is not None:
        # 'DOMAIN\\user' or bare 'user' -- domain changes the type-1 flags.
        user_parts = user.split('\\', 1)
        if len(user_parts) == 1:
            UserName = user_parts[0]
            DomainName = ''
            type1_flags = ntlm.NTLM_TYPE1_FLAGS & ~ntlm.NTLM_NegotiateOemDomainSupplied
        else:
            DomainName = user_parts[0].upper()
            UserName = user_parts[1]
            type1_flags = ntlm.NTLM_TYPE1_FLAGS
        # ntlm secures a socket, so we must use the same socket for the complete handshake
        headers = dict(req.headers)
        headers.update(req.unredirected_hdrs)
        auth = 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
            user, type1_flags
        ).decode('ascii')
        # Avoid an infinite retry loop if we already sent this negotiate.
        if req.headers.get(self.auth_header, None) == auth:
            return None
        headers[self.auth_header] = auth
        host = req.host
        if not host:
            raise urllib.request.URLError('no host given')
        h = None
        if req.get_full_url().startswith('https://'):
            h = http.client.HTTPSConnection(host)  # will parse host:port
        else:
            h = http.client.HTTPConnection(host)  # will parse host:port
        # we must keep the connection because NTLM authenticates the
        # connection, not single requests
        headers["Connection"] = "Keep-Alive"
        headers = dict((name.title(), val) for name, val in list(headers.items()))
        h.request(req.get_method(), req.selector, req.data, headers)
        r = h.getresponse()
        r.begin()
        # Drain the 401 body so the connection can be reused.
        r._safe_read(int(r.getheader('content-length')))
        try:
            if r.getheader('set-cookie'):
                # this is important for some web applications that store
                # authentication-related info in cookies (it took a long
                # time to figure out)
                headers['Cookie'] = r.getheader('set-cookie')
        except TypeError:
            pass
        # remove the reference to the socket, so that it can not be closed
        # by the response object (we want to keep the socket open)
        r.fp = None
        auth_header_value = r.getheader(auth_header_field, None)
        # some Exchange servers send two WWW-Authenticate headers, one with
        # the NTLM challenge and another with the 'Negotiate' keyword -
        # make sure we operate on the right one
        m = re.match('(NTLM [A-Za-z0-9+\-/=]+)', auth_header_value)
        if m:
            auth_header_value, = m.groups()
        # Strip the leading 'NTLM ' (5 chars) before parsing the challenge.
        (ServerChallenge, NegotiateFlags) = ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value[5:])
        auth = 'NTLM %s' % ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
            ServerChallenge, UserName, DomainName, pw, NegotiateFlags
        ).decode('ascii')
        headers[self.auth_header] = auth
        headers["Connection"] = "Close"
        headers = dict((name.title(), val) for name, val in list(headers.items()))
        try:
            h.request(req.get_method(), req.selector, req.data, headers)
            # none of the configured handlers are triggered, for example
            # redirect-responses are not handled!
            response = h.getresponse()

            def notimplemented():
                raise NotImplementedError
            # addinfourl consumers must not readline() a live NTLM socket.
            response.readline = notimplemented
            return addinfourl(response, response.msg, req.get_full_url(), response.code)
        except socket.error as err:
            raise urllib.request.URLError(err)
    else:
        return None
def mock_response(req, data: str):
    """Return an HttpResponse object with the given data.

    BUG FIX: BytesIO requires bytes, but the annotated parameter type is
    str, so the original `BytesIO(data)` raised TypeError for every
    documented call.  Encode str payloads; accept pre-encoded bytes too.
    """
    body = data.encode("utf-8") if isinstance(data, str) else data
    resp = addinfourl(BytesIO(body), "message", req.get_full_url())
    resp.code = 200
    resp.msg = "OK"
    return resp
def http_error_default(self, req, fp, code, msg, headers):
    """Return every HTTP error response as a normal result object.

    BUG FIX: since Python 3.9 `addinfourl.status` is a read-only property
    mirroring `.code`, so the original `infourl.status = code` raised
    AttributeError.  Passing `code` to the constructor sets `.code` and
    thereby `.status`; older Pythons get the attribute set explicitly.
    """
    infourl = addinfourl(fp, headers, req.get_full_url(), code)
    if not hasattr(infourl, 'status'):
        # Python < 3.9 compatibility: mirror the old explicit attribute.
        infourl.status = code
    infourl.msg = msg
    return infourl
def http_error_303(self, req, fp, code, msg, headers):
    """Return the 303 response itself instead of following the redirect.

    BUG FIX: since Python 3.9 `addinfourl.status` is a read-only property
    mirroring `.code`, so the original `infourl.status = code` raised
    AttributeError.  Passing `code` to the constructor sets `.code` and
    thereby `.status`; older Pythons get the attribute set explicitly.
    """
    infourl = addinfourl(fp, headers, req.get_full_url(), code)
    if not hasattr(infourl, 'status'):
        # Python < 3.9 compatibility: mirror the old explicit attribute.
        infourl.status = code
    return infourl
def _get_mock_response(self, content): return response.addinfourl( io.BytesIO(content.encode('utf-8')), {'content-encoding': 'utf-8'}, '', 200)
def ftp_open(self, req):
    """
    When ftp requests are made using this handler, this function gets
    called at some point, and it in turn calls the ``connect_ftp``
    method.  In this subclass's reimplementation of ``connect_ftp``, the
    FQDN of the request's host is needed for looking up login
    credentials in the password manager.  However, by the time
    ``connect_ftp`` is called, that information has been stripped away,
    and the host argument passed to ``connect_ftp`` contains only the
    host's IP address instead of the FQDN.  This reimplementation of
    ``ftp_open``, which is little more than a copy-and-paste from the
    superclass's implementation, captures the original host FQDN before
    it is replaced with the IP address and saves it for later use.

    This reimplementation also ensures that the file size appears in the
    response header by querying for it directly.  For some FTP servers
    the original implementation should handle this (``retrlen`` should
    contain the file size).  However, for others this can fail silently
    due to the server response not matching an anticipated regular
    expression.
    """
    import sys
    import email
    import socket
    from urllib.error import URLError
    from urllib.parse import splitattr, splitpasswd, splitvalue
    from urllib.response import addinfourl
    ####################################################
    # COPIED FROM FTPHandler.ftp_open (PYTHON 3.6.6)   #
    # WITH JUST A FEW ADDITIONS                        #
    ####################################################
    import ftplib
    import mimetypes
    host = req.host
    if not host:
        raise URLError('ftp error: no host given')
    # splitport/splituser/unquote come from the enclosing module's
    # namespace -- presumably imported at file level; confirm.
    host, port = splitport(host)
    if port is None:
        port = ftplib.FTP_PORT
    else:
        port = int(port)

    # username/password handling
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = user or ''
    passwd = passwd or ''

    ############################################
    # DIFFERENT FROM FTPHandler.ftp_open
    # save the host FQDN for later
    self.last_req_host = host
    ############################################
    try:
        host = socket.gethostbyname(host)
    except OSError as msg:
        raise URLError(msg)
    path, attrs = splitattr(req.selector)
    dirs = path.split('/')
    dirs = list(map(unquote, dirs))
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]:
        dirs = dirs[1:]
    try:
        fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
        # 'I' (binary) for a file, 'D' (directory listing) otherwise.
        type = file and 'I' or 'D'
        for attr in attrs:
            attr, value = splitvalue(attr)
            if attr.lower() == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        ############################################
        # DIFFERENT FROM FTPHandler.ftp_open
        # Query the size directly in case retrlen stays unknown.
        size = fw.ftp.size(file)
        ############################################
        fp, retrlen = fw.retrfile(file, type)
        headers = ""
        mtype = mimetypes.guess_type(req.full_url)[0]
        if mtype:
            headers += "Content-type: %s\n" % mtype
        if retrlen is not None and retrlen >= 0:
            headers += "Content-length: %d\n" % retrlen
        ############################################
        # DIFFERENT FROM FTPHandler.ftp_open
        elif size is not None and size >= 0:
            headers += "Content-length: %d\n" % size
        ############################################
        headers = email.message_from_string(headers)
        return addinfourl(fp, headers, req.full_url)
    except ftplib.all_errors as exp:
        exc = URLError('ftp error: %r' % exp)
        raise exc.with_traceback(sys.exc_info()[2])
def ftp_open(self, req):
    """Open an FTP URL with HTTP Range-header support (byte ranges).

    A Range header is translated into an FTP REST offset, and the
    returned file object is range-limited when an upper bound is given.

    BUG FIXES versus the previous revision:
      * urlparse results expose ``.password``, not ``.passwd`` (the old
        attribute access raised AttributeError whenever a password was
        present in the URL).
      * The response headers were assembled as bytes and fed to
        ``email.message.Message(sf)`` -- ``%s`` on bytes with a str
        ``mtype`` raises TypeError on Python 3, and ``Message()`` does
        not parse a file object.  Build a str and parse it with
        ``email.message_from_string`` instead.
    """
    host = req.get_host()
    if not host:
        raise IOError('ftp error', 'no host given')
    parsed = urlparse(host)
    port = parsed.port or ftplib.FTP_PORT
    user = unquote(parsed.username or '')
    passwd = unquote(parsed.password or '')
    host = unquote(host)
    try:
        host = socket.gethostbyname(host)
    except socket.error as msg:
        raise urllib.error.URLError(msg)
    path, attrs = splitattr(req.get_selector())
    dirs = path.split('/')
    dirs = list(map(unquote, dirs))
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]:
        dirs = dirs[1:]
    try:
        fw = self.connect_ftp(user, passwd, host, port, dirs)
        # 'I' (binary) for a file, 'D' (directory listing) otherwise.
        type = file and 'I' or 'D'
        for attr in attrs:
            attr, value = splitattr(attr)
            if attr.lower() == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        # -- range support modifications start here
        rest = None
        range_tup = range_header_to_tuple(req.headers.get('Range', None))
        assert range_tup != ()
        if range_tup:
            (fb, lb) = range_tup
            if fb > 0:
                rest = fb
        # -- range support modifications end here
        fp, retrlen = fw.retrfile(file, type, rest)
        # -- range support modifications start here
        if range_tup:
            (fb, lb) = range_tup
            if lb == '':
                # Open-ended range: derive the end from the full length.
                if retrlen is None or retrlen == 0:
                    raise RangeError(
                        9,
                        'Requested Range Not Satisfiable due to unobtainable file length.'
                    )
                lb = retrlen
                retrlen = lb - fb
                if retrlen < 0:
                    # beginning of range is larger than file
                    raise RangeError(9, 'Requested Range Not Satisfiable')
            else:
                retrlen = lb - fb
            fp = RangeableFileObject(fp, (0, retrlen))
        # -- range support modifications end here
        headers = ""
        mtype = mimetypes.guess_type(req.get_full_url())[0]
        if mtype:
            headers += "Content-Type: %s\n" % mtype
        if retrlen is not None and retrlen >= 0:
            headers += "Content-Length: %d\n" % retrlen
        headers = email.message_from_string(headers)
        return addinfourl(fp, headers, req.get_full_url())
    except ftplib.all_errors as msg:
        error = IOError('ftp error', msg)
        six.reraise(error.__class__, error, sys.exc_info()[2])
def get_response(self):
    """Returns a copy of the current response."""
    body = BytesIO(self.data)
    return addinfourl(body, self._response.info(), self._response.geturl())
def _get_mock_response(self, content): return response.addinfourl(io.BytesIO(content.encode('utf-8')), {'content-encoding': 'utf-8'}, '', 200)
def get_response_like_object(proxy_request):
    """Package the captured body and metadata as an addinfourl response.

    `content`, `headers` and `retcode` are free variables taken from the
    enclosing scope.
    """
    target_url = proxy_request.get_full_url()
    body = BytesIO(content)
    return addinfourl(body, headers, target_url, retcode)
def smb_open(self, req):
    """Open an smb:// URL via pysmb (upload req.data, or download).

    Supports 'domain;user:pass@server_name,host:port/share/path' URLs;
    when no server_name is given it is resolved via NetBIOS.
    """
    global USE_NTLM, MACHINE_NAME
    if not req.host:
        raise urllib.error.URLError('SMB error: no host given')
    host, port = splitport(req.host)
    if port is None:
        # Default NetBIOS session service port.
        port = 139
    else:
        port = int(port)

    # username/password handling
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = user or ''
    domain = ''
    if ';' in user:
        # 'DOMAIN;user' syntax.
        domain, user = user.split(';', 1)
    passwd = passwd or ''
    myname = MACHINE_NAME or self.generateClientMachineName()
    # Optional 'server_name,host' form lets the caller skip NetBIOS lookup.
    server_name, host = host.split(',') if ',' in host else [None, host]
    if server_name is None:
        n = NetBIOS()
        names = n.queryIPForName(host)
        if names:
            server_name = names[0]
        else:
            raise urllib.error.URLError(
                'SMB error: Hostname does not reply back with its machine name'
            )
    path, attrs = splitattr(req.selector)
    if path.startswith('/'):
        path = path[1:]
    dirs = path.split('/')
    dirs = list(map(unquote, dirs))
    # First path component is the share name; the rest is the file path.
    service, path = dirs[0], '/'.join(dirs[1:])
    try:
        conn = SMBConnection(user, passwd, myname, server_name,
                             domain=domain, use_ntlm_v2=USE_NTLM)
        print('about to connect')
        conn.connect(host, port)
        headers = email.message.Message()
        if req.data:
            filelen = conn.storeFile(service, path, req.data)
            headers.add_header('Content-length', '0')
            fp = BytesIO(b"")
        else:
            fp = self.createTempFile()
            file_attrs, retrlen = conn.retrieveFile(service, path, fp)
            fp.seek(0)
        mtype = mimetypes.guess_type(req.get_full_url())[0]
        if mtype:
            headers.add_header('Content-type', mtype)
        # NOTE(review): on the upload path `retrlen` is never bound, so
        # this check would raise NameError there -- confirm intended flow.
        if retrlen is not None and retrlen >= 0:
            headers.add_header('Content-length', '%d' % retrlen)
        return addinfourl(fp, headers, req.get_full_url())
    except Exception as ex:
        raise urllib.error.URLError('smb error: %s' % ex).with_traceback(
            sys.exc_info()[2])
def http_error_302(self, req, fp, code, msg, headers):
    """Surface the 302 response directly, addressed at its Location target.

    Uses the Python 2 ``urllib.addinfourl`` API, where ``status`` is a
    plain writable attribute.
    """
    target = headers["Location"]
    redirect_response = urllib.addinfourl(fp, headers, target)
    redirect_response.status = code
    redirect_response.code = code
    return redirect_response
def noop_open(self, req):
    """Echo the request body (run through modify_from_query) as a 200 reply."""
    payload = modify_from_query(req.full_url, req.data)
    return addinfourl(
        io.BytesIO(payload),
        {'Content-Encoding': 'application/json'},
        req.full_url,
        code=200,
    )
def _GetResp(data, status_code=200): return response.addinfourl( io.BytesIO(data.encode('utf-8')), {'Content-Encoding': 'utf-8'}, '', status_code)