def Dispatch(self, request, outfile, base_env_dict=None):
    """Handle post dispatch.

    This dispatcher will handle all uploaded files in the POST request, store
    the results in the blob-storage, close the upload session and transform
    the original request in to one where the uploaded files have external
    bodies.

    Returns:
      New AppServerRequest indicating request forward to upload success handler.
    """
    # Blob uploads must arrive via POST; reject anything else.
    if base_env_dict['REQUEST_METHOD'] != 'POST':
        outfile.write('Status: 400\n\n')
        return

    # The datastore key of the upload session is embedded in the URL.
    upload_key = re.match(UPLOAD_URL_PATTERN, request.relative_url).group(1)
    try:
        upload_session = datastore.Get(upload_key)
    except datastore_errors.EntityNotFoundError:
        upload_session = None

    if upload_session:
        success_path = upload_session['success_path']

        upload_form = cgi.FieldStorage(fp=request.infile,
                                       headers=request.headers,
                                       environ=base_env_dict)

        try:
            # Rewrite the multipart form: uploaded file bodies are stored in
            # blob storage and replaced by external-body references.
            mime_message_string = self.__cgi_handler.GenerateMIMEMessageString(
                upload_form)
            # Sessions are single-use: delete before forwarding so a retry
            # cannot replay the same upload.
            datastore.Delete(upload_session)
            self.current_session = upload_session

            # Split the generated MIME document into header and content parts
            # and normalize bare LFs to the CRLFs HTTP requires.
            header_end = mime_message_string.find('\n\n') + 1
            content_start = header_end + 1
            header_text = mime_message_string[:header_end].replace('\n', '\r\n')
            content_text = mime_message_string[content_start:].replace('\n', '\r\n')

            complete_headers = ('%s'
                                'Content-Length: %d\r\n'
                                '\r\n') % (header_text, len(content_text))

            # Forward the rewritten request to the app's success handler with
            # admin rights so the handler can access the blob records.
            return dev_appserver.AppServerRequest(
                success_path,
                None,
                mimetools.Message(cStringIO.StringIO(complete_headers)),
                cStringIO.StringIO(content_text),
                force_admin=True)
        except dev_appserver_upload.InvalidMIMETypeFormatError:
            outfile.write('Status: 400\n\n')
    else:
        logging.error('Could not find session for %s', upload_key)
        outfile.write('Status: 404\n\n')
def parse_multipart(fp, pdict):
    """Parse multipart input.

    Arguments:
    fp   : input file
    pdict: dictionary containing other parameters of content-type header

    Returns a dictionary just like parse_qs(): keys are the field names, each
    value is a list of values for that field.  This is easy to use but not
    much good if you are expecting megabytes to be uploaded -- in that case,
    use the FieldStorage class instead which is much more flexible.  Note
    that content-type is the raw, unparsed contents of the content-type
    header.

    XXX This does not parse nested multipart parts -- use FieldStorage for
    that.

    XXX This should really be subsumed by FieldStorage altogether -- no
    point in having two implementations of the same parsing algorithm.
    Also, FieldStorage protects itself better against certain DoS attacks
    by limiting the size of the data read in one chunk.  The API here
    does not support that kind of protection.  This also affects parse()
    since it can call parse_multipart().
    """
    boundary = ""
    if 'boundary' in pdict:
        boundary = pdict['boundary']
    # Reject boundaries with characters outside the RFC 2046 grammar.
    if not valid_boundary(boundary):
        raise ValueError, ('Invalid boundary in multipart form: %r'
                           % (boundary, ))

    nextpart = "--" + boundary          # separator between parts
    lastpart = "--" + boundary + "--"   # closing delimiter
    partdict = {}
    terminator = ""

    while terminator != lastpart:
        bytes = -1      # -1 means "no Content-Length for this part"
        data = None
        if terminator:
            # At start of next part.  Read headers first.
            headers = mimetools.Message(fp)
            clength = headers.getheader('content-length')
            if clength:
                try:
                    bytes = int(clength)
                except ValueError:
                    pass
            if bytes > 0:
                # Part declared its own length: read exactly that many bytes.
                if maxlen and bytes > maxlen:
                    raise ValueError, 'Maximum content length exceeded'
                data = fp.read(bytes)
            else:
                data = ""
        # Read lines until end of part (the next boundary line, or EOF).
        lines = []
        while 1:
            line = fp.readline()
            if not line:
                terminator = lastpart  # End outer loop
                break
            if line[:2] == "--":
                terminator = line.strip()
                if terminator in (nextpart, lastpart):
                    break
            lines.append(line)
        # Done with part.
        if data is None:
            # First pass before any boundary was seen: nothing to record.
            continue
        if bytes < 0:
            if lines:
                # Strip final line terminator
                line = lines[-1]
                if line[-2:] == "\r\n":
                    line = line[:-2]
                elif line[-1:] == "\n":
                    line = line[:-1]
                lines[-1] = line
                data = "".join(lines)
        line = headers['content-disposition']
        if not line:
            continue
        key, params = parse_header(line)
        if key != 'form-data':
            continue
        if 'name' in params:
            name = params['name']
        else:
            continue
        # Repeated field names accumulate into a list, like parse_qs().
        if name in partdict:
            partdict[name].append(data)
        else:
            partdict[name] = [data]

    return partdict
def parse_multipart(fp, pdict):
    """Parse multipart input.

    Arguments:
    fp   : input file
    pdict: dictionary containing other parameters of content-type header

    Returns a dictionary just like parse_qs(): keys are the field names, each
    value is a list of values for that field.  This is easy to use but not
    much good if you are expecting megabytes to be uploaded -- in that case,
    use the FieldStorage class instead which is much more flexible.  Note
    that content-type is the raw, unparsed contents of the content-type
    header.

    XXX This does not parse nested multipart parts -- use FieldStorage for
    that.

    XXX This should really be subsumed by FieldStorage altogether -- no
    point in having two implementations of the same parsing algorithm.
    """
    if pdict.has_key('boundary'):
        boundary = pdict['boundary']
    else:
        boundary = ""

    nextpart = "--" + boundary          # separator between parts
    lastpart = "--" + boundary + "--"   # closing delimiter
    partdict = {}
    terminator = ""

    while terminator != lastpart:
        bytes = -1      # -1 means "no Content-Length for this part"
        data = None
        if terminator:
            # At start of next part.  Read headers first.
            headers = mimetools.Message(fp)
            clength = headers.getheader('content-length')
            if clength:
                try:
                    bytes = string.atoi(clength)
                except string.atoi_error:
                    pass
            if bytes > 0:
                # Part declared its own length: read exactly that many bytes.
                if maxlen and bytes > maxlen:
                    raise ValueError, 'Maximum content length exceeded'
                data = fp.read(bytes)
            else:
                data = ""
        # Read lines until end of part (the next boundary line, or EOF).
        lines = []
        while 1:
            line = fp.readline()
            if not line:
                terminator = lastpart  # End outer loop
                break
            if line[:2] == "--":
                terminator = string.strip(line)
                if terminator in (nextpart, lastpart):
                    break
            lines.append(line)
        # Done with part.
        if data is None:
            # First pass before any boundary was seen: nothing to record.
            continue
        if bytes < 0:
            if lines:
                # Strip final line terminator
                line = lines[-1]
                if line[-2:] == "\r\n":
                    line = line[:-2]
                elif line[-1:] == "\n":
                    line = line[:-1]
                lines[-1] = line
                data = string.joinfields(lines, "")
        line = headers['content-disposition']
        if not line:
            continue
        key, params = parse_header(line)
        if key != 'form-data':
            continue
        if params.has_key('name'):
            name = params['name']
        else:
            continue
        # Repeated field names accumulate into a list, like parse_qs().
        if partdict.has_key(name):
            partdict[name].append(data)
        else:
            partdict[name] = [data]

    return partdict
def smb_open(self, req):
    """urllib2 handler entry point for smb:// URLs.

    Retrieves (GET-style) or stores (when the request carries data) a file
    over SMB via pysmb's SMBConnection and returns an addinfourl wrapping
    the file object and synthesized headers.
    """
    global USE_NTLM, MACHINE_NAME

    host = req.get_host()
    if not host:
        raise urllib2.URLError('SMB error: no host given')
    host, port = splitport(host)
    if port is None:
        port = 139  # default NetBIOS session service port
    else:
        port = int(port)

    # username/password handling
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = user or ''

    # A "domain;user" prefix selects the NT domain for authentication.
    domain = ''
    if ';' in user:
        domain, user = user.split(';', 1)

    passwd = passwd or ''
    myname = MACHINE_NAME or self.generateClientMachineName()

    # "server_name,host" lets the caller supply the NetBIOS machine name
    # explicitly; otherwise query it from the target via NetBIOS.
    server_name, host = host.split(',') if ',' in host else [None, host]
    if server_name is None:
        n = NetBIOS()
        names = n.queryIPForName(host)
        if names:
            server_name = names[0]
        else:
            raise urllib2.URLError('SMB error: Hostname does not reply back with its machine name')

    # First path component is the SMB share (service); the rest is the
    # path within that share.
    path, attrs = splitattr(req.get_selector())
    if path.startswith('/'):
        path = path[1:]
    dirs = path.split('/')
    dirs = map(unquote, dirs)
    service, path = dirs[0], '/'.join(dirs[1:])

    try:
        conn = SMBConnection(user, passwd, myname, server_name, domain=domain, use_ntlm_v2 = USE_NTLM)
        conn.connect(host, port)

        if req.has_data():
            # Upload: store the request body and return an empty response.
            data_fp = req.get_data()
            filelen = conn.storeFile(service, path, data_fp)

            headers = "Content-length: 0\n"
            fp = StringIO("")
        else:
            # Download into a temp file, then rewind it for the caller.
            fp = self.createTempFile()
            file_attrs, retrlen = conn.retrieveFile(service, path, fp)
            fp.seek(0)

            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen

        sf = StringIO(headers)
        headers = mimetools.Message(sf)

        return addinfourl(fp, headers, req.get_full_url())
    except Exception, ex:
        # Re-raise with the original traceback preserved.
        raise urllib2.URLError, ('smb error: %s' % ex), sys.exc_info()[2]
def msg(self):
    """Build a mimetools.Message mirroring the fetch response headers."""
    message = mimetools.Message(StringIO.StringIO(''))
    for header_name, header_value in self._fetch_response.headers.items():
        message[header_name] = str(header_value)
    return message
def parse_multipart(fp, pdict):
    """Parse a multipart/form-data body read from *fp*.

    *pdict* holds the parameters of the Content-Type header (notably
    'boundary').  Returns a dict mapping each field name to a list of its
    values, in the same shape parse_qs() produces.
    """
    delim = pdict.get('boundary', '')
    if not valid_boundary(delim):
        raise ValueError('Invalid boundary in multipart form: %r' % (
            delim, ))

    part_marker = '--' + delim
    end_marker = '--' + delim + '--'
    fields = {}
    seen = ''

    while seen != end_marker:
        declared_len = -1
        payload = None
        if seen:
            # Positioned at the start of a part: consume its headers.
            part_headers = mimetools.Message(fp)
            clen = part_headers.getheader('content-length')
            if clen:
                try:
                    declared_len = int(clen)
                except ValueError:
                    pass
            if declared_len > 0:
                if maxlen and declared_len > maxlen:
                    raise ValueError('Maximum content length exceeded')
                payload = fp.read(declared_len)
            else:
                payload = ''
        # Collect lines up to the next boundary (or EOF).
        collected = []
        while True:
            raw = fp.readline()
            if not raw:
                seen = end_marker  # EOF also terminates the outer loop
                break
            if raw.startswith('--'):
                seen = raw.strip()
                if seen in (part_marker, end_marker):
                    break
            collected.append(raw)
        if payload is None:
            continue
        if declared_len < 0 and collected:
            # Drop the line terminator that precedes the boundary.
            tail = collected[-1]
            if tail.endswith('\r\n'):
                tail = tail[:-2]
            elif tail.endswith('\n'):
                tail = tail[:-1]
            collected[-1] = tail
            payload = ''.join(collected)
        disposition = part_headers['content-disposition']
        if not disposition:
            continue
        kind, params = parse_header(disposition)
        if kind != 'form-data':
            continue
        if 'name' not in params:
            continue
        fields.setdefault(params['name'], []).append(payload)

    return fields
def process_jitterbug(filename):
    """Convert one JitterBug defect file into Bugzilla database rows.

    The JitterBug file name is the numeric defect id; its content is an
    RFC 822 / MIME message.  Notes and attachments are gathered into
    *current* and then inserted into the `bugs`, `longdescs` and
    `attachments` tables.

    Relies on module-level bug_status, product, version and component
    values being set by the caller.
    """
    current = {}
    current['number'] = int(filename)
    current['notes'] = []
    current['attachments'] = []
    current['description'] = ''
    current['date-reported'] = ()
    current['short-description'] = ''

    file = open(filename, "r")
    msg = mimetools.Message(file)
    msgtype = msg.gettype()

    add_notes(current)
    current['date-reported'] = rfc822.parsedate_tz(msg['Date'])
    current['short-description'] = msg['Subject']

    if msgtype[:5] == 'text/':
        process_text_plain(msg, current)
    elif msgtype[:10] == "multipart/":
        process_multi_part(file, msg, current)
    else:
        # Huh? This should never happen.
        print "Unknown content-type: %s" % msgtype
        sys.exit(1)

    # At this point we have processed the message: we have all of the notes and
    # attachments stored, so it's time to add things to the database.
    #
    # The schema for JitterBug 2.14 can be found at:
    #
    # http://www.trilobyte.net/barnsons/html/dbschema.html
    #
    # The following fields need to be provided by the user:
    #
    # bug_status
    # product
    # version
    # reporter
    # component
    # resolution

    # change this to the user_id of the Bugzilla user who is blessed with the
    # imported defects
    reporter = 6

    # the resolution will need to be set manually
    resolution = ""

    db = MySQLdb.connect(db='bugs', user='******', host='localhost')
    cursor = db.cursor()

    cursor.execute(
        "INSERT INTO bugs SET " \
        "bug_id=%s," \
        "bug_severity='normal'," \
        "bug_status=%s," \
        "creation_ts=%s," \
        "delta_ts=%s," \
        "short_desc=%s," \
        "product=%s," \
        "rep_platform='All'," \
        "assigned_to=%s,"
        "reporter=%s," \
        "version=%s," \
        "component=%s," \
        "resolution=%s",
        [current['number'],
         bug_status,
         time.strftime("%Y-%m-%d %H:%M:%S", current['date-reported'][:9]),
         time.strftime("%Y-%m-%d %H:%M:%S", current['date-reported'][:9]),
         current['short-description'],
         product,
         reporter,
         reporter,
         version,
         component,
         resolution])

    # This is the initial long description associated with the bug report
    cursor.execute("INSERT INTO longdescs VALUES (%s,%s,%s,%s)",
                   [current['number'],
                    reporter,
                    time.strftime("%Y-%m-%d %H:%M:%S",
                                  current['date-reported'][:9]),
                    current['description']])

    # Add whatever notes are associated with this defect
    for n in current['notes']:
        cursor.execute("INSERT INTO longdescs VALUES (%s,%s,%s,%s)",
                       [current['number'],
                        reporter,
                        time.strftime("%Y-%m-%d %H:%M:%S", n['timestamp'][:9]),
                        n['text']])

    # add attachments associated with this defect
    for a in current['attachments']:
        cursor.execute(
            "INSERT INTO attachments SET " \
            "bug_id=%s, creation_ts=%s, description='', mimetype=%s," \
            "filename=%s, thedata=%s, submitter_id=%s",
            [current['number'],
             time.strftime("%Y-%m-%d %H:%M:%S",
                           current['date-reported'][:9]),
             a[1],
             a[0],
             a[2],
             reporter])

    cursor.close()
    db.close()
def parse_request(self):
    """Read and parse one HTTP request line plus headers from self.rfile.

    Sets self.command, self.path, self.request_version, self.headers,
    self.host, self.upgrade and self.close_connection.  Raises
    GetReqTimeout when no request line arrives and ParseReqFail on any
    malformed input.  Returns True on success.
    """
    try:
        # Read one byte beyond the limit so an over-long line is detectable.
        self.raw_requestline = self.rfile.readline(65537)
    except:
        # Any read failure (timeout, reset, ...) is surfaced uniformly.
        raise GetReqTimeout()
    if not self.raw_requestline:
        raise GetReqTimeout()
    if len(self.raw_requestline) > 65536:
        raise ParseReqFail("Recv command line too large")
    # 0x16 is the TLS handshake record type: a TLS client hit a plain port.
    if self.raw_requestline[0] == '\x16':
        raise socket.error

    self.command = b''  # set in case of error on the first line
    self.path = b''
    self.request_version = version = self.default_request_version

    requestline = self.raw_requestline
    requestline = requestline.rstrip(b'\r\n')
    self.requestline = requestline
    words = requestline.split()
    if len(words) == 3:
        command, path, version = words
        if version[:5] != b'HTTP/':
            raise ParseReqFail("Req command format fail:%s" % requestline)
        try:
            base_version_number = version.split(b'/', 1)[1]
            version_number = base_version_number.split(b".")
            # RFC 2145 section 3.1 says there can be only one "." and
            #   - major and minor numbers MUST be treated as
            #      separate integers;
            #   - HTTP/2.4 is a lower version than HTTP/2.13, which in
            #      turn is lower than HTTP/12.3;
            #   - Leading zeros MUST be ignored by recipients.
            if len(version_number) != 2:
                raise ParseReqFail("Req command format fail:%s" % requestline)
            version_number = int(version_number[0]), int(version_number[1])
        except (ValueError, IndexError):
            raise ParseReqFail("Req command format fail:%s" % requestline)
        if version_number >= (1, 1):
            # HTTP/1.1+ defaults to persistent connections.
            self.close_connection = 0
        if version_number >= (2, 0):
            raise ParseReqFail("Req command format fail:%s" % requestline)
    elif len(words) == 2:
        # HTTP/0.9 simple request: only "GET <path>" is legal.
        command, path = words
        self.close_connection = 1
        if command != b'GET':
            raise ParseReqFail("Req command format HTTP/0.9 line:%s" % requestline)
    elif not words:
        raise ParseReqFail("Req command format fail:%s" % requestline)
    else:
        raise ParseReqFail("Req command format fail:%s" % requestline)
    self.command, self.path, self.request_version = command, path, version

    # Parse HTTP headers
    if sys.version_info[0] == 3:
        headers = client.parse_headers(self.rfile, _class=self.MessageClass)
        # Normalize header names/values to bytes for uniform lookups below.
        self.headers = dict(map(utils.to_bytes, headers.items()))
    else:
        self.headers = {}
        headers = mimetools.Message(self.rfile, 0)
        for line in headers.headers:
            line = line.strip()
            k, v = line.split(":", 1)
            self.headers[k] = v.lstrip()

    self.host = self.headers.get(b'Host', b"")

    conntype = self.headers.get(b'Connection', b"")
    if conntype.lower() == b'close':
        self.close_connection = 1
    elif conntype.lower() == b'keep-alive':
        self.close_connection = 0

    self.upgrade = self.headers.get(b'Upgrade', b"").lower()
    return True
import unittest from test import test_support import string, StringIO, mimetools, sets msgtext1 = mimetools.Message( StringIO.StringIO( """Content-Type: text/plain; charset=iso-8859-1; format=flowed Content-Transfer-Encoding: 8bit Foo! """)) class MimeToolsTest(unittest.TestCase): def test_decodeencode(self): start = string.ascii_letters + "=" + string.digits + "\n" for enc in [ '7bit', '8bit', 'base64', 'quoted-printable', 'uuencode', 'x-uuencode', 'uue', 'x-uue' ]: i = StringIO.StringIO(start) o = StringIO.StringIO() mimetools.encode(i, o, enc) i = StringIO.StringIO(o.getvalue()) o = StringIO.StringIO() mimetools.decode(i, o, enc) self.assertEqual(o.getvalue(), start) def test_boundary(self): s = sets.Set([""])
def download(self):
    """Stream the remote video over a raw TLS socket and append it to disk.

    Sends the pre-built request in self.pkt, parses the HTTP response
    headers by hand, validates the Content-Type against
    self.video_data['content_type'], and appends each received chunk to
    the file named self.name.
    """
    print('Downloading %r' % self.name)
    now = self.size_now
    size_bytes = self.size_bytes
    sock = socket.socket()
    # NOTE(review): certificate verification is deliberately disabled here.
    context = ssl._create_unverified_context()
    conn = context.wrap_socket(sock)
    conn.connect((self.video_data['host'], 443))
    print('Connected')
    # send pkt then recv
    msg = self.pkt
    while msg:
        # send() may accept only part of the buffer; loop on the remainder.
        msg = msg[conn.send(msg):]
    inputs = [conn]
    get_header = True
    headers = ''
    res_line = None
    print("Downloading...")
    while True:
        i, o, e = select.select(inputs, inputs, inputs)
        if conn in i:
            try:
                response = conn.recv(self.buff_recv)
            except Exception as e:
                print e
                break
            if not response:
                break
            if get_header:
                # Accumulate header lines until the blank line that ends them.
                pos = 0
                data = response.splitlines(1)
                while pos < len(data):
                    lines = data[pos]
                    if not lines.splitlines()[0]:
                        # Blank line: headers complete; the rest is body.
                        get_header = False
                        response = ''.join(data[pos + 1:])
                        res_line = headers.splitlines(1)[0].rstrip().split(
                            ' ', 2)
                        headers = ''.join(headers.splitlines(1)[1:])
                        import mimetools, StringIO
                        headers = mimetools.Message(
                            StringIO.StringIO(headers))
                        print ' '.join(res_line[1:])
                        if headers.has_key('content-type') and headers[
                                'content-type'] != self.video_data[
                                    'content_type']:
                            print('Invalid content type: %r != %r' %
                                  (headers['content-type'],
                                   self.video_data['content_type']))
                            return
                        elif not headers.has_key('content-type'):
                            print('Content unknown')
                            return
                        #if headers.has_key('content-length'):
                        #    size_bytes = int(headers['content-length'])
                        break
                    headers += lines
                    pos += 1
                if get_header:
                    # Headers not complete yet; wait for more data.
                    continue
            now += len(response)
            # Reopen in append mode per chunk so partial downloads survive.
            self.f = open(self.name, 'ab')
            self.f.write(response)
            self.f.close()
            print '%s/%s' % (convert_bytes(now), convert_bytes(size_bytes))
def open_ftp(self, url):
    """Open an ftp:// URL and return an addinfourl for the remote file.

    Connections are cached in self.ftpcache keyed by
    (user, host, port, dirs); the cache is pruned once it exceeds
    MAXFTPCACHE entries.  When the URL carries a user name, the password
    is prompted for interactively via getpass.

    Raises IOError('ftp error', ...) on any FTP failure.
    """
    host, path = urllib.parse.splithost(url)
    if not host:
        raise IOError('ftp error', 'no host given')
    host, port = urllib.parse.splitport(host)
    user, host = urllib.parse.splituser(host)
    # if user: user, passwd = splitpasswd(user)
    if user:
        passwd = getpass.getpass()
    else:
        passwd = None
    host = urllib.parse.unquote(host)
    user = urllib.parse.unquote(user or '')
    passwd = urllib.parse.unquote(passwd or '')
    host = socket.gethostbyname(host)
    if not port:
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    path, attrs = urllib.parse.splitattr(path)
    path = urllib.parse.unquote(path)
    # Fix: string.splitfields/joinfields/lower/upper were removed in
    # Python 3 (this function already uses urllib.parse and print());
    # use the equivalent str methods instead.
    dirs = path.split('/')
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]:
        dirs = dirs[1:]
    key = (user, host, port, '/'.join(dirs))
    # XXX thread unsafe!
    if len(self.ftpcache) > MAXFTPCACHE:
        # Prune the cache, rather arbitrarily
        for k in list(self.ftpcache.keys()):
            if k != key:
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if key not in self.ftpcache:
            print('Creating ftpwrapper: ', user, host, port, dirs)
            self.ftpcache[key] = \
                urllib.ftpwrapper(user, passwd, host, port, dirs)
        # 'I' = binary (image) transfer for files, 'D' = directory listing.
        if not file:
            type = 'D'
        else:
            type = 'I'
        for attr in attrs:
            attr, value = urllib.parse.splitvalue(attr)
            if attr.lower() == 'type' and \
               value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
        if retrlen is not None and retrlen >= 0:
            # NOTE(review): mimetools is Python-2-only; this import will
            # fail on Python 3 -- confirm intended runtime.
            import mimetools
            import io
            headers = mimetools.Message(
                io.StringIO('Content-Length: %d\n' % retrlen))
        else:
            headers = ""
        return urllib.addinfourl(fp, headers, "ftp:" + url)
    except urllib.ftperrors() as msg:
        raise IOError('ftp error', msg).with_traceback(sys.exc_info()[2])
def begin(self):
    """Read the status line and headers of the HTTP response.

    Populates self.status, self.reason, self.version, self.msg,
    self.chunked, self.length and self.will_close.  Raises BadStatusLine,
    UnknownProtocol or UnknownTransferEncoding on malformed responses.
    """
    if self.msg is not None:
        # we've already started reading the response
        return

    line = self.fp.readline()
    if self.debuglevel > 0:
        print "reply:", repr(line)
    try:
        [version, status, reason] = line.split(None, 2)
    except ValueError:
        try:
            # Status line without a reason phrase.
            [version, status] = line.split(None, 1)
            reason = ""
        except ValueError:
            # No status line at all: assume an HTTP/0.9 server.
            version = "HTTP/0.9"
            status = "200"
            reason = ""
    if version[:5] != 'HTTP/':
        self.close()
        raise BadStatusLine(line)

    # The status code is a three-digit number
    try:
        self.status = status = int(status)
        if status < 100 or status > 999:
            raise BadStatusLine(line)
    except ValueError:
        raise BadStatusLine(line)
    self.reason = reason.strip()

    if version == 'HTTP/1.0':
        self.version = 10
    elif version.startswith('HTTP/1.'):
        self.version = 11   # use HTTP/1.1 code for HTTP/1.x where x>=1
    elif version == 'HTTP/0.9':
        self.version = 9
    else:
        raise UnknownProtocol(version)

    if self.version == 9:
        # HTTP/0.9 has no headers; synthesize an empty message.
        self.msg = mimetools.Message(StringIO())
        return

    self.msg = mimetools.Message(self.fp, 0)
    if self.debuglevel > 0:
        for hdr in self.msg.headers:
            print "header:", hdr,

    # don't let the msg keep an fp
    self.msg.fp = None

    # are we using the chunked-style of transfer encoding?
    tr_enc = self.msg.getheader('transfer-encoding')
    if tr_enc:
        if tr_enc.lower() != 'chunked':
            raise UnknownTransferEncoding()
        self.chunked = 1
        self.chunk_left = None
    else:
        self.chunked = 0

    # will the connection close at the end of the response?
    conn = self.msg.getheader('connection')
    if conn:
        conn = conn.lower()
        # a "Connection: close" will always close the connection. if we
        # don't see that and this is not HTTP/1.1, then the connection will
        # close unless we see a Keep-Alive header.
        self.will_close = conn.find('close') != -1 or \
                          ( self.version != 11 and \
                            not self.msg.getheader('keep-alive') )
    else:
        # for HTTP/1.1, the connection will always remain open
        # otherwise, it will remain open IFF we see a Keep-Alive header
        self.will_close = self.version != 11 and \
                          not self.msg.getheader('keep-alive')

    # do we have a Content-Length?
    # NOTE: RFC 2616, S4.4, #3 says we ignore this if tr_enc is "chunked"
    length = self.msg.getheader('content-length')
    if length and not self.chunked:
        try:
            self.length = int(length)
        except ValueError:
            self.length = None
    else:
        self.length = None

    # does the body have a fixed length? (of zero)
    if (status == 204 or            # No Content
        status == 304 or            # Not Modified
        100 <= status < 200):       # 1xx codes
        self.length = 0

    # if the connection remains open, and we aren't using chunked, and
    # a content-length was not provided, then assume that the connection
    # WILL close.
    if not self.will_close and \
       not self.chunked and \
       self.length is None:
        self.will_close = 1
def _handshake(self): # receive some data; will block data = self.request.recv(_buffer_size) if not data: # socket was closed return True # so that handle() returns # store the received data in _pending, and check if enough data is received yet? logger.debug('received handshake %d bytes\n%s', len(data), data) self._pending += data if self._pending.find('\r\n\r\n') < 0: # not enough data return # extract the first HTTP request until CRLFCRLF parts = self._pending.split('\r\n\r\n', 1) self._pending, data = parts[1], parts[0].strip() firstline = data.split('\r\n', 1)[0] try: # parse first line, and validate method and protocol method, path, protocol = firstline.split(' ', 2) if method != 'GET': raise HTTPError('405 Method Not Allowed') if protocol != 'HTTP/1.1': raise HTTPError('505 HTTP Version Not Supported') # extract headers, and validate some headers. headers = mimetools.Message(StringIO(data.split('\r\n', 1)[1])) if headers.get('Upgrade', None) != 'websocket': raise HTTPError('403 Forbidden', 'missing or invalid Upgrade header') if headers.get('Connection', None) != 'Upgrade': raise HttpError('400 Bad Request', 'missing or invalid Connection header') if 'Sec-WebSocket-Key' not in headers: raise HTTPError('400 Bad Request', 'missing Sec-WebSocket-Key header') if headers.get('Sec-WebSocket-Version', None) != '13': raise HttpError('400 Bad Request', 'missing or unsupported Sec-WebSocket-Version') result = None if hasattr(self.server, 'onhandshake') and callable(self.server.onhandshake): result = self.server.onhandshake(self, path, headers) # may raise HTTPError # if result is None, onhandshake is undefined; assume success self.path = path # so that apps can use it # generate the response, and append result returned by onhandshake if applicable key = headers['Sec-WebSocket-Key'] digest = base64.b64encode(hashlib.sha1(key + _magic).hexdigest().decode('hex')) response = ['HTTP/1.1 101 Switching Protocols', 'Upgrade: websocket', 'Connection: Upgrade', 
'Sec-WebSocket-Accept: %s' % digest] if result: response.extend(result) response = '\r\n'.join(response) + '\r\n\r\n' # send the response, and finish handshake phase. _pending contains remaining data if any. logger.debug('sending handshake %d bytes\n%s', len(response), response) self.request.sendall(response) self._handshake_done = True logger.info('%s - %s - HTTP/1.1 101 Switching Protocols', self.client_address, firstline) # invoke onopen callback _callit(self.server, 'onopen', self) except HTTPError, e: # send error response logger.debug('sending handshake response\n%s', str(e)) self.request.sendall(str(e)) logger.info('%s - %s - HTTP/1.1 %s', self.client_address, firstline, e.response) return True # so that handle() returns
return self.open_local_file(url)  # tail of the preceding (truncated) handler

def open_local_file(self, url):
    """Use local file."""
    import mimetypes, mimetools, email.Utils, StringIO
    host, file = splithost(url)
    localname = url2pathname(file)
    try:
        stats = os.stat(localname)
    except OSError, e:
        # Re-raise stat failures as IOError, preserving errno and filename.
        raise IOError(e.errno, e.strerror, e.filename)
    size = stats.st_size
    modified = email.Utils.formatdate(stats.st_mtime, usegmt=True)
    mtype = mimetypes.guess_type(url)[0]
    # Synthesize HTTP-style headers so callers can treat local files
    # uniformly with remote responses.
    headers = mimetools.Message(StringIO.StringIO(
        'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
        (mtype or 'text/plain', size, modified)))
    if not host:
        # No host part: plain local path.
        urlfile = file
        if file[:1] == '/':
            urlfile = 'file://' + file
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    host, port = splitport(host)
    if not port \
       and socket.gethostbyname(host) in (localhost(), thishost()):
        # Host resolves to this machine: still a local file.
        urlfile = file
        if file[:1] == '/':
            urlfile = 'file://' + file
        return addinfourl(open(localname, 'rb'), headers, urlfile)
    # NOTE(review): non-local hosts fall through and return None here;
    # upstream urllib raises ('local file error', 'not on local host') --
    # confirm whether that tail was truncated.
print "Initializing socket server..." s = socket.socket() s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) s.bind(('', 9876)) s.listen(1) for i in itertools.count(0): print "Waiting for connection %d...." % (i) t, _ = s.accept() print "Handshaking..." #Websocket handshake request_text = t.recv(4096) request_line, headers_alone = request_text.split('\r\n', 1) headers = mimetools.Message(StringIO.StringIO(headers_alone)) accept = base64.b64encode( hashlib.sha1(headers['Sec-WebSocket-Key'] + GUID).digest()) response = SERVER_HANDSHAKE_HYBI % (accept) response = response.strip() + '\r\n\r\n' t.send(response) print "Updating handlers for tag reader..." def rfidTagGained(e): source = e.device rfid.setLEDOn(1) print("RFID %i: Tag Read: %s" % (source.getSerialNum(), e.tag)) t.send(encode(id[e.tag])) def rfidTagLost(e):
class FTPRangeHandler(urllib2.FTPHandler):
    """FTP handler with support for the HTTP Range request header.

    A Range header on the request is translated into an FTP REST
    (restart) offset plus client-side truncation via RangeableFileObject.
    """

    def ftp_open(self, req):
        host = req.get_host()
        if not host:
            raise IOError, ('ftp error', 'no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')

        try:
            host = socket.gethostbyname(host)
        except socket.error, msg:
            raise urllib2.URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = map(unquote, dirs)
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs)
            # 'I' = binary transfer for files, 'D' = directory listing.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitattr(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()

            # -- range support modifications start here
            rest = None
            # range_tup is None when no Range header was sent; () would mean
            # an unparsable header, which is a programming error upstream.
            range_tup = range_header_to_tuple(req.headers.get('Range', None))
            assert range_tup != ()
            if range_tup:
                (fb, lb) = range_tup
                if fb > 0:
                    # Ask the server to resume the transfer at offset fb.
                    rest = fb
            # -- range support modifications end here

            fp, retrlen = fw.retrfile(file, type, rest)

            # -- range support modifications start here
            if range_tup:
                (fb, lb) = range_tup
                if lb == '':
                    # Open-ended range: the upper bound is the file length,
                    # which must be known to satisfy the request.
                    if retrlen is None or retrlen == 0:
                        raise RangeError(
                            'Requested Range Not Satisfiable due to unobtainable file length.'
                        )
                    lb = retrlen
                    retrlen = lb - fb
                    if retrlen < 0:
                        # beginning of range is larger than file
                        raise RangeError('Requested Range Not Satisfiable')
                else:
                    retrlen = lb - fb
                # Truncate the stream client-side to the requested length.
                fp = RangeableFileObject(fp, (0, retrlen))
            # -- range support modifications end here

            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-Type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-Length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors, msg:
            # Re-raise as IOError with the original traceback preserved.
            raise IOError, ('ftp error', msg), sys.exc_info()[2]
def Dispatch(self, request, outfile, base_env_dict=None):
    """Handle post dispatch.

    This dispatcher will handle all uploaded files in the POST request, store
    the results in the blob-storage, close the upload session and transform
    the original request in to one where the uploaded files have external
    bodies.

    Returns:
      New AppServerRequest indicating request forward to upload success handler.
    """
    # Blob uploads must arrive via POST; reject anything else.
    if base_env_dict['REQUEST_METHOD'] != 'POST':
        outfile.write('Status: 400\n\n')
        return

    # The datastore key of the upload session is embedded in the URL.
    upload_key = re.match(UPLOAD_URL_PATTERN, request.relative_url).group(1)
    try:
        upload_session = datastore.Get(upload_key)
    except datastore_errors.EntityNotFoundError:
        upload_session = None

    if upload_session:
        success_path = upload_session['success_path']
        # Per-session upload size limits and optional Cloud Storage bucket.
        max_bytes_per_blob = upload_session['max_bytes_per_blob']
        max_bytes_total = upload_session['max_bytes_total']
        bucket_name = upload_session.get('gs_bucket_name', None)

        upload_form = cgi.FieldStorage(fp=request.infile,
                                       headers=request.headers,
                                       environ=base_env_dict)

        try:
            # Rewrite the multipart form: uploaded file bodies are stored in
            # blob storage and replaced by external-body references.
            mime_message_string = self.__cgi_handler.GenerateMIMEMessageString(
                upload_form,
                max_bytes_per_blob=max_bytes_per_blob,
                max_bytes_total=max_bytes_total,
                bucket_name=bucket_name)
            # Sessions are single-use: delete before forwarding so a retry
            # cannot replay the same upload.
            datastore.Delete(upload_session)
            self.current_session = upload_session

            # Split the generated MIME document into header and content parts
            # and normalize bare LFs to the CRLFs HTTP requires.
            header_end = mime_message_string.find('\n\n') + 1
            content_start = header_end + 1
            header_text = mime_message_string[:header_end].replace(
                '\n', '\r\n')
            content_text = mime_message_string[content_start:].replace(
                '\n', '\r\n')

            complete_headers = ('%s'
                                'Content-Length: %d\r\n'
                                '\r\n') % (header_text, len(content_text))

            # Forward the rewritten request to the app's success handler
            # with admin rights so it can access the blob records.
            return old_dev_appserver.AppServerRequest(
                success_path,
                None,
                mimetools.Message(
                    cStringIO.StringIO(complete_headers)),
                cStringIO.StringIO(content_text),
                force_admin=True)
        except dev_appserver_upload.InvalidMIMETypeFormatError:
            outfile.write('Status: 400\n\n')
        except dev_appserver_upload.UploadEntityTooLargeError:
            outfile.write('Status: 413\n\n')
            response = ERROR_RESPONSE_TEMPLATE % {
                'response_code': 413,
                'response_string': 'Request Entity Too Large',
                'response_text': 'Your client issued a request that was too '
                                 'large.'
            }
            outfile.write(response)
        except dev_appserver_upload.FilenameOrContentTypeTooLargeError, ex:
            outfile.write('Status: 400\n\n')
            response = ERROR_RESPONSE_TEMPLATE % {
                'response_code': 400,
                'response_string': 'Bad Request',
                'response_text': str(ex)
            }
            outfile.write(response)
    # NOTE(review): no response is written here when the session is not
    # found -- confirm whether a 404 branch was truncated from this chunk.
def handle(self, sock, address, internal=False):
    """
    Handles an incoming HTTP connection.

    Reads the request line and headers, resolves the Host header to an
    action, tracks per-host statistics, and delegates the connection to
    the action's handle().  *internal* suppresses the X-Forwarded-*
    header injection.
    """
    try:
        sock = StatsSocket(sock)
        rfile = sock.makefile('rb', 4096)
        # Read the first line
        first = rfile.readline().strip("\r\n")
        words = first.split()
        # Ensure it looks kind of like HTTP
        if not (2 <= len(words) <= 3):
            sock.sendall(
                "HTTP/1.0 400 Bad Request\r\nConnection: close\r\nContent-length: 0\r\n\r\n"
            )
            return
        path = words[1]
        # Read the headers
        headers = mimetools.Message(rfile, 0)
        # Work out the host
        try:
            host = headers['Host']
        except KeyError:
            host = "unknown"
        headers['Connection'] = "close"
        if not internal:
            # Stamp the client address and clear any spoofed forwarding
            # headers supplied by the client.
            headers['X-Forwarded-For'] = address[0]
            headers['X-Forwarded-Protocol'] = ""
            headers['X-Forwarded-Proto'] = ""
        # Make sure they're not using odd encodings
        if "Transfer-Encoding" in headers:
            sock.sendall(
                "HTTP/1.0 411 Length Required\r\nConnection: close\r\nContent-length: 0\r\n\r\n"
            )
            return
        # Match the host to an action
        protocol = "http"
        if headers.get('X-Forwarded-Protocol',
                       headers.get('X-Forwarded-Proto',
                                   "")).lower() in ("ssl", "https"):
            protocol = "https"
        action = self.resolve_host(host, protocol)
        # Record us as an open connection
        stats_dict = self.stats.setdefault(action.matched_host, {})
        stats_dict['open_requests'] = stats_dict.get('open_requests', 0) + 1
        # Run the action
        try:
            # Rewind the read buffer so the raw bytes already consumed can
            # be replayed to the backend along with the parsed headers.
            rfile._rbuf.seek(0)
            action.handle(
                sock=sock,
                read_data=first + "\r\n" + str(headers) + "\r\n" + rfile._rbuf.read(),
                path=path,
                headers=headers,
            )
        finally:
            # Always settle the statistics, even if the action raised.
            stats_dict['open_requests'] -= 1
            stats_dict['completed_requests'] = stats_dict.get(
                'completed_requests', 0) + 1
            stats_dict['bytes_sent'] = stats_dict.get('bytes_sent',
                                                      0) + sock.bytes_sent
            stats_dict['bytes_received'] = stats_dict.get(
                'bytes_received', 0) + sock.bytes_received
    except socket.error, e:
        # Client disconnects are routine; log anything else.
        if e.errno not in (errno.EPIPE, errno.ETIMEDOUT, errno.ECONNRESET):
            logging.error(traceback.format_exc())
class URLopener(urllib.URLopener):
    """urllib.URLopener variant whose HTTP/HTTPS/Gopher openers use the
    cooperative (green) httplib/gopherlib from eventlib instead of the
    blocking stdlib versions.  The method bodies otherwise follow the
    Python 2 stdlib urllib implementations."""

    def open_http(self, url, data=None):
        """Use HTTP protocol."""
        from eventlib.green import httplib
        user_passwd = None
        proxy_passwd = None
        if isinstance(url, str):
            # Direct request: url is "//host/selector".
            host, selector = splithost(url)
            if host:
                user_passwd, host = splituser(host)
                host = unquote(host)
            realhost = host
        else:
            # Proxied request: url is a (proxy_host, full_url) tuple.
            host, selector = url
            # check whether the proxy contains authorization information
            proxy_passwd, host = splituser(host)
            # now we proceed with the url we want to obtain
            urltype, rest = splittype(selector)
            url = rest
            user_passwd = None
            if urltype.lower() != 'http':
                realhost = None
            else:
                realhost, rest = splithost(rest)
                if realhost:
                    user_passwd, realhost = splituser(realhost)
                if user_passwd:
                    selector = "%s://%s%s" % (urltype, realhost, rest)
                if proxy_bypass(realhost):
                    host = realhost
            #print "proxy via http:", host, selector
        if not host:
            raise IOError, ('http error', 'no host given')
        if proxy_passwd:
            import base64
            proxy_auth = base64.b64encode(proxy_passwd).strip()
        else:
            proxy_auth = None
        if user_passwd:
            import base64
            auth = base64.b64encode(user_passwd).strip()
        else:
            auth = None
        h = httplib.HTTP(host)
        if data is not None:
            h.putrequest('POST', selector)
            h.putheader('Content-Type', 'application/x-www-form-urlencoded')
            h.putheader('Content-Length', '%d' % len(data))
        else:
            h.putrequest('GET', selector)
        if proxy_auth:
            h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
        if auth:
            h.putheader('Authorization', 'Basic %s' % auth)
        if realhost:
            h.putheader('Host', realhost)
        for args in self.addheaders:
            h.putheader(*args)
        h.endheaders()
        if data is not None:
            h.send(data)
        errcode, errmsg, headers = h.getreply()
        if errcode == -1:
            # something went wrong with the HTTP status line
            raise IOError, ('http protocol error', 0,
                            'got a bad status line', None)
        fp = h.getfile()
        if errcode == 200:
            return addinfourl(fp, headers, "http:" + url)
        else:
            if data is None:
                return self.http_error(url, fp, errcode, errmsg, headers)
            else:
                return self.http_error(url, fp, errcode, errmsg,
                                       headers, data)

    # open_https only exists when the interpreter was built with SSL
    # support, mirroring the stdlib urllib guard.
    if hasattr(socket, "ssl"):
        def open_https(self, url, data=None):
            """Use HTTPS protocol."""
            from eventlib.green import httplib
            user_passwd = None
            proxy_passwd = None
            if isinstance(url, str):
                host, selector = splithost(url)
                if host:
                    user_passwd, host = splituser(host)
                    host = unquote(host)
                realhost = host
            else:
                # here, we determine, whether the proxy contains
                # authorization information
                host, selector = url
                proxy_passwd, host = splituser(host)
                urltype, rest = splittype(selector)
                url = rest
                user_passwd = None
                if urltype.lower() != 'https':
                    realhost = None
                else:
                    realhost, rest = splithost(rest)
                    if realhost:
                        user_passwd, realhost = splituser(realhost)
                    if user_passwd:
                        selector = "%s://%s%s" % (urltype, realhost, rest)
                #print "proxy via https:", host, selector
            if not host:
                raise IOError, ('https error', 'no host given')
            if proxy_passwd:
                import base64
                proxy_auth = base64.b64encode(proxy_passwd).strip()
            else:
                proxy_auth = None
            if user_passwd:
                import base64
                auth = base64.b64encode(user_passwd).strip()
            else:
                auth = None
            h = httplib.HTTPS(host, 0,
                              key_file=self.key_file,
                              cert_file=self.cert_file)
            if data is not None:
                h.putrequest('POST', selector)
                h.putheader('Content-Type',
                            'application/x-www-form-urlencoded')
                h.putheader('Content-Length', '%d' % len(data))
            else:
                h.putrequest('GET', selector)
            if proxy_auth:
                h.putheader('Proxy-Authorization', 'Basic %s' % proxy_auth)
            if auth:
                h.putheader('Authorization', 'Basic %s' % auth)
            if realhost:
                h.putheader('Host', realhost)
            for args in self.addheaders:
                h.putheader(*args)
            h.endheaders()
            if data is not None:
                h.send(data)
            errcode, errmsg, headers = h.getreply()
            if errcode == -1:
                # something went wrong with the HTTP status line
                raise IOError, ('http protocol error', 0,
                                'got a bad status line', None)
            fp = h.getfile()
            if errcode == 200:
                return addinfourl(fp, headers, "https:" + url)
            else:
                if data is None:
                    return self.http_error(url, fp, errcode, errmsg, headers)
                else:
                    return self.http_error(url, fp, errcode, errmsg, headers,
                                           data)

    def open_gopher(self, url):
        """Use Gopher protocol."""
        if not isinstance(url, str):
            raise IOError, (
                'gopher error',
                'proxy support for gopher protocol currently not implemented')
        from eventlib.green import gopherlib
        host, selector = splithost(url)
        if not host:
            raise IOError, ('gopher error', 'no host given')
        host = unquote(host)
        type, selector = splitgophertype(selector)
        selector, query = splitquery(selector)
        selector = unquote(selector)
        if query:
            query = unquote(query)
            fp = gopherlib.send_query(selector, query, host)
        else:
            fp = gopherlib.send_selector(selector, host)
        return addinfourl(fp, noheaders(), "gopher:" + url)

    def open_local_file(self, url):
        """Use local file."""
        import mimetypes, mimetools, email.Utils
        try:
            from cStringIO import StringIO
        except ImportError:
            from StringIO import StringIO
        host, file = splithost(url)
        localname = url2pathname(file)
        try:
            stats = os.stat(localname)
        except OSError, e:
            raise IOError(e.errno, e.strerror, e.filename)
        size = stats.st_size
        modified = email.Utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(url)[0]
        # Synthesize RFC 822 headers for the local file so callers get
        # the same addinfourl interface as for network URLs.
        headers = mimetools.Message(
            StringIO(
                'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n'
                % (mtype or 'text/plain', size, modified)))
        if not host:
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        host, port = splitport(host)
        # A host is acceptable only if it has no port and resolves to
        # this machine.
        if not port \
           and socket.gethostbyname(host) in (localhost(), thishost()):
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        raise IOError, ('local file error', 'not on local host')
def handleMultipart(mainHeaders, msgStream):
    """Parse a multipart/form-data body from msgStream into a ServerRequest.

    mainHeaders supplies the Content-Type whose boundary delimits parts.
    File parts (per ServerRequest.isFile on Content-Disposition) are
    spooled into anonymous temporary files keyed by field name in
    `files`; all other parts are accumulated as strings in `params`.

    Fix over the previous version: the boundary was used directly as a
    regular expression.  RFC 2046 allows characters such as '+', '?',
    '(' and ')' in boundaries, which are regex metacharacters, so the
    pattern is now escaped with re.escape before searching.

    Returns:
      ServerRequest(mainHeaders, None, params, files)
    """
    files = dict()
    params = dict()
    BOUNDARY = "--" + HttpMethodParser.extractBoundary(mainHeaders)
    # Pre-escape once: the boundary is literal text, not a pattern.
    boundaryPattern = re.escape(BOUNDARY)
    stopBoundary = BOUNDARY + "--"
    terminateBoundary = ''
    msgStream.readline()  # has an empty line at start that we want to get rid of
    while terminateBoundary != stopBoundary:
        # Each part starts with its own RFC 822 header block.
        headers = mimetools.Message(msgStream)
        terminateBoundary = ''
        log.log(cpc.util.log.TRACE,
                'multipart headers are %s' % headers.headers)
        if ServerRequest.isFile(headers['Content-Disposition']):
            file = tempfile.TemporaryFile(mode="w+b")
        name = ServerRequest.getFieldName(headers['Content-Disposition'])
        notused, contentDispositionParams = cgi.parse_header(
            headers['Content-Disposition'])
        name = contentDispositionParams['name']
        # if we have a content length we just read it and store the data
        contentLength = headers.getheader('Content-Length')
        if contentLength:
            # If a content length is sent we parse the nice way
            bytes = int(contentLength)
            if ServerRequest.isFile(headers['Content-Disposition']):
                file.write(msgStream.read(bytes))
            else:
                line = msgStream.read(bytes)
                log.log(cpc.util.log.TRACE, "line is " + line)
                params[name] = line
            msgStream.readline()  # we will have a trailing CRLF that we just want to get rid of
        if ServerRequest.isFile(headers['Content-Disposition']):
            readBytes = 0
            while True:
                line = msgStream.readline()
                if re.search(boundaryPattern, line):
                    # time to wrap it up: strip the line terminator so the
                    # boundary can be compared against stopBoundary.
                    if line[-2:] == '\r\n':
                        line = line[:-2]
                    elif line[-1:] == '\n':
                        line = line[:-1]
                    terminateBoundary = line
                    file.seek(0)
                    # Drop the CRLF that separates the payload from the
                    # boundary line (assumes CRLF line endings).
                    skipBytes = 2
                    realFile = tempfile.TemporaryFile(mode="w+b")
                    realFile.write(file.read(readBytes - skipBytes))
                    file.close()
                    realFile.seek(0)
                    files[name] = realFile
                    break
                else:
                    readBytes += len(line)
                    file.write(line)
        else:
            while True:
                line = msgStream.readline()
                if line[-2:] == '\r\n':
                    line = line[:-2]
                elif line[-1:] == '\n':
                    line = line[:-1]
                if re.search(boundaryPattern, line):
                    terminateBoundary = line
                    break
                else:
                    # Multi-line values are concatenated.
                    if name in params:
                        params[name] += line
                    else:
                        params[name] = line
    return ServerRequest(mainHeaders, None, params, files)
def open_ftp(self, url):
    """Open an ftp:// URL and return an addinfourl over the data stream.

    Connections are pooled in self.ftpcache keyed by
    (user, host, port, directory); the pool is pruned when it exceeds
    MAXFTPCACHE.  Transfer type defaults to 'D' (directory listing) for
    URLs without a file component, 'I' (binary) otherwise, and may be
    overridden by a ";type=x" URL attribute.
    """
    if not isinstance(url, str):
        raise IOError, (
            'ftp error',
            'proxy support for ftp protocol currently not implemented')
    import mimetypes, mimetools
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    host, path = splithost(url)
    if not host:
        raise IOError, ('ftp error', 'no host given')
    host, port = splitport(host)
    user, host = splituser(host)
    if user:
        user, passwd = splitpasswd(user)
    else:
        passwd = None
    host = unquote(host)
    user = user or ''
    passwd = passwd or ''
    host = socket.gethostbyname(host)
    if not port:
        import ftplib
        port = ftplib.FTP_PORT
    else:
        port = int(port)
    path, attrs = splitattr(path)
    path = unquote(path)
    dirs = path.split('/')
    # Last path component is the file; the rest are directories.
    dirs, file = dirs[:-1], dirs[-1]
    if dirs and not dirs[0]:
        dirs = dirs[1:]
    if dirs and not dirs[0]:
        dirs[0] = '/'
    key = (user, host, port, '/'.join(dirs))
    # Evict every other cached connection when the pool is full.
    if len(self.ftpcache) > MAXFTPCACHE:
        for k in self.ftpcache.keys():
            if k != key:
                v = self.ftpcache[k]
                del self.ftpcache[k]
                v.close()
    try:
        if key not in self.ftpcache:
            self.ftpcache[key] = ftpwrapper(user, passwd, host, port, dirs)
        # 'D' = directory listing, 'I' = image (binary) transfer.
        if not file:
            type = 'D'
        else:
            type = 'I'
        for attr in attrs:
            attr, value = splitvalue(attr)
            if attr.lower() == 'type' and value in ('a', 'A', 'i', 'I', 'd', 'D'):
                type = value.upper()
        fp, retrlen = self.ftpcache[key].retrfile(file, type)
        mtype = mimetypes.guess_type('ftp:' + url)[0]
        # Synthesize headers so callers get the usual addinfourl shape.
        headers = ''
        if mtype:
            headers += 'Content-Type: %s\n' % mtype
        if retrlen is not None and retrlen >= 0:
            headers += 'Content-Length: %d\n' % retrlen
        headers = mimetools.Message(StringIO(headers))
        return addinfourl(fp, headers, 'ftp:' + url)
    except ftperrors() as msg:
        # Re-raise as IOError but preserve the original traceback.
        raise IOError, ('ftp error', msg), sys.exc_info()[2]
    return
def __build_message(self, json):
    """Build a mimetools.Message whose headers are the entries of the
    mapping *json*, one "name: value" header line per entry."""
    header_text = "\n".join(
        "%s: %s" % (header, value) for header, value in json.iteritems())
    return mimetools.Message(StringIO(header_text))
"""HTTP/1.1 client library
def receive_handshake(msg, verify_handshake=None, userdata=None):
    """Parse one WebSocket opening handshake from the buffer *msg*.

    Returns a (response, remaining, path) tuple:
      response  - raw HTTP bytes to send back: a 101 Switching Protocols
                  reply on success, an HTTPError string on failure, or
                  None when not enough data has arrived yet;
      remaining - unconsumed tail of msg (pending pipelined data);
      path      - the requested URL path, or '' on failure/incomplete.

    verify_handshake, if callable, is invoked with (userdata, path,
    headers) and may contribute extra response header lines or raise
    HTTPError to reject the handshake.
    """
    # Locate the end of the header block, accepting both bare-LF and
    # CRLF terminated headers.
    index1, index2 = msg.find('\n\n'), msg.find(
        '\n\r\n')  # handle both LFLF and CRLFCRLF
    if index2 > 0 and index1 > 0:
        # Both found: use whichever terminator occurs first.
        index = (index1 + 2) if index1 < index2 else (index2 + 3)
    elif index1 > 0:
        index = index1 + 2
    elif index2 > 0:
        index = index2 + 3
    else:
        logger.debug('no CRLF found')
        return (None, msg, '')  # not enough header data yet
    # verify if enough data is available for content-length, if any
    match = re.search(r'content-length\s*:\s*(\d+)\r?\n', msg[:index].lower())
    length = int(match.group(1)) if match else 0
    if len(msg) < index + length:
        logger.debug('has more content %d < %d (%d+%d)', len(msg),
                     index + length, index, length)
        return (None, msg, '')  # pending further content.
    # extract the first HTTP request, and store remaining as pending
    data, body, msg = msg[:index], msg[index:index + length], msg[index + length:]
    try:
        firstline, data = data.split('\n', 1)
        firstline = firstline.rstrip()
        headers = mimetools.Message(StringIO(data))
        # validate firstline and some headers
        method, path, protocol = firstline.split(' ', 2)
        if method != 'GET':
            raise HTTPError('405 Method Not Allowed')
        if protocol != "HTTP/1.1":
            raise HTTPError('505 HTTP Version Not Supported')
        if headers.get('Upgrade', None) != 'websocket':
            raise HTTPError('403 Forbidden',
                            'missing or invalid Upgrade header')
        if headers.get('Connection', None) != 'Upgrade':
            raise HTTPError('400 Bad Request',
                            'missing or invalid Connection header')
        if 'Sec-WebSocket-Key' not in headers:
            raise HTTPError('400 Bad Request',
                            'missing Sec-WebSocket-Key header')
        if int(headers.get('Sec-WebSocket-Version', '0')) < 13:
            raise HTTPError('400 Bad Request',
                            'missing or unsupported Sec-WebSocket-Version')
        result = None  # invoke app below for result if needed
        if verify_handshake is not None and callable(verify_handshake):
            try:
                result = verify_handshake(userdata=userdata, path=path,
                                          headers=headers)
            except HTTPError:
                raise  # re-raise only HTTPError, and mask all others
            except:
                # Deliberate broad catch: any app failure becomes a 500
                # rather than tearing down the connection loop.
                logger.exception('exception in server app: verify_handshake')
                raise HTTPError('500 Server Error',
                                'exception in server app: verify_handshake')
        # generate the response, and append result returned by
        # onhandshake if applicable.
        key = headers['Sec-WebSocket-Key']
        # hexdigest().decode('hex') round-trips to the raw digest bytes,
        # i.e. this is equivalent to b64encode(sha1(key + _magic).digest()).
        # NOTE(review): _magic is defined elsewhere — presumably the
        # RFC 6455 handshake GUID; confirm at its definition.
        digest = base64.b64encode(
            hashlib.sha1(key + _magic).hexdigest().decode('hex'))
        response = [
            'HTTP/1.1 101 Switching Protocols', 'Upgrade: websocket',
            'Connection: Upgrade',
            'Sec-WebSocket-Accept: %s' % digest
        ]
        if result:
            response.extend(result)
        response = '\r\n'.join(
            response) + '\r\n\r\n'  # we always respond with CRLF line ending
        return (response, msg, path)
    except HTTPError, e:
        return (str(e), msg, '')  # send error response
def test_monkey_patch_mimetools(self):
    """Before patching, mimetools.Message defaults missing Content-Type
    to text/plain; after wsgi.monkey_patch_mimetools() the derived type
    attributes become None, while an explicit Content-Type header is
    still parsed exactly as before."""
    plain = 'blah'
    typed = 'Content-Type: text/html; charset=ISO-8859-4'

    def check(raw, attr, expected):
        # Fresh StringIO per parse: mimetools.Message consumes the stream.
        self.assertEquals(getattr(mimetools.Message(StringIO(raw)), attr),
                          expected)

    check(plain, 'type', 'text/plain')
    check(plain, 'plisttext', '')
    check(plain, 'maintype', 'text')
    check(plain, 'subtype', 'plain')
    check(typed, 'type', 'text/html')
    check(typed, 'plisttext', '; charset=ISO-8859-4')
    check(typed, 'maintype', 'text')
    check(typed, 'subtype', 'html')

    wsgi.monkey_patch_mimetools()

    check(plain, 'type', None)
    check(plain, 'plisttext', '')
    check(plain, 'maintype', None)
    check(plain, 'subtype', None)
    check(typed, 'type', 'text/html')
    check(typed, 'plisttext', '; charset=ISO-8859-4')
    check(typed, 'maintype', 'text')
    check(typed, 'subtype', 'html')
def info(self):
    """Return self.cookie_data parsed as an RFC 822 header block."""
    header_stream = StringIO.StringIO(self.cookie_data)
    return mimetools.Message(header_stream)
def _get_msg(self):
    """Lazily build and cache a mimetools.Message parsed from
    self.response.headers.

    The previous version used a tuple-parameter lambda
    (``lambda (k, v): ...``), Python-2-only syntax removed by PEP 3113;
    the generator expression below is equivalent and portable.
    """
    if self._msg is None:
        # get_all() yields (name, value) pairs; rebuild them as raw
        # "Name: value" lines for mimetools to parse.
        header_text = "\r\n".join(
            "%s: %s" % (name, value)
            for name, value in self.response.headers.get_all())
        self._msg = mimetools.Message(StringIO.StringIO(header_text))
    return self._msg
def unpackMultifile(multifile, attachments=None):
    """ Unpack multifile into plainbody, content-type, htmlbody and
    attachments.

    multifile is a multifile.MultiFile-style reader positioned at a MIME
    part; nested multiparts are handled by recursing with push()/pop()
    on the part boundary.  Returns a
    (textBody, contentType, htmlBody, attachments) tuple, where
    attachments is a list of dicts with filename/filebody/maintype/subtype
    keys, accumulated across recursive calls.
    """
    if attachments is None:
        attachments = []
    textBody = htmlBody = contentType = ''
    msg = mimetools.Message(multifile)
    maintype = msg.getmaintype()
    subtype = msg.getsubtype()
    name = msg.getparam('name')
    if not name:
        # Check for disposition header (RFC:1806)
        disposition = msg.getheader('Content-Disposition')
        if disposition:
            matchObj = re.search('(?i)filename="*(?P<filename>[^\s"]*)"*',
                                 disposition)
            if matchObj:
                name = matchObj.group('filename')
    # Recurse over all nested multiparts
    if maintype == 'multipart':
        multifile.push(msg.getparam('boundary'))
        # Skip the preamble up to the first boundary.
        multifile.readlines()
        while not multifile.last:
            multifile.next()
            (tmpTextBody, tmpContentType, tmpHtmlBody, tmpAttachments) = \
                unpackMultifile(multifile, attachments)
            # Return ContentType only for the plain-body of a mail
            if tmpContentType:  # and not textBody:
                textBody += tmpTextBody
                contentType = tmpContentType
            if tmpHtmlBody:
                htmlBody = tmpHtmlBody
            if tmpAttachments:
                attachments = tmpAttachments
        multifile.pop()
        return (textBody, contentType, htmlBody, attachments)
    # Process MIME-encoded data
    plainfile = StringIO.StringIO()
    try:
        mimetools.decode(multifile, plainfile, msg.getencoding())
    # unknown or no encoding? 7bit, 8bit or whatever... copy literal
    except ValueError:
        mimetools.copyliteral(multifile, plainfile)
    body = plainfile.getvalue()
    plainfile.close()
    # Get plain text
    if maintype == 'text' and subtype == 'plain' and not name:
        textBody = body
        contentType = msg.get('content-type', 'text/plain')
    else:
        # No name? This should be the html-body...
        if not name:
            name = '%s.%s' % (maintype, subtype)
            htmlBody = body
        attachments.append({
            'filename': mime_decode_header(name),
            'filebody': body,
            'maintype': maintype,
            'subtype': subtype
        })
    return (textBody, contentType, htmlBody, attachments)
sock.connect((self.host, self.port)) sock.send(rq) reply = sock.makefile('rb') sock = None line = reply.readline() try: [ver, ec, em] = split(line, None, 2) except ValueError: raise 'BadReply', 'Bad reply from server: ' + line if ver[:5] != 'HTTP/': raise 'BadReply', 'Bad reply from server: ' + line ec = atoi(ec) em = strip(em) headers = mimetools.Message(reply, 0) response = reply.read() finally: if 0: raise NotAvailable, (RemoteException(NotAvailable, sys.exc_info()[1], self.url, '<MultiPart Form>')) if ec == 200: return (headers, response) self.handleError('', ec, em, headers, response) class Object: """Surrogate object for an object on the web""" username = None
def open_ftp(self, url): import mimetypes import mimetools import StringIO (host, path) = splithost(url) if not host: raise IOError, ('ftp error', 'no host given') (host, port) = splitport(host) (user, host) = splituser(host) if user: (user, passwd) = splitpasswd(user) else: passwd = None host = unquote(host) if not user: pass user = unquote('') if not passwd: pass passwd = unquote('') host = socket.gethostbyname(host) if not port: import ftplib as ftplib port = ftplib.FTP_PORT else: port = int(port) (path, attrs) = splitattr(path) path = unquote(path) dirs = path.split('/') dirs = dirs[:-1] file = dirs[-1] if dirs and not dirs[0]: dirs = dirs[1:] if dirs and not dirs[0]: dirs[0] = '/' key = (user, host, port, '/'.join(dirs)) if len(self.ftpcache) > MAXFTPCACHE: for k in self.ftpcache.keys(): if k != key: v = self.ftpcache[k] del self.ftpcache[k] v.close() continue try: if key not in self.ftpcache: self.ftpcache[key] = ftpwrapper(user, passwd, host, port, dirs) if not file: type = 'D' else: type = 'I' for attr in attrs: (attr, value) = splitvalue(attr) if attr.lower() == 'type' and value in ('a', 'A', 'i', 'I', 'd', 'D'): type = value.upper() continue (fp, retrlen) = self.ftpcache[key].retrfile(file, type) mtype = mimetypes.guess_type('ftp:' + url)[0] headers = '' if mtype: headers += 'Content-Type: %s\n' % mtype if retrlen is not None and retrlen >= 0: headers += 'Content-Length: %d\n' % retrlen headers = mimetools.Message(StringIO.StringIO(headers)) return addinfourl(fp, headers, 'ftp:' + url) except ftperrors(): msg = None raise IOError, ('ftp error', msg), sys.exc_info()[2]