def handle_request(self, request):
    """Serve a static file for a GET or HEAD request.

    Resolves the request path under ``self.doc_root``, maps directory
    requests to ``index.html``, honors If-Modified-Since (replying 304
    when the cached copy is current), sets Content-Type/Last-Modified,
    and streams the file body in 32 KB chunks for GET.

    Errors: 405 for methods other than get/head, 404 for missing
    files, 204 for an empty file.
    """
    path = request._path
    filename = os.path.join(self.doc_root, path[1:])
    if request._method not in ('get', 'head'):
        request.error(405)
        return
    if os.path.isdir(filename):
        # directory requests fall back to the index page
        filename = os.path.join(filename, 'index.html')
    if not os.path.isfile(filename):
        request.error(404)
    else:
        stat_info = os.stat(filename)
        mtime = stat_info[stat.ST_MTIME]
        file_length = stat_info[stat.ST_SIZE]
        ims = request.get_request_header('if-modified-since')
        if ims:
            length_match = 1
            m = self.crack_if_modified_since.match(ims)
            if m:
                length = m.group(3)
                if length:
                    if int(length) != file_length:
                        length_match = 0
                ims_date = http_date.parse_http_date(m.group(1))
                if length_match and ims_date:
                    if mtime <= ims_date:
                        # client's cached copy is current
                        request.error(304, with_body=0)
                        return
        # unused 'base' from splitext dropped
        ext = os.path.splitext(filename)[1][1:].lower()
        request['Content-Type'] = mime_type_table.content_type_map.get(
            ext, 'text/plain')
        request['Last-Modified'] = http_date.build_http_date(mtime)
        if request._method == 'get':
            # FIX: use a context manager so the descriptor is always
            # closed; the original opened the file and never closed it,
            # leaking one fd per GET request.
            with open(filename, 'rb') as f:
                block = f.read(32768)
                if not block:
                    request.error(204)  # no content
                else:
                    while 1:
                        request.push(block)
                        block = f.read(32768)
                        if not block:
                            break
        elif request._method == 'head':
            pass
        else:
            # should be impossible
            request.error(405)
def __init__(self, *args):
    """Initialize a request object.

    ``args`` is the tuple (channel, request, command, uri, version,
    header); each element is bound to the attribute of the same name.
    """
    # unpack information about the request
    self.channel, self.request, self.command, \
        self.uri, self.version, self.header = args
    self._split_uri = None
    self._header_cache = {}
    self.outgoing = []
    base_headers = {}
    base_headers["Server"] = "Medusa/%s" % VERSION_STRING
    base_headers["Date"] = http_date.build_http_date(time.time())
    self.reply_headers = base_headers
    self.request_number = http_request.request_counter.increment()
def handle_request(self, request):
    """Serve a static file under ``self.doc_root`` for GET/HEAD.

    Handles directory -> index.html fallback, If-Modified-Since
    conditional replies (304), and chunked streaming of the body.
    Replies 405 for other methods, 404 when the file is missing,
    204 for an empty file.
    """
    path = request._path
    filename = os.path.join(self.doc_root, path[1:])
    if request._method not in ('get', 'head'):
        request.error(405)
        return
    if os.path.isdir(filename):
        filename = os.path.join(filename, 'index.html')
    if not os.path.isfile(filename):
        request.error(404)
    else:
        stat_info = os.stat(filename)
        mtime = stat_info[stat.ST_MTIME]
        file_length = stat_info[stat.ST_SIZE]
        ims = request.get_request_header('if-modified-since')
        if ims:
            length_match = 1
            m = self.crack_if_modified_since.match(ims)
            if m:
                length = m.group(3)
                if length:
                    if int(length) != file_length:
                        length_match = 0
                ims_date = http_date.parse_http_date(m.group(1))
                if length_match and ims_date:
                    if mtime <= ims_date:
                        # not modified since the client's copy
                        request.error(304, with_body=0)
                        return
        ext = os.path.splitext(filename)[1][1:].lower()
        request['Content-Type'] = mime_type_table.content_type_map.get(
            ext, 'text/plain')
        request['Last-Modified'] = http_date.build_http_date(mtime)
        if request._method == 'get':
            # FIX: the original leaked the file object (no close on any
            # path); 'with' guarantees it is closed even if push raises.
            with open(filename, 'rb') as f:
                block = f.read(32768)
                if not block:
                    request.error(204)  # no content
                else:
                    while 1:
                        request.push(block)
                        block = f.read(32768)
                        if not block:
                            break
        elif request._method == 'head':
            pass
        else:
            # should be impossible
            request.error(405)
def __init__(self, *args):
    """Bind the (channel, request, command, uri, version, header)
    tuple to attributes and set up per-request reply state."""
    # unpack information about the request
    (self.channel,
     self.request,
     self.command,
     self.uri,
     self.version,
     self.header) = args
    self._header_cache = {}
    self._split_uri = None
    self.outgoing = fifo()
    server_value = 'Medusa/%s' % VERSION_STRING
    date_value = http_date.build_http_date(time.time())
    self.reply_headers = {'Server': server_value, 'Date': date_value}
    self.request_number = http_request.request_counter.increment()
def get_headers(self):
    """Decide connection persistence and chunking for this reply,
    stamp the Server/Date headers, and return the serialized status
    line, reply headers and cookies joined with CRLF."""
    chunking = 0
    # here is were we decide things like keep-alive, 1.0 vs 1.1, chunking, etc.
    tokens = [
        t.strip()
        for t in self.get_request_header('connection').lower().split(',')
    ]
    close_it = 0
    if self._version == '1.0':
        # HTTP/1.0 closes unless keep-alive is requested AND we know
        # the body length.
        if 'keep-alive' not in tokens:
            close_it = 1
        elif not self.has_key('content-length'):
            close_it = 1
        else:
            self['Connection'] = 'Keep-Alive'
    elif self._version == '1.1':
        if 'close' in tokens:
            close_it = 1
        elif not self.has_key('content-length'):
            # no length known: chunk if possible, otherwise close
            if not self.has_key('transfer-encoding'):
                self['Transfer-Encoding'] = 'chunked'
                chunking = 1
            elif self['Transfer-Encoding'] == 'chunked':
                chunking = 1
            else:
                close_it = 1
    elif self._version == '0.9':
        close_it = 1
    if close_it:
        self['Connection'] = 'close'
        self._close = 1
    self._chunking = chunking
    self['Server'] = 'IronPort httpd/%s' % __version__
    self['Date'] = http_date.build_http_date(coro.now_usec / coro.microseconds)
    lines = [self.response(self._reply_code)]
    for item in self._reply_headers.items():
        lines.append('%s: %s' % item)
    for cookie in self._reply_cookies:
        lines.append(cookie.output())
    lines.append('\r\n')
    return '\r\n'.join(lines)
def get_headers(self):
    """Compute keep-alive/close/chunking policy for the reply and
    build the complete header text (status line, headers, cookies)."""
    # here is were we decide things like keep-alive, 1.0 vs 1.1, chunking, etc.
    chunking = 0
    close_it = 0
    raw_connection = self.get_request_header('connection').lower()
    connection_tokens = [piece.strip() for piece in raw_connection.split(',')]
    version = self._version
    if version == '1.0':
        if 'keep-alive' in connection_tokens:
            if self.has_key('content-length'):
                self['Connection'] = 'Keep-Alive'
            else:
                # cannot keep-alive without a known body length
                close_it = 1
        else:
            close_it = 1
    elif version == '1.1':
        if 'close' in connection_tokens:
            close_it = 1
        elif not self.has_key('content-length'):
            if self.has_key('transfer-encoding'):
                if self['Transfer-Encoding'] == 'chunked':
                    chunking = 1
                else:
                    close_it = 1
            else:
                self['Transfer-Encoding'] = 'chunked'
                chunking = 1
    elif version == '0.9':
        close_it = 1
    if close_it:
        self['Connection'] = 'close'
        self._close = 1
    self._chunking = chunking
    self['Server'] = 'IronPort httpd/%s' % __version__
    self['Date'] = http_date.build_http_date(coro.now_usec / coro.microseconds)
    parts = [self.response(self._reply_code)]
    parts.extend('%s: %s' % kv for kv in self._reply_headers.items())
    parts.extend(c.output() for c in self._reply_cookies)
    parts.append('\r\n')
    return '\r\n'.join(parts)
def __init__(self, *args):
    """Unpack (channel, request, command, uri, version, header) into
    attributes and initialize reply bookkeeping."""
    # unpack information about the request
    self.channel, self.request, self.command, \
        self.uri, self.version, self.header = args
    self._split_uri = None
    self._header_cache = {}
    self.outgoing = []
    headers = {}
    headers['Server'] = 'Medusa/%s' % VERSION_STRING
    headers['Date'] = http_date.build_http_date(time.time())
    self.reply_headers = headers
    # New reply header list (to support multiple
    # headers with same name)
    self.__reply_header_list = []
    self.request_number = http_request.request_counter.increment()
def get_headers(self):
    """Decide close/keep-alive/chunking for the reply, stamp the
    standard Server/Date headers, and return the serialized status
    line plus reply headers terminated by a blank line."""
    chunked = False
    # here is were we decide things like keep-alive, 1.0 vs 1.1, chunking, etc.
    hi = self.request_headers
    ho = self.reply_headers
    connection = hi.get_one('connection')
    if connection:
        connection_tokens = [x.strip() for x in connection.split(',')]
    else:
        connection_tokens = ()
    close_it = False
    if self.version == '1.1':
        # FIX: compare a *str* token. connection_tokens comes from
        # str.split on the header value, so the original b'close'
        # (bytes) could never match and 'Connection: close' was
        # silently ignored for HTTP/1.1 clients.
        if 'close' in connection_tokens:
            close_it = True
        elif not ho.get_one('content-length'):
            # unknown body length: stream with chunked encoding
            ho['transfer-encoding'] = 'chunked'
            chunked = True
    elif self.version == '1.0':
        if 'keep-alive' in connection_tokens:
            if not ho.get_one('content-length'):
                # cannot keep-alive without a known length
                close_it = True
            else:
                ho['connection'] = 'keep-alive'
        else:
            close_it = True
    elif self.version == '0.9':
        close_it = True
    if close_it:
        ho['connection'] = 'close'
    self.chunking = chunked
    self.close = close_it
    ho['server'] = 'shrapnel httpd/%s' % __version__
    ho['date'] = http_date.build_http_date(coro.now_usec / coro.microseconds)
    return self.response(self.reply_code) + '\r\n' + str(
        self.reply_headers) + '\r\n'
def get_headers(self):
    """Work out the close/chunking policy for this response, set the
    standard reply headers, and return the serialized header block."""
    # here is were we decide things like keep-alive, 1.0 vs 1.1, chunking, etc.
    request_h = self.request_headers
    reply_h = self.reply_headers
    raw = request_h.get_one('connection')
    tokens = [t.strip() for t in raw.split(',')] if raw else ()
    chunked = False
    close_it = False
    if self.version == '1.1':
        if 'close' in tokens:
            close_it = True
        elif not reply_h.get_one('content-length'):
            # no declared length: fall back to chunked streaming
            reply_h['transfer-encoding'] = 'chunked'
            chunked = True
    elif self.version == '1.0':
        if 'keep-alive' not in tokens:
            close_it = True
        elif not reply_h.get_one('content-length'):
            close_it = True
        else:
            reply_h['connection'] = 'keep-alive'
    elif self.version == '0.9':
        close_it = True
    if close_it:
        reply_h['connection'] = 'close'
    self.chunking = chunked
    self.close = close_it
    reply_h['server'] = 'shrapnel httpd/%s' % __version__
    reply_h['date'] = http_date.build_http_date(coro.now_usec / coro.microseconds)
    return self.response(self.reply_code) + '\r\n' + str(
        self.reply_headers) + '\r\n'
def handle_request(self, request):
    """Serve a file from ``self.filesystem`` for GET/HEAD requests.

    Handles percent-decoding, directory redirects (301 to the
    trailing-slash form), directory default documents,
    If-Modified-Since (304), and hands the open file to a producer
    for streaming. Replies 400 for unsupported commands and 404 when
    the path cannot be served.
    """
    if request.command not in self.valid_commands:
        request.error(400)  # bad request
        return
    self.hit_counter.increment()
    path, params, query, fragment = request.split_uri()
    if '%' in path:
        path = unquote(path)
    # strip off all leading slashes
    while path and path[0] == '/':
        path = path[1:]
    if self.filesystem.isdir(path):
        if path and path[-1] != '/':
            request['Location'] = 'http://%s/%s/' % (
                request.channel.server.server_name, path)
            request.error(301)
            return
        # we could also generate a directory listing here,
        # may want to move this into another method for that
        # purpose
        found = 0
        if path and path[-1] != '/':
            path = path + '/'
        for default in self.directory_defaults:
            p = path + default
            if self.filesystem.isfile(p):
                path = p
                found = 1
                break
        if not found:
            request.error(404)  # Not Found
            return
    elif not self.filesystem.isfile(path):
        request.error(404)  # Not Found
        return
    file_length = self.filesystem.stat(path)[stat.ST_SIZE]
    ims = get_header_match(IF_MODIFIED_SINCE, request.header)
    length_match = 1
    if ims:
        length = ims.group(4)
        if length:
            # FIX: int() instead of the long-removed string.atoi, and
            # catch only the conversion error rather than a bare
            # except that could hide unrelated failures.
            try:
                if int(length) != file_length:
                    length_match = 0
            except ValueError:
                pass
    ims_date = 0
    if ims:
        ims_date = http_date.parse_http_date(ims.group(1))
    try:
        mtime = self.filesystem.stat(path)[stat.ST_MTIME]
    except Exception:
        # FIX: was a bare except: (would also trap KeyboardInterrupt /
        # SystemExit); any stat failure still maps to 404.
        request.error(404)
        return
    if length_match and ims_date:
        if mtime <= ims_date:
            # client's cached copy is still valid
            request.reply_code = 304
            request.done()
            self.cache_counter.increment()
            return
    try:
        fileobj = self.filesystem.open(path, 'rb')  # renamed: don't shadow builtin 'file'
    except IOError:
        request.error(404)
        return
    request['Last-Modified'] = http_date.build_http_date(mtime)
    request['Content-Length'] = file_length
    self.set_content_type(path, request)
    if request.command == 'get':
        # producer takes ownership of fileobj and closes it when done
        # (assumed from the original's lack of close — TODO confirm)
        request.push(self.default_file_producer(fileobj))
        self.file_counter.increment()
    request.done()
# -*- Mode: Python; tab-width: 4 -*-
# Simple client used to probe server cache behavior: one request
# carrying an If-Modified-Since header and one without.

import socket
import string
import time

import http_date

now = http_date.build_http_date(time.time())

# conditional request — a caching server should answer 304
cache_request = '\r\n'.join([
    'GET / HTTP/1.0',
    'If-Modified-Since: %s' % now,
]) + '\r\n\r\n'

# unconditional request — always a full 200 response
nocache_request = 'GET / HTTP/1.0\r\n\r\n'

def get(request, host='', port=80):
    """Send <request> to (host, port) and drain the whole response."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # FIX: connect() takes a single (host, port) address tuple; the
    # original passed two positional arguments, which raises TypeError.
    s.connect((host, port))
    s.send(request)
    while 1:
        d = s.recv(8192)
        if not d:
            break
    s.close()

class timer:
    """Wall-clock timer started at construction."""
    def __init__(self):
        self.start = time.time()
def handle_request(self, request):
    """Serve a file from the abstract filesystem for GET/HEAD.

    Performs percent-decoding, directory redirect (301), default
    document lookup, If-Modified-Since handling (304), then streams
    the file via ``default_file_producer``. 400 for bad commands,
    404 for anything that cannot be served.
    """
    if request.command not in self.valid_commands:
        request.error(400)  # bad request
        return
    self.hit_counter.increment()
    path, params, query, fragment = request.split_uri()
    if '%' in path:
        path = unquote(path)
    # strip off all leading slashes
    while path and path[0] == '/':
        path = path[1:]
    if self.filesystem.isdir(path):
        if path and path[-1] != '/':
            request['Location'] = 'http://%s/%s/' % (
                request.channel.server.server_name, path)
            request.error(301)
            return
        # we could also generate a directory listing here,
        # may want to move this into another method for that
        # purpose
        found = 0
        if path and path[-1] != '/':
            path = path + '/'
        for default in self.directory_defaults:
            p = path + default
            if self.filesystem.isfile(p):
                path = p
                found = 1
                break
        if not found:
            request.error(404)  # Not Found
            return
    elif not self.filesystem.isfile(path):
        request.error(404)  # Not Found
        return
    file_length = self.filesystem.stat(path)[stat.ST_SIZE]
    ims = get_header_match(IF_MODIFIED_SINCE, request.header)
    length_match = 1
    if ims:
        length = ims.group(4)
        if length:
            # FIX: replace obsolete string.atoi with int(); narrow the
            # bare except to the conversion error it was guarding.
            try:
                if int(length) != file_length:
                    length_match = 0
            except ValueError:
                pass
    ims_date = 0
    if ims:
        ims_date = http_date.parse_http_date(ims.group(1))
    try:
        mtime = self.filesystem.stat(path)[stat.ST_MTIME]
    except Exception:
        # FIX: bare except narrowed so ^C / SystemExit propagate;
        # stat failures still return 404 to the client.
        request.error(404)
        return
    if length_match and ims_date:
        if mtime <= ims_date:
            request.reply_code = 304
            request.done()
            self.cache_counter.increment()
            return
    try:
        fileobj = self.filesystem.open(path, 'rb')  # avoid shadowing builtin 'file'
    except IOError:
        request.error(404)
        return
    request['Last-Modified'] = http_date.build_http_date(mtime)
    request['Content-Length'] = file_length
    self.set_content_type(path, request)
    if request.command == 'get':
        request.push(self.default_file_producer(fileobj))
        self.file_counter.increment()
    request.done()
# -*- Mode: Python; tab-width: 4 -*-
# Cache-probe client: issues GET / with and without If-Modified-Since
# to exercise a server's conditional-request handling.

import socket
import string
import time

import http_date

now = http_date.build_http_date(time.time())

# request that should hit the server's 304 path on a cached resource
cache_request = '\r\n'.join([
    'GET / HTTP/1.0',
    'If-Modified-Since: %s' % now,
]) + '\r\n\r\n'

nocache_request = 'GET / HTTP/1.0\r\n\r\n'

def get(request, host='', port=80):
    """Send <request> and read the response until the peer closes."""
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # FIX: socket.connect() expects one (host, port) tuple argument;
    # the original two-argument call raises TypeError on modern Python.
    s.connect((host, port))
    s.send(request)
    while 1:
        d = s.recv(8192)
        if not d:
            break
    s.close()

class timer:
    """Records the wall-clock start time at construction."""
    def __init__(self):
        self.start = time.time()