def test304(self):
    self.request.uri = '/index.html'
    self.handler._setpath(self.request)
    self.handler.mode = 'deployment'
    mtime = os.stat(self.request.path)[stat.ST_MTIME]
    ims = 'If-Modified-Since: %s' % http_date.build_http_date(mtime)
    self.request.header.insert(len(self.request.header), ims)
    try:
        self.handler._getstatic(self.request)
    except RequestError, error:
        # expected: _getstatic raises RequestError(304) because the file
        # has not been modified since the If-Modified-Since date
        pass
def add_last_modified_headers(request, repo):
    request['X-UpLib-Repository-Last-Modified'] = http_date.build_http_date(repo.mod_time())
    wants_doc_changes = get_header(DOC_CHANGES_SINCE_HEADER, request.header)
    if wants_doc_changes:
        try:
            wants_doc_changes = float(wants_doc_changes.strip())
        except:
            # malformed timestamp in the request header; ignore it
            pass
        else:
            docs = repo.get_touched_since(wants_doc_changes)
            request['X-UpLib-Docs-Modified'] = (
                str(wants_doc_changes) + ";" +
                ";".join(["%s,%s" % (doc.id, doc.touch_time()) for doc in docs]))
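# A minimal client-side sketch (not part of the original code) of how the
# X-UpLib-Docs-Modified value built above could be unpacked again.  The name
# parse_docs_modified is hypothetical; the format is
# "<since>;<doc-id>,<touch-time>;<doc-id>,<touch-time>;..." as assembled in
# add_last_modified_headers.
def parse_docs_modified(value):
    parts = value.split(";")
    since = float(parts[0])
    touched = [tuple(p.split(",", 1)) for p in parts[1:] if p]
    return since, touched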
def handle_request(self, request):
    if request.command != 'GET':
        request.error(400) # bad request
        return

    path, params, query, fragment = request.split_uri()

    if '%' in path:
        path = http_server.unquote(path)

    # strip off all leading slashes
    while path and path[0] == '/':
        path = path[1:]

    path, process_name_and_channel = path.split('/', 1)

    try:
        process_name, channel = process_name_and_channel.split('/', 1)
    except ValueError:
        # no channel specified, default channel to stdout
        process_name = process_name_and_channel
        channel = 'stdout'

    from options import split_namespec
    group_name, process_name = split_namespec(process_name)

    group = self.supervisord.process_groups.get(group_name)
    if group is None:
        request.error(404) # not found
        return

    process = group.processes.get(process_name)
    if process is None:
        request.error(404) # not found
        return

    logfile = getattr(process.config, '%s_logfile' % channel, None)

    if logfile is None or not os.path.exists(logfile):
        # XXX problematic: processes that don't start won't have a log
        # file and we probably don't want to go into fatal state if we try
        # to read the log of a process that did not start.
        request.error(410) # gone
        return

    mtime = os.stat(logfile)[stat.ST_MTIME]
    request['Last-Modified'] = http_date.build_http_date(mtime)
    request['Content-Type'] = 'text/plain'
    # the lack of a Content-Length header makes the outputter
    # send a 'Transfer-Encoding: chunked' response
    request.push(tail_f_producer(request, logfile, 1024))

    request.done()
def _getstatic(self, request):
    """Process a request for a static resource.

    This method can raise 304.
    """
    # Serve a 304 if appropriate.
    # ===========================

    mtime = os.stat(request.path)[stat.ST_MTIME]
    content_length = os.stat(request.path)[stat.ST_SIZE]

    if self.mode == 'deployment':

        ims = get_header_match(IF_MODIFIED_SINCE, request.header)

        length_match = True
        if ims:
            length = ims.group(4)
            if length:
                try:
                    length = int(length)
                    if length != content_length:
                        length_match = False
                except:
                    pass

        ims_date = False
        if ims:
            ims_date = http_date.parse_http_date(ims.group(1))

        if length_match and ims_date:
            if mtime <= ims_date:
                raise RequestError(304)

    # Set headers and return content.
    # ===============================

    content = open(request.path, 'rb').read()
    request['Last-Modified'] = http_date.build_http_date(mtime)
    request['Content-Length'] = content_length
    request['Content-Type'] = guess_type(request.path)[0] or 'text/plain'
    return content
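# For reference, a minimal sketch (an assumption, not taken from this codebase) of
# the conditional request header the 304 check above expects: group(1) carries the
# HTTP date and group(4) the optional Medusa-style "length" hint that is compared
# against the file size.  build_http_date is the same http_date call used in the
# other snippets here; the length value 1024 is arbitrary.
import time
from medusa import http_date

example_ims = 'If-Modified-Since: %s; length=%d' % (
    http_date.build_http_date(time.time()), 1024)
# -> e.g. 'If-Modified-Since: Sat, 29 Oct 1994 19:43:31 GMT; length=1024'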
def return_file(self, typ, path, delete_on_close=False, filename=None):
    stats = os.stat(path)
    if delete_on_close:
        fp = self_deleting_file(path, 'rb')
    else:
        fp = open(path, 'rb')
    self.request['Content-Type'] = (isinstance(typ, unicode) and typ.encode("ASCII", "replace")) or typ
    self.request['Content-Length'] = stats.st_size
    self.request['Last-Modified'] = http_date.build_http_date(stats.st_mtime)
    add_last_modified_headers(self.request, self.repo)
    if filename:
        filename = (isinstance(filename, unicode) and filename.encode("ASCII", "replace")) or filename
        self.request["Content-Disposition"] = "inline; filename=%s" % filename
    elif not delete_on_close:
        filename = os.path.split(path)[1]
        filename = (isinstance(filename, unicode) and filename.encode("ASCII", "replace")) or filename
        self.request["Content-Disposition"] = "inline; filename=%s" % filename
    # HEAD requests get the headers only, no body
    if (self.request.command != "HEAD"):
        self.request.push(producers.file_producer(fp))
def handle_request(self, request):
    if request.command != 'GET':
        request.error(400) # bad request
        return

    logfile = self.supervisord.options.logfile

    if logfile is None or not os.path.exists(logfile):
        request.error(410) # gone
        return

    mtime = os.stat(logfile)[stat.ST_MTIME]
    request['Last-Modified'] = http_date.build_http_date(mtime)
    request['Content-Type'] = 'text/plain'
    # the lack of a Content-Length header makes the outputter
    # send a 'Transfer-Encoding: chunked' response
    request.push(tail_f_producer(request, logfile, 1024))

    request.done()
def __str__(self,
            html_search=re.compile('<html>', re.I).search,
            ):
    if self._wrote:
        if self._chunking:
            return '0\r\n\r\n'
        else:
            return ''

    headers = self.headers
    body = self.body

    # set 204 (no content) status if 200 and response is empty
    # and not streaming
    if not headers.has_key('content-type') and \
       not headers.has_key('content-length') and \
       not self._streaming and \
       self.status == 200:
        self.setStatus('nocontent')

    # add content length if not streaming
    if not headers.has_key('content-length') and \
       not self._streaming:
        self.setHeader('content-length', len(body))

    content_length = headers.get('content-length', None)
    if content_length > 0:
        self.setHeader('content-length', content_length)

    headersl = []
    append = headersl.append

    status = headers.get('status', '200 OK')

    # status header must come first.
    append("HTTP/%s %s" % (self._http_version or '1.0', status))
    if headers.has_key('status'):
        del headers['status']

    if not headers.has_key("Etag"):
        self.setHeader('Etag', '')

    # add zserver headers
    append('Server: %s' % self._server_version)
    append('Date: %s' % build_http_date(time.time()))

    if self._http_version == '1.0':
        if self._http_connection == 'keep-alive' and \
           self.headers.has_key('content-length'):
            self.setHeader('Connection', 'Keep-Alive')
        else:
            self.setHeader('Connection', 'close')

    # Close the connection if we have been asked to.
    # Use chunking if streaming output.
    if self._http_version == '1.1':
        if self._http_connection == 'close':
            self.setHeader('Connection', 'close')
        elif not self.headers.has_key('content-length'):
            if self.http_chunk and self._streaming:
                self.setHeader('Transfer-Encoding', 'chunked')
                self._chunking = 1
            else:
                self.setHeader('Connection', 'close')

    for key, val in headers.items():
        if key.lower() == key:
            # only change non-literal header names
            key = "%s%s" % (key[:1].upper(), key[1:])
            start = 0
            l = key.find('-', start)
            while l >= start:
                key = "%s-%s%s" % (key[:l], key[l + 1:l + 2].upper(), key[l + 2:])
                start = l + 1
                l = key.find('-', start)
        append("%s: %s" % (key, val))

    if self.cookies:
        headersl = headersl + self._cookie_list()
    headersl[len(headersl):] = [self.accumulated_headers, body]

    return "\r\n".join(headersl)
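# A standalone restatement (an assumption, mirroring the casing loop in __str__
# above, not code from the source) of how all-lowercase header names are given
# canonical HTTP casing; names that already contain uppercase letters are treated
# as "literal" and left untouched.
def _canonical_header_name(key):
    if key.lower() != key:
        return key
    return '-'.join([part[:1].upper() + part[1:] for part in key.split('-')])

# _canonical_header_name('content-type')     -> 'Content-Type'
# _canonical_header_name('WWW-Authenticate') -> 'WWW-Authenticate'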
def handle_request(self, request):
    """Handle an HTTP request.
    """
#    # Re-init for dev mode.
#    # =====================
#
#    if self.dev_mode and 0:
#        self.__pre__()

    # Command
    # =======

    if request.command not in self.valid_commands:
        request.error(400) # bad request
        return

    # Path
    # ====

    # parse the uri -- only ever contains path and query(?)
    scheme, name, path, query, fragment = urlparse.urlsplit(request.uri)

    # tidy up the path
    if '%' in path:
        path = urllib.unquote(path)
    path = os.path.join(self.root, path.lstrip('/'))
    path = os.path.realpath(path)

#    # Applications
#    # ============
#    # determine if the url belongs to one of our apps
#    # if so then hand off control flow to the application
#
#    for p in self.app_paths:
#        if path.startswith(p):
#            app = self.apps[p]
#            app(request)
#            return

    # Pages & Static Content
    # ======================

    # see if the path is valid
    if not os.path.exists(path):
        request.error(404)
        return
    elif not path.startswith(self.root): # protect against ./../../../
        request.error(400)
        return

    # if the path points to a directory, look for a default obj
    if os.path.isdir(path):
        # look for a default object
        found = False
        for name in self.defaults:
            _path = os.path.join(path, name)
            if os.path.isfile(_path):
                found = True
                path = _path
                break
        if not found:
            # no default object
            request.error(404)
            return

    # save this for later use in state.py
    request.path = path

    # Decide if the content has changed recently.
    # ===========================================

    mtime = os.stat(path)[stat.ST_MTIME]
    content_length = os.stat(path)[stat.ST_SIZE]

    if not self.dev_mode:

        ims = get_header_match(IF_MODIFIED_SINCE, request.header)

        length_match = True
        if ims:
            length = ims.group(4)
            if length:
                try:
                    length = string.atoi(length)
                    if length != content_length:
                        length_match = False
                except:
                    pass

        ims_date = False
        if ims:
            ims_date = http_date.parse_http_date(ims.group(1))

        if length_match and ims_date:
            if mtime <= ims_date:
                request.reply_code = 304
                request.done()
                return

    # Actually serve the content.
    # ===========================

    # pages
    if path.endswith('.pt'):
        template = self.templates.getXMLTemplate(path)
        content = self._render_pt(request, template)
        #request['Last-Modified'] = http_date.build_http_date(mtime)
        #request['Content-Length'] = len(content)
        request['Content-Type'] = 'text/html'
        if request.command == 'GET':
            request.push(self.producer(content))
        request.done()
        return

    # static content
    else:
        content = file(path, 'rb').read()
        request['Last-Modified'] = http_date.build_http_date(mtime)
        request['Content-Length'] = content_length
        self.set_content_type(path, request)
        if request.command == 'GET':
            request.push(self.producer(content))
        request.done()
        return
def __str__(self,
            html_search=re.compile('<html>', re.I).search,
            ):
    if self._wrote:
        if self._chunking:
            return '0\r\n\r\n'
        else:
            return ''

    headers = self.headers
    body = self.body

    # set 204 (no content) status if 200 and response is empty
    # and not streaming
    if not headers.has_key('content-type') and \
       not headers.has_key('content-length') and \
       not self._streaming and \
       self.status == 200:
        self.setStatus('nocontent')

    if self.status in (100, 101, 102, 204, 304):
        # These responses should not have any body or Content-Length.
        # See RFC 2616 4.4 "Message Length".
        body = ''
        if 'content-length' in headers:
            del headers['content-length']
        if 'content-type' in headers:
            del headers['content-type']
    elif not headers.has_key('content-length') and not self._streaming:
        self.setHeader('content-length', len(body))

    headersl = []
    append = headersl.append

    status = headers.get('status', '200 OK')

    # status header must come first.
    append("HTTP/%s %s" % (self._http_version or '1.0', status))
    if headers.has_key('status'):
        del headers['status']

    # add zserver headers
    append('Server: %s' % self._server_version)
    append('Date: %s' % build_http_date(time.time()))

    if self._http_version == '1.0':
        if self._http_connection == 'keep-alive':
            self.setHeader('Connection', 'Keep-Alive')
        else:
            self.setHeader('Connection', 'close')

    # Close the connection if we have been asked to.
    # Use chunking if streaming output.
    if self._http_version == '1.1':
        if self._http_connection == 'close':
            self.setHeader('Connection', 'close')
        elif (not self.headers.has_key('content-length') and
              self.http_chunk and self._streaming):
            self.setHeader('Transfer-Encoding', 'chunked')
            self._chunking = 1

    headers = headers.items()
    for line in self.accumulated_headers.splitlines():
        if line[0] == '\t':
            headers[-1][1] += '\n' + line
            continue
        headers.append(line.split(': ', 1))

    for key, val in headers:
        if key.lower() == key:
            # only change non-literal header names
            key = "%s%s" % (key[:1].upper(), key[1:])
            start = 0
            l = key.find('-', start)
            while l >= start:
                key = "%s-%s%s" % (key[:l], key[l + 1:l + 2].upper(), key[l + 2:])
                start = l + 1
                l = key.find('-', start)
        val = val.replace('\n\t', '\r\n\t')
        append("%s: %s" % (key, val))

    if self.cookies:
        headersl.extend(self._cookie_list())
    append('')
    append(body)

    return "\r\n".join(headersl)
# -*- Mode: Python -*-

import socket
import string
import time

from medusa import http_date

now = http_date.build_http_date(time.time())

cache_request = string.joinfields(
    ['GET / HTTP/1.0',
     'If-Modified-Since: %s' % now,
     ],
    '\r\n'
    ) + '\r\n\r\n'

nocache_request = 'GET / HTTP/1.0\r\n\r\n'

def get(request, host='', port=80):
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.connect((host, port))
    s.send(request)
    while 1:
        d = s.recv(8192)
        if not d:
            break
    s.close()

class timer:
    def __init__(self):
        self.start = time.time()
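# A rough usage sketch (assumed, not part of the original script): time a burst
# of conditional requests against a local server and compare the cached (304)
# path with the full-response path.  Host, port, and request count are arbitrary.
def bench(request, n=100, host='127.0.0.1', port=80):
    t0 = time.time()
    for _ in range(n):
        get(request, host, port)
    return time.time() - t0

# e.g.:
#   print 'cached:   %.3fs' % bench(cache_request)
#   print 'uncached: %.3fs' % bench(nocache_request)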