def fetch(self, environ):
    """Return a cached ``(status, headers, body)`` for this request, or None.

    Returns None (serve from the origin app instead) whenever the request
    is not safely cacheable or no fresh entry exists.
    """
    # Only cache-safe methods (normally GET/HEAD) are served from cache.
    if environ.get('REQUEST_METHOD', 'GET') not in self.allowed_methods:
        return
    request_headers = list(parse_headers(environ))
    # if a Cache-Control/Pragma: no-cache header is in the request,
    # and if honor_shift_reload is true, we don't serve it from cache
    if self.honor_shift_reload:
        if self._check_no_cache(request_headers, environ):
            return
    # we don't try to serve range requests up from the cache
    if header_value(request_headers, 'Range'):
        return
    # we don't try to serve conditional requests up from cache
    for conditional in ('If-Modified-Since', 'If-None-Match', 'If-Match'):
        # XXX other conditionals?
        if header_value(request_headers, conditional):
            return
    url = construct_url(environ)
    entries = self.storage.fetch(url)
    if entries:
        # Narrow the stored variants down to one that matches this
        # request's Vary/environ discriminators.
        matching = self._discriminate(entries, request_headers, environ)
        if not matching:
            return
        now = time.time()
        discrims, expires, status, response_headers, body, extras = matching
        # Serve only while still fresh; stale entries fall through to None.
        if expires > now:
            return status, response_headers, body
def filter(self, environ, headers, data):
    """Rewrite an HTML error/redirect response body as a small XML document.

    Pops ``flexfilter.status`` from *environ*; for temporary redirects the
    XML carries the Location/Warning headers, otherwise the message and
    code are scraped out of the HTML body.  Returns the new response body.
    """
    status = environ.pop('flexfilter.status')
    if status == HTTPTemporaryRedirect().status:
        response = '''
<error>
<code>%s</code>
<location>%s</location>
<message>%s</message>
</error>
''' % (status, header_value(headers, 'Location'),
       header_value(headers, 'Warning'))
        # BUG FIX: Content-Length header values must be strings; the old
        # code passed the raw int from len().
        replace_header(headers, 'Content-Length', str(len(response)))
    else:
        root = etree.HTML(data)
        # Prefer the body text; fall back to the <title> when it is empty.
        message = escape(
            etree.tostring(root.find('.//body'), method="text").strip())
        if not message:
            message = root.find('.//title').text
        details = ""
        code_node = root.find('.//code')
        if code_node is not None and code_node.text is not None:
            details = escape(code_node.text)
            # Shorten a bit.
            pos = details.find(',')
            if pos != -1:
                details = details[:pos]
        response = '''
<error>
<code>%s</code>
<message>%s</message>
<details>%s</details>
</error>
''' % (status, message, details)
        # BUG FIX: same str() coercion as above.
        replace_header(headers, 'Content-Length', str(len(response)))
    return response
def endtoend(headers):
    """Return only the end-to-end headers from *headers*.

    Drops the static hop-by-hop set plus any header named in the
    Connection header (RFC 2616 section 13.5.1).
    """
    connection_header = header_value(headers, 'Connection') or ''
    hop_by_hop = [token.strip().lower()
                  for token in connection_header.split(',')]
    hop_by_hop.extend(HOP_BY_HOP)
    kept = []
    for name, _value in headers:
        if name.lower() not in hop_by_hop:
            kept.append((name, header_value(headers, name)))
    return kept
def endtoend(headers):
    """Filter *headers* down to the end-to-end headers only."""
    # Anything listed in the Connection header is hop-by-hop too.
    listed = (header_value(headers, 'Connection') or '').split(',')
    hop_by_hop = [item.strip().lower() for item in listed]
    hop_by_hop.extend(HOP_BY_HOP)
    names = (pair[0] for pair in headers)
    return [(name, header_value(headers, name))
            for name in names
            if name.lower() not in hop_by_hop]
def store(self, status, response_headers, environ): request_headers = list(parse_headers(environ)) # abort if we shouldn't store this response request_method = environ.get('REQUEST_METHOD', 'GET') if request_method not in self.allowed_methods: return if not (status.startswith('200') or status.startswith('203')): return if environ['wsgi.url_scheme'] == 'https': if not self.store_https_responses: return if self._check_no_cache(response_headers, environ): return cc_header = header_value(response_headers, 'Cache-Control') if cc_header: cc_parts = parse_cache_control_header(cc_header) try: if int(cc_parts.get('max-age', '0')) == 0: return except ValueError: return # if we didn't abort due to any condition above, store the response vary_header_names = [] vary = header_value(response_headers, 'Vary') if vary is not None: vary_header_names.extend( [ x.strip().lower() for x in vary.split(',') ]) if self.always_vary_on_headers: vary_header_names.extend(list(self.always_vary_on_headers)) if '*' in vary_header_names: return discriminators = Discriminators(request_headers, environ, vary_header_names, self.always_vary_on_environ) headers = endtoend(response_headers) url = construct_url(environ) # Response headers won't have a date if we aren't proxying to # another http server on our right hand side. date = header_value(response_headers, 'Date') if date is None: date = time.time() else: date = calendar.timegm(parsedate_tz(date)) expires = self._expires(date, response_headers) # XXX purge? return self.storage.store( url, discriminators, expires, status, headers, )
def should_intercept(self, status, headers):
    """Callback to determine if the content should be intercepted """
    # An explicit opt-out header wins.
    if header_value(headers, 'x-deliverance-no-theme'):
        return False
    content_type = header_value(headers, 'content-type')
    if content_type is None:
        return True  # 304s can have no content-type
    return content_type.startswith(('text/html', 'application/xhtml+xml'))
def gzip_start_response(self, status, headers, exc_info=None):
    """start_response replacement: decide whether the body will be gzipped
    and adjust the response headers accordingly.

    Returns the buffer's write callable, per the (legacy) WSGI write API.
    """
    cnttype = header_value(headers, 'content-type')
    cntenc = header_value(headers, 'content-encoding')
    # Compress only if content-type is 'text/*' or 'application/*'
    typeok = cnttype and \
        (cnttype.startswith('text/') or cnttype.startswith('application/'))
    # Skip already-compressed payloads (zip content types, or anything
    # that already carries a Content-Encoding).
    self.compressible = bool(typeok and ('zip' not in cnttype)
                             and (not cntenc))
    if self.compressible:
        headers.append(('content-encoding', 'gzip'))
        # BUG FIX: content-length was previously removed unconditionally,
        # stripping a valid header even when the body was NOT going to be
        # compressed.  Only remove it when we will actually gzip.
        remove_header(headers, 'content-length')
    self.headers = headers
    self.status = status
    return self.buffer.write
def gzip_start_response(self, status, headers, exc_info=None):
    """Record status/headers and decide whether the body can be gzipped."""
    self.headers = headers
    content_type = header_value(headers, "content-type")
    already_encoded = header_value(headers, "content-encoding")
    # text/* and application/* are candidates, unless the type itself is
    # zip-like or an encoding is already applied.
    candidate = bool(
        content_type
        and (content_type.startswith("text/")
             or content_type.startswith("application/"))
        and "zip" not in content_type)
    self.compressible = candidate and not already_encoded
    if self.compressible:
        headers.append(("content-encoding", "gzip"))
        # Length will change after compression.
        remove_header(headers, "content-length")
    self.headers = headers
    self.status = status
    return self.buffer.write
def __call__(self, environ, start_response):
    """WSGI entry point: run the wrapped app under cProfile and append an
    HTML rendering of the profile stats to HTML responses."""
    catch_response = []
    body = []

    def replace_start_response(status, headers, exc_info=None):
        # Capture the response metadata while passing it through.
        catch_response.extend([status, headers])
        start_response(status, headers, exc_info)
        return body.append

    def run_app():
        body.extend(self.app(environ, replace_start_response))

    # Run in profiler
    prof = cProfile.Profile()
    prof.runctx("run_app()", globals(), locals())
    # Build up body with stats
    body = ''.join(body)
    headers = catch_response[1]
    content_type = response.header_value(headers, 'content-type')
    # BUG FIX: header_value returns None when there is no Content-Type
    # header (e.g. 204/304 responses); None.startswith(...) raised
    # AttributeError here.  Treat "no content type" as non-HTML.
    if content_type is None or not content_type.startswith('text/html'):
        # We can't add info to non-HTML output
        return [body]
    stats = pstats.Stats(prof)
    stats.strip_dirs()
    stats.sort_stats('time', 'calls')
    output = pstats_as_html(stats, self.limit)
    body += template % output
    return [body]
def __init__(self, format, data, content_type=None, headers=None):
    """Hold a response payload plus its headers, reconciling any
    explicit ``content_type`` argument with a Content-Type header.

    Raises TypeError when both are given and disagree.
    """
    self.format = format
    self.data = data
    # Normalize headers into a list of (name, value) pairs.
    if headers is None:
        headers = []
    elif hasattr(headers, 'items'):
        headers = headers.items()
    else:
        headers = list(headers)
    header_content_type = header_value(headers, 'content-type')
    if (header_content_type is not None
            and content_type is not None
            and header_content_type != content_type):
        raise TypeError(
            "You've given an explicit header of Content-Type: "
            "%s and passed content_type=%r, which is ambiguous"
            % (header_content_type, content_type))
    if header_content_type is None:
        # No header yet: fall back to the format's first content type
        # and record the result in the header list.
        if content_type is None:
            content_type = format.content_types[0]
        headers.append(('Content-Type', content_type))
    elif content_type is None:
        content_type = header_content_type
    self.content_type = content_type
    self.headers = headers
def make_wsgi_input_length(self, body, headers, internal):
    """Return ``(wsgi.input-like object, content-length string)``.

    Internal requests get a lazy serializer (length '1' as a stand-in);
    external ones get the fully serialized body.
    """
    content_type = header_value(headers, 'content-type')
    if internal:
        return LazySerialize(self, content_type, body), '1'
    data = ''.join(self.dump_iter(body, content_type))
    return StringIO(data), str(len(data))
def challenge(self, environ, status, app_headers, forget_headers):
    '''
    the challenge method is implemented here to supress
    the came_from query_attribute, which is not welcomed
    here :-)
    '''
    reason = header_value(app_headers, 'X-Authorization-Failure-Reason')
    # Decompose the configured login URL so its query string can be
    # extended with the failure reason.
    url_parts = list(urlparse.urlparse(self.login_form_url))
    query_elements = cgi.parse_qs(url_parts[4])
    if reason:
        query_elements[self.reason_param] = reason
    # Rebuild the query string and the full login URL.
    url_parts[4] = urllib.urlencode(query_elements, doseq=True)
    login_form_url = urlparse.urlunparse(url_parts)
    redirect_headers = [('Location', login_form_url)]
    redirect_headers = redirect_headers + forget_headers
    # Preserve any cookies the application wanted to set.
    redirect_headers = redirect_headers + [
        (name, value) for (name, value) in app_headers
        if name.lower() == 'set-cookie']
    return HTTPFound(headers=redirect_headers)
def is_html(self, status, headers, body):
    """Return True if the response is an HTML document.

    Checks both the Content-Type header and that *body* matches
    ``self.HTML_DOC_PAT``.
    """
    # Renamed local from `type`, which shadowed the builtin.
    content_type = header_value(headers, 'content-type')
    if content_type and content_type.startswith(('text/html',
                                                 'application/xhtml+xml')):
        return self.HTML_DOC_PAT.search(body) is not None
    return False
def request(self, uri, method="GET", body=None, headers=None,
            wsgi_request=None, input=None, output=None,
            trusted=False):
    """Issue a request to *uri*, dispatching internally when the URI is
    resolvable inside this process and externally otherwise.

    ``input``/``output`` may be format objects or ``'name <fmt>'``
    strings; ``wsgi_request`` (if given) is used to resolve relative URIs.
    Raises ValueError for a relative URI with no wsgi_request.
    """
    method = method.upper()
    wsgi_request = self._coerce_wsgi_request(wsgi_request)
    headers = self._coerce_headers(headers)
    # 'name xyz' strings name a registered format.
    if isinstance(output, basestring) and output.startswith('name '):
        output = get_format(output[5:].strip())
    input, body, headers = self._coerce_input(
        input, body, headers)
    if body and not header_value(headers, 'content-type'):
        # We have to add a content type...
        content_type = input.choose_mimetype(headers, body)
        replace_header(headers, 'content-type', content_type)
    headers = self._set_accept(headers, output)
    if wsgi_request is not None:
        uri = self._resolve_uri(uri, wsgi_request)
    if self._internally_resolvable(uri, wsgi_request):
        return self._internal_request(
            uri, method=method, body=body, headers=headers,
            wsgi_request=wsgi_request, input=input, output=output,
            trusted=trusted)
    else:
        # External requests require an absolute URI.
        if not scheme_re.search(uri):
            raise ValueError(
                'You gave a non-absolute URI (%r) and no wsgi_request to '
                'normalize it against' % uri)
        return self._external_request(
            uri, method=method, body=body, headers=headers,
            wsgi_request=wsgi_request, input=input, output=output,
            trusted=trusted)
def _create_response(self, status, headers, output, app_iter, trusted):
    """Build a response object, parsing the body per *output*.

    *output* may be falsy (raw body), a ``'name <fmt>'`` string, a Python
    type to match against the content type, or a Format instance.
    """
    content_type = header_value(headers, 'content-type')
    if app_iter is None:
        # @@: Can this really happen?
        # What happens with a 204 No Content?
        return self._make_response(
            status, headers, data=None)
    if not output:
        # Easy, return plain output
        # @@: Check charset?
        return self._make_response(
            status, headers, data=''.join(app_iter))
    if isinstance(output, basestring):
        if output.startswith('name '):
            output = get_format(output[5:].strip())
        else:
            # Must be a Python type
            output = find_format_match(
                output, content_type)
    elif isinstance(output, Format):
        pass
    else:
        raise TypeError(
            "Invalid value for output: %r" % output)
    # Let the format object deserialize the body.
    data = output.parse_wsgi_response(
        status, headers, app_iter, trusted=trusted)
    return self._make_response(
        status, headers, data=data)
def challenge(self, environ, status, app_headers, forget_headers):
    '''
    the challenge method is implemented here to supress
    the came_from query_attribute, which is not welcomed
    here :-)
    '''
    reason = header_value(app_headers, 'X-Authorization-Failure-Reason')
    # Split the login form URL into its six components.
    parts = list(urlparse.urlparse(self.login_form_url))
    # Fold the failure reason into the existing query string.
    params = cgi.parse_qs(parts[4])
    if reason:
        params[self.reason_param] = reason
    parts[4] = urllib.urlencode(params, doseq=True)
    login_form_url = urlparse.urlunparse(parts)
    cookies = [(h, v) for (h, v) in app_headers
               if h.lower() == 'set-cookie']
    headers = [('Location', login_form_url)] + forget_headers + cookies
    return HTTPFound(headers=headers)
def gzip_start_response(self, status, headers, exc_info=None):
    """Mark the response compressible when it is uncompressed text-like
    content, adding the gzip Content-Encoding header in that case."""
    self.headers = headers
    ct = header_value(headers, 'content-type')
    ce = header_value(headers, 'content-encoding')
    text_like = bool(
        ct
        and (ct.startswith('text/') or ct.startswith('application/'))
        and 'zip' not in ct)
    # Never double-compress an already-encoded body.
    self.compressible = text_like and not ce
    if self.compressible:
        headers.append(('content-encoding', 'gzip'))
        remove_header(headers, 'content-length')
    self.headers = headers
    self.status = status
    return self.buffer.write
def __call__(self, environ, start_response):
    """Run the wrapped WSGI app under cProfile; HTML responses get the
    profile statistics appended to the body."""
    catch_response = []
    body = []

    def replace_start_response(status, headers, exc_info=None):
        catch_response.extend([status, headers])
        start_response(status, headers, exc_info)
        return body.append

    def run_app():
        body.extend(self.app(environ, replace_start_response))

    # Run in profiler
    prof = cProfile.Profile()
    prof.runctx("run_app()", globals(), locals())
    # Build up body with stats
    body = ''.join(body)
    headers = catch_response[1]
    content_type = response.header_value(headers, 'content-type')
    # BUG FIX: guard against a missing Content-Type header —
    # header_value returns None, and None.startswith raised here.
    if content_type is None or not content_type.startswith('text/html'):
        # We can't add info to non-HTML output
        return [body]
    stats = pstats.Stats(prof)
    stats.strip_dirs()
    stats.sort_stats('time', 'calls')
    output = pstats_as_html(stats, self.limit)
    body += template % output
    return [body]
def start_response_wrapper(status, headers, exc_info=None):
    """Absolutize the Location header on redirect responses before
    passing the call through to the real start_response."""
    status_code = int(status.split(None, 1)[0])
    is_redirect = (301 <= status_code <= 303) or status_code == 307
    if is_redirect:
        location = header_value(headers, 'location')
        if location:
            absolute = resolve_relative_url(location, environ)
            replace_header(headers, 'location', absolute)
    return start_response(status, headers, exc_info)
def gzip_start_response(self, status, headers, exc_info=None):
    """Flag the response compressible unless it is already encoded,
    zip-like, or Shockwave Flash content."""
    self.headers = headers
    ct = header_value(headers, 'content-type')
    ce = header_value(headers, 'content-encoding')
    # This statement is the only change in this monkeypatch:
    eligible = bool(
        ct
        and (ct.startswith('text/') or ct.startswith('application/'))
        and 'zip' not in ct
        and ct != 'application/x-shockwave-flash')
    self.compressible = eligible and not ce
    if self.compressible:
        headers.append(('content-encoding', 'gzip'))
        remove_header(headers, 'content-length')
    self.headers = headers
    self.status = status
    return self.buffer.write
def get_mimetype_from_headers(headers, strip_params=False):
    """Return the Content-Type header value, optionally without its
    parameters (e.g. ``; charset=...``).

    Raises ValueError when no Content-Type header is present.
    """
    mimetype = header_value(headers, 'Content-type')
    if not mimetype:
        raise ValueError('No Content-Type header in headers: %r' % headers)
    if strip_params:
        mimetype = mimetype.split(';', 1)[0]
    return mimetype
def repl_start_response(status, headers, exc_info=None):
    """Intercept HTML responses: record their content type, drop the now
    stale content-length, and collect the body; pass everything else
    straight through."""
    ct = header_value(headers, 'content-type')
    if not (ct and ct.startswith('text/html')):
        # Not HTML: untouched pass-through.
        return start_response(status, headers, exc_info)
    type.append(ct)
    remove_header(headers, 'content-length')
    start_response(status, headers, exc_info)
    return body.append
def test_redapp():
    """ check that redirect returns the correct, expected results """
    saved = []

    def saveit(status, headers, exc_info=None):
        saved.append((status, headers))

    def redapp(environ, start_response):
        raise HTTPFound("/bing/foo")

    app = HTTPExceptionHandler(redapp)

    result = list(app({'HTTP_ACCEPT': 'text/html'}, saveit))
    assert b'<a href="/bing/foo">' in result[0]
    status, headers = saved[0]
    assert status == "302 Found"
    # Py3 adds the charset parameter to the HTML content type.
    expected_ct = "text/html; charset=utf8" if six.PY3 else "text/html"
    assert header_value(headers, 'content-type') == expected_ct
    assert header_value(headers, 'location') == "/bing/foo"

    list(app({'HTTP_ACCEPT': 'text/plain'}, saveit))
    status, headers = saved[1]
    assert header_value(headers, 'content-type') == "text/plain; charset=utf8"
    assert header_value(headers, 'location') == "/bing/foo"
def test_redapp():
    """ check that redirect returns the correct, expected results """
    saved = []
    def saveit(status, headers, exc_info = None):
        saved.append((status,headers))
    def redapp(environ, start_response):
        raise HTTPFound("/bing/foo")
    app = HTTPExceptionHandler(redapp)
    result = list(app({'HTTP_ACCEPT': 'text/html'},saveit))
    assert '<a href="/bing/foo">' in result[0]
    assert "302 Found" == saved[0][0]
    assert "text/html" == header_value(saved[0][1], 'content-type')
    assert "/bing/foo" == header_value(saved[0][1],'location')
    result = list(app({'HTTP_ACCEPT': 'text/plain'},saveit))
    # BUG FIX: this comparison was previously only *printed*
    # (`print result[0] == (...)`), so a wrong plain-text body could
    # never fail the test.  Assert it instead.
    assert result[0] == (
        '302 Found\n'
        'This resource was found at /bing/foo;\n'
        'you should be redirected automatically.\n')
    assert "text/plain; charset=utf8" == header_value(saved[1][1],'content-type')
    assert "/bing/foo" == header_value(saved[1][1],'location')
def test_basic(key='key', val='bingles'):
    """Round-trip a session value through the Set-Cookie / Cookie pair."""
    app = build(dump_environ, {key: val})
    status, headers, content, errors = raw_interactive(app)
    cookie_header = header_value(headers, 'Set-Cookie')
    # Session cookie must be host-wide and non-expiring.
    assert "Path=/;" in cookie_header
    assert "expires=" not in cookie_header
    cookie = cookie_header.split(";")[0]
    status, headers, content, errors = raw_interactive(
        app, {'HTTP_COOKIE': cookie})
    expected = "%s: %s" % (key, val.replace("\n", "\n "))
    assert expected in content
def get_response_info(self, status, headers):
    """Summarize a response as a dict with 'status', 'headers' and a
    parsed integer 'content-length' (None when absent/invalid)."""
    raw_length = header_value(headers, 'content-length')
    try:
        content_length = int(raw_length)
    except (TypeError, ValueError):
        # Missing header (None) or junk value.
        content_length = None
    return {
        'headers': list(headers),
        'content-length': content_length,
        'status': status,
    }
def _expires(self, date, headers):
    """Return the absolute expiration time for a response dated *date*.

    Cache-Control max-age wins over Expires; a malformed or missing
    freshness value yields *date* itself (i.e. already stale).
    """
    cc_header = header_value(headers, 'Cache-Control')
    expires_header = header_value(headers, 'Expires')
    # logic stolen from httplib2
    if cc_header is not None:
        header_parts = parse_cache_control_header(cc_header)
        if 'max-age' in header_parts:
            try:
                lifetime = int(header_parts['max-age'])
                return date + lifetime
            except ValueError: #pragma NO COVER belt-and-suspenders
                return date
    if expires_header is not None:
        expires = parsedate_tz(expires_header)
        if expires is None:
            return date
        else:
            return calendar.timegm(expires)
    # BUG FIX: previously fell off the end and returned None when neither
    # a max-age nor an Expires header applied; callers compare the result
    # numerically, so treat such responses as already expired.
    return date
def test_redapp():
    """ check that redirect returns the correct, expected results """
    saved = []
    def saveit(status, headers, exc_info = None):
        saved.append((status,headers))
    def redapp(environ, start_response):
        raise HTTPFound("/bing/foo")
    app = HTTPExceptionHandler(redapp)
    result = list(app({'HTTP_ACCEPT': 'text/html'},saveit))
    assert '<a href="/bing/foo">' in result[0]
    assert "302 Found" == saved[0][0]
    assert "text/html" == header_value(saved[0][1], 'content-type')
    assert "/bing/foo" == header_value(saved[0][1],'location')
    result = list(app({'HTTP_ACCEPT': 'text/plain'},saveit))
    # BUG FIX: the comparison below used to be print()ed rather than
    # asserted, so the plain-text body was never actually checked.
    assert result[0] == (
        '302 Found\n'
        'This resource was found at /bing/foo;\n'
        'you should be redirected automatically.\n')
    assert "text/plain; charset=utf8" == header_value(saved[1][1],'content-type')
    assert "/bing/foo" == header_value(saved[1][1],'location')
def gzip_start_response(self, status, headers, exc_info=None):
    """Decide compressibility, excluding encoded, zip-like and
    Shockwave Flash content, and patch headers when compressing."""
    self.headers = headers
    ct = header_value(headers, "content-type")
    ce = header_value(headers, "content-encoding")
    # This statement is the only change in this monkeypatch:
    wanted = bool(
        ct
        and (ct.startswith("text/") or ct.startswith("application/"))
        and "zip" not in ct
        and ct != "application/x-shockwave-flash")
    self.compressible = wanted and not ce
    if self.compressible:
        headers.append(("content-encoding", "gzip"))
        remove_header(headers, "content-length")
    self.headers = headers
    self.status = status
    return self.buffer.write
def match(self, request_headers, environ):
    """Return True when every stored discriminator matches this request.

    'env' discriminators compare against *environ*, 'vary' ones against
    the request headers; any other type is a programming error.
    """
    for discrim in self.discriminators:
        typ, pair = discrim
        stored_name, stored_value = pair
        if typ == 'env':
            strval = environ.get(stored_name)
        elif typ == 'vary':
            strval = header_value(request_headers, stored_name)
        else:
            raise ValueError(discrim)
        # A missing value or a mismatch disqualifies the entry.
        if strval is None or strval != stored_value:
            return False
    return True
def challenge(self, environ, status, app_headers, forget_headers):
    """Redirect to the login form, carrying the failure reason (if any)
    in the query string and preserving app cookies."""
    reason = header_value(app_headers, 'X-Authorization-Failure-Reason')
    parts = list(urlparse.urlparse(self.login_form_url))
    params = cgi.parse_qs(parts[4])
    if reason:
        params[self.reason_param] = reason
    parts[4] = urllib.urlencode(params, doseq=True)
    login_form_url = urlparse.urlunparse(parts)
    cookies = [(h, v) for (h, v) in app_headers
               if h.lower() == 'set-cookie']
    headers = [('Location', login_form_url)] + forget_headers + cookies
    return HTTPFound(headers=headers)
def __call__(self, environ, start_response):
    """Run the wrapped app, validate (X)HTML bodies with wdg, and inject
    any validation errors into the page."""
    output = StringIO()
    response = []

    def writer_start_response(status, headers, exc_info=None):
        response.extend((status, headers))
        start_response(status, headers, exc_info)
        return output.write

    app_iter = self.app(environ, writer_start_response)
    try:
        for s in app_iter:
            output.write(s)
    finally:
        if hasattr(app_iter, 'close'):
            app_iter.close()
    page = output.getvalue()
    status, headers = response
    v = header_value(headers, 'content-type') or ''
    if (not v.startswith('text/html')
            and not v.startswith('text/xhtml')
            and not v.startswith('application/xhtml')):
        # Can't validate
        # @@: Should validate CSS too... but using what?
        return [page]
    ops = []
    if v.startswith('text/xhtml+xml'):
        ops.append('--xml')
    # @@: Should capture encoding too
    html_errors = self.call_wdg_validate(
        self.wdg_path, ops, page)
    if html_errors:
        page = self.add_error(page, html_errors)[0]
        # BUG FIX: the old code removed the header with
        # headers.remove(('Content-Length', str(header_value(...)))),
        # which raises ValueError when the header is absent or spelled
        # with different casing; remove_header handles both safely.
        remove_header(headers, 'content-length')
        headers.append(('Content-Length', str(len(page))))
    return [page]
def __init__(self, detail=None, headers=None, comment=None):
    """Initialize a redirect, deriving the Location header from *detail*
    when the headers do not already carry one.

    Asserts that a location ends up available one way or the other.
    """
    assert isinstance(headers, (type(None), list))
    headers = headers or []
    location = header_value(headers, 'location')
    if not location:
        # No Location header: promote `detail` into one.
        location = detail
        detail = ''
        headers.append(('location', location))
    assert location, ("HTTPRedirection specified neither a "
                      "location in the headers nor did it "
                      "provide a detail argument.")
    HTTPRedirection.__init__(self, location, headers, comment)
    if detail is not None:
        self.detail = detail
def __init__(self, detail=None, headers=None, comment=None):
    """Set up the redirect; a missing Location header is synthesized
    from *detail* (which is then cleared)."""
    assert isinstance(headers, (type(None), list))
    if headers is None:
        headers = []
    else:
        headers = headers or []
    location = header_value(headers, 'location')
    if not location:
        location, detail = detail, ''
        headers.append(('location', location))
    # One of headers/detail must have supplied a target.
    assert location, ("HTTPRedirection specified neither a "
                      "location in the headers nor did it "
                      "provide a detail argument.")
    HTTPRedirection.__init__(self, location, headers, comment)
    if detail is not None:
        self.detail = detail
def test_basic(key='key', val='bingles'):
    """Store a value, read back the session cookie, and verify the value
    is echoed on a follow-up request carrying that cookie."""
    app = build(dump_environ, {key: val})
    status, headers, content, errors = raw_interactive(app)
    cookie_header = header_value(headers, 'Set-Cookie')
    assert "Path=/;" in cookie_header
    assert "expires=" not in cookie_header
    cookie = cookie_header.split(";")[0]
    status, headers, content, errors = raw_interactive(
        app, {'HTTP_COOKIE': cookie})
    expected = "%s: %s" % (key, val.replace("\n", "\n "))
    if six.PY3:
        # Response bodies are bytes on Python 3.
        expected = expected.encode('utf8')
    assert expected in content
def test_etag_304():
    """Exercise transcluder ETag handling: a composite page's ETag must
    change when a transcluded dependency changes, and conditional GETs
    must yield 304 only for the current ETag."""
    base_dir = os.path.dirname(__file__)
    test_dir = os.path.join(base_dir, 'test-data', '304')
    cache_app = CacheFixtureApp()
    index_page = CacheFixtureResponseInfo(open(os.path.join(test_dir,'index.html')).read())
    page1 = CacheFixtureResponseInfo(open(os.path.join(test_dir,'page1.html')).read())
    page2 = CacheFixtureResponseInfo(open(os.path.join(test_dir,'page2.html')).read())
    cache_app.map_url('/index.html',index_page)
    cache_app.map_url('/page1.html',page1)
    cache_app.map_url('/page2.html',page2)
    index_page.etag = 'index'
    page1.etag = 'page1'
    page2.etag = 'page2'
    transcluder = TranscluderMiddleware(cache_app)
    test_app = TestApp(transcluder)
    #load up the deptracker
    result = test_app.get('/index.html')
    etag = header_value(result.headers, 'ETAG')
    assert etag is not None
    # Same ETag, nothing changed -> 304 Not Modified.
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH' : etag})
    assert result.status == 304
    # Changing a dependency invalidates the composite ETag.
    page1.etag = 'page1.new'
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH' : etag})
    assert result.status == 200
    new_etag = header_value(result.headers, 'ETAG')
    assert new_etag != etag
    # The fresh ETag is honored again.
    result = test_app.get('/index.html', extra_environ={'HTTP_IF_NONE_MATCH' : new_etag})
    assert result.status == 304
def __call__(self, environ, start_response):
    """WSGI middleware entry point: capture print output produced while
    handling the request and append it to HTML responses (and/or echo it
    to wsgi.errors)."""
    global _threadedprint_installed
    if environ.get('paste.testing'):
        # In a testing environment this interception isn't
        # useful:
        return self.app(environ, start_response)
    if (not _threadedprint_installed
            or self._threaded_print_stdout is not sys.stdout):
        # @@: Not strictly threadsafe
        _threadedprint_installed = True
        threadedprint.install(leave_stdout=not self.replace_stdout)
        self._threaded_print_stdout = sys.stdout
    removed = []
    def remove_printdebug():
        removed.append(None)
    environ['paste.remove_printdebug'] = remove_printdebug
    logged = StringIO()
    listeners = [logged]
    environ['paste.printdebug_listeners'] = listeners
    if self.print_wsgi_errors:
        listeners.append(environ['wsgi.errors'])
    # Tee prints from this thread into all listeners.
    replacement_stdout = TeeFile(listeners)
    threadedprint.register(replacement_stdout)
    try:
        status, headers, body = wsgilib.intercept_output(
            environ, self.app)
        if status is None:
            # Some error occurred
            status = '500 Server Error'
            headers = [('Content-type', 'text/html')]
            start_response(status, headers)
            if not body:
                body = 'An error occurred'
        content_type = response.header_value(headers, 'content-type')
        if (removed
                or (not self.force_content_type
                    and (not content_type
                         or not content_type.startswith('text/html')))):
            if replacement_stdout == logged:
                # Then the prints will be lost, unless...
                environ['wsgi.errors'].write(logged.getvalue())
            start_response(status, headers)
            return [body]
        response.remove_header(headers, 'content-length')
        body = self.add_log(body, logged.getvalue())
        start_response(status, headers)
        return [body]
    finally:
        threadedprint.deregister()
def test_redapp():
    """ check that redirect returns the correct, expected results """
    saved = []

    def saveit(status, headers, exc_info=None):
        saved.append((status, headers))

    def redapp(environ, start_response):
        raise HTTPFound("/bing/foo")

    app = HTTPExceptionHandler(redapp)

    result = list(app({'HTTP_ACCEPT': 'text/html'}, saveit))
    assert b'<a href="/bing/foo">' in result[0]
    status, headers = saved[0]
    assert status == "302 Found"
    # Python 3 emits a charset parameter on the HTML content type.
    expected_ct = ("text/html; charset=utf8" if six.PY3 else "text/html")
    assert header_value(headers, 'content-type') == expected_ct
    assert header_value(headers, 'location') == "/bing/foo"

    list(app({'HTTP_ACCEPT': 'text/plain'}, saveit))
    status, headers = saved[1]
    assert header_value(headers, 'content-type') == "text/plain; charset=utf8"
    assert header_value(headers, 'location') == "/bing/foo"
def __call__(self, environ, start_response):
    """Collect the wrapped app's output, run the wdg validator on
    (X)HTML responses, and splice any validation errors into the page."""
    output = StringIO()
    response = []

    def writer_start_response(status, headers, exc_info=None):
        response.extend((status, headers))
        start_response(status, headers, exc_info)
        return output.write

    app_iter = self.app(environ, writer_start_response)
    try:
        for s in app_iter:
            output.write(s)
    finally:
        if hasattr(app_iter, 'close'):
            app_iter.close()
    page = output.getvalue()
    status, headers = response
    v = header_value(headers, 'content-type') or ''
    if (not v.startswith('text/html')
            and not v.startswith('text/xhtml')
            and not v.startswith('application/xhtml')):
        # Can't validate
        # @@: Should validate CSS too... but using what?
        return [page]
    ops = []
    if v.startswith('text/xhtml+xml'):
        ops.append('--xml')
    # @@: Should capture encoding too
    html_errors = self.call_wdg_validate(self.wdg_path, ops, page)
    if html_errors:
        page = self.add_error(page, html_errors)[0]
        # BUG FIX: previously removed via
        # headers.remove(('Content-Length', str(header_value(...)))),
        # which blows up with ValueError if the header is missing or uses
        # different casing.  remove_header is case-insensitive and safe.
        remove_header(headers, 'content-length')
        headers.append(('Content-Length', str(len(page))))
    return [page]
def challenge(self, environ, status, app_headers, forget_headers):
    """Redirect to the login form with the failure reason in the query
    string; on a 401, also forget the current identity."""
    reason = header_value(app_headers, 'X-Authorization-Failure-Reason')
    parts = list(urlparse.urlparse(self.login_form_url))
    params = cgi.parse_qs(parts[4])
    if reason:
        params[self.reason_param] = reason
    parts[4] = urllib.urlencode(params, doseq=True)
    login_form_url = urlparse.urlunparse(parts)
    cookies = [(h, v) for (h, v) in app_headers
               if h.lower() == 'set-cookie']
    headers = [('Location', login_form_url)] + forget_headers + cookies
    # cleanup the session id
    identity = environ.get('repoze.who.identity')
    if identity and status == "401 Unauthorized":
        self.forget(environ, identity)
    return HTTPFound(headers=headers)
def challenge(self, environ, status, app_headers, forget_headers):
    """Build the login-redirect response, optionally extending the login
    URL's query string with a failure reason and/or a came_from URL."""
    if self.reason_param is not None or self.came_from_param is not None:
        # Copy so the cached parts are never mutated.
        url_parts = self._login_url_parts[:]
        query = url_parts[4]
        query_elements = cgi.parse_qs(query)
        if self.reason_param is not None:
            reason = header_value(app_headers, self.reason_header)
            if reason:
                query_elements[self.reason_param] = reason
        if self.came_from_param is not None:
            # Record the originally requested URL so login can return.
            query_elements[self.came_from_param] = construct_url(environ)
        url_parts[4] = urllib.urlencode(query_elements, doseq=True)
        login_url = urlparse.urlunparse(url_parts)
    else:
        login_url = self.login_url
    headers = [('Location', login_url)] + forget_headers
    # Keep any cookies the application set.
    cookies = [(h,v) for (h,v) in app_headers
               if h.lower() == 'set-cookie']
    headers += cookies
    return HTTPFound(headers=headers)
def prepare_content(self, environ):
    """Choose HTML or plain-text content by the Accept header, encode it
    as UTF-8, and return ``(headers, content)``."""
    headers = list(self.headers) if self.headers else []
    accept = environ.get('HTTP_ACCEPT', '')
    if 'html' in accept or '*/*' in accept:
        replace_header(headers, 'content-type', 'text/html')
        content = self.html(environ)
    else:
        replace_header(headers, 'content-type', 'text/plain')
        content = self.plain(environ)
    if isinstance(content, unicode):
        content = content.encode('utf8')
    # Tack the charset onto whatever content type ended up in place.
    cur_content_type = header_value(headers, 'content-type') or 'text/html'
    replace_header(headers, 'content-type',
                   cur_content_type + '; charset=utf8')
    return headers, content
def __call__(self, environ, start_response):
    """Profile one request with hotshot (serialized by a lock, since the
    profiler writes to a shared log file) and append the statistics to
    HTML responses."""
    catch_response = []
    body = []
    def replace_start_response(status, headers, exc_info=None):
        catch_response.extend([status, headers])
        start_response(status, headers, exc_info)
        return body.append
    def run_app():
        app_iter = self.app(environ, replace_start_response)
        try:
            body.extend(app_iter)
        finally:
            if hasattr(app_iter, 'close'):
                app_iter.close()
    # One profile at a time: hotshot writes to self.log_filename.
    self.lock.acquire()
    try:
        prof = hotshot.Profile(self.log_filename)
        prof.addinfo('URL', environ.get('PATH_INFO', ''))
        try:
            prof.runcall(run_app)
        finally:
            prof.close()
        body = ''.join(body)
        headers = catch_response[1]
        content_type = response.header_value(headers, 'content-type')
        if content_type is None or not content_type.startswith(
            'text/html'):
            # We can't add info to non-HTML output
            return [body]
        stats = hotshot.stats.load(self.log_filename)
        stats.strip_dirs()
        stats.sort_stats('time', 'calls')
        output = capture_output(stats.print_stats, self.limit)
        output_callers = capture_output(stats.print_callers, self.limit)
        # Escape the stats text so it renders literally inside <pre>.
        body += '<pre style="%s">%s\n%s</pre>' % (
            self.style, cgi.escape(output), cgi.escape(output_callers))
        return [body]
    finally:
        self.lock.release()
def _discriminate(self, entries, request_headers, environ):
    """Pick one cached entry whose discriminators all match this request.

    Returns the first surviving entry, or None when nothing matches.
    """
    matching_entries = entries[:]
    for entry in entries:
        discrims = entry[0]
        for discrim in discrims:
            typ, (stored_name, stored_value) = discrim
            if typ == 'env':
                strval = environ.get(stored_name)
            elif typ == 'vary':
                strval = header_value(request_headers, stored_name)
            else: #pragma NO COVER
                raise ValueError(discrim)
            if strval is None or strval != stored_value:
                # One failed discriminator disqualifies the entry.
                matching_entries.remove(entry)
                break
    if matching_entries:
        # this is essentially random
        return matching_entries[0]
def prepare_content(self, environ):
    """Render HTML or plain text depending on the Accept header and
    return the ``(headers, utf-8 content)`` pair."""
    headers = list(self.headers) if self.headers else []
    accept = environ.get('HTTP_ACCEPT', '')
    wants_html = 'html' in accept or '*/*' in accept
    if wants_html:
        replace_header(headers, 'content-type', 'text/html')
        content = self.html(environ)
    else:
        replace_header(headers, 'content-type', 'text/plain')
        content = self.plain(environ)
    if isinstance(content, six.text_type):
        content = content.encode('utf8')
    # Append the charset to the content type chosen above.
    cur_content_type = header_value(headers, 'content-type') or 'text/html'
    replace_header(headers, 'content-type',
                   cur_content_type + '; charset=utf8')
    return headers, content
def __call__(self, environ, start_response):
    """Profile a single request via hotshot under a lock (the profile log
    file is shared) and append formatted stats to HTML responses."""
    catch_response = []
    body = []
    def replace_start_response(status, headers, exc_info=None):
        catch_response.extend([status, headers])
        start_response(status, headers, exc_info)
        return body.append
    def run_app():
        app_iter = self.app(environ, replace_start_response)
        try:
            body.extend(app_iter)
        finally:
            if hasattr(app_iter, "close"):
                app_iter.close()
    # Serialize profiling: hotshot writes into self.log_filename.
    self.lock.acquire()
    try:
        prof = hotshot.Profile(self.log_filename)
        prof.addinfo("URL", environ.get("PATH_INFO", ""))
        try:
            prof.runcall(run_app)
        finally:
            prof.close()
        body = "".join(body)
        headers = catch_response[1]
        content_type = response.header_value(headers, "content-type")
        if content_type is None or not content_type.startswith("text/html"):
            # We can't add info to non-HTML output
            return [body]
        stats = hotshot.stats.load(self.log_filename)
        stats.strip_dirs()
        stats.sort_stats("time", "calls")
        output = capture_output(stats.print_stats, self.limit)
        output_callers = capture_output(stats.print_callers, self.limit)
        # HTML-escape so the stats display literally inside <pre>.
        body += '<pre style="%s">%s\n%s</pre>' % (self.style,
                                                  cgi.escape(output),
                                                  cgi.escape(output_callers))
        return [body]
    finally:
        self.lock.release()
def _serialize_body(self, input, body, headers):
    """Serialize *body* through the input format when one is given;
    otherwise pass the body through untouched."""
    if not input:
        return body
    content_type = header_value(headers, 'content-type')
    return ''.join(input.dump_iter(body, content_type))