def test_call_no_transform_on_HEAD_removes_content_length(self):
    """A HEAD request for a transformed image must yield 200 with the
    image content type but no Content-Length header, since no body
    (and hence no known transformed length) is returned.
    """
    captured = []

    def mock_start_response(status, headers, exc_info=None):
        # Capture the status and headers the middleware emits.
        captured.extend((status, headers))

    def mock_app(environ, start_response):
        # The middleware must strip the bitblt token before delegating.
        # (assertNotIn replaces the deprecated failIf alias, which was
        # removed in Python 3.12.)
        self.assertNotIn('bitblt', environ.get('PATH_INFO'))
        app_response = webob.Response(jpeg_image_data,
                                      content_type='image/jpeg')
        return app_response(environ, start_response)

    middleware = self._makeOne(mock_app)
    width = height = "32"
    signature = transform.compute_signature(width, height,
                                            middleware.secret)
    request = webob.Request.blank(
        'bitblt-%sx%s-%s/foo.jpg' % (width, height, signature),
        method='HEAD')
    result = middleware(request.environ, mock_start_response)
    "".join(result)  # consume the response iterable
    status, headers = captured
    headers = ResponseHeaders(headers)
    self.assertEqual(status, '200 OK')
    self.assertEqual(headers['content-type'], 'image/jpeg')
    self.assertNotIn('content-length', headers)
def test_cache_remote_file(self, filestore, httpget, gen, xom):
    """Fetching a remote file caches its content, headers, size and hash."""
    link = gen.pypi_package_link("pytest-1.8.zip", md5=False)
    entry = filestore.maplink(link, "root", "pypi", "pytest")
    assert not entry.hash_spec and not entry.file_exists()
    filestore.keyfs.restart_as_write_transaction()
    headers = ResponseHeaders({
        "content-length": "3",
        "last-modified": "Thu, 25 Nov 2010 20:00:27 GMT"})
    httpget.url2response[link.url] = dict(
        status_code=200, headers=headers, raw=BytesIO(b"123"))
    # Drain the generator so the file actually gets cached.
    for part in iter_cache_remote_file(xom, entry):
        pass
    rheaders = entry.gethttpheaders()
    assert rheaders["content-length"] == "3"
    assert rheaders["content-type"] in zip_types
    assert rheaders["last-modified"] == headers["last-modified"]
    # `content` instead of `bytes` — the original shadowed the builtin.
    content = entry.file_get_content()
    assert content == b"123"
    # reget entry and check about content
    filestore.keyfs.restart_as_write_transaction()
    entry = filestore.get_file_entry(entry.relpath)
    assert entry.file_exists()
    assert entry.hash_value == getdigest(content, entry.hash_type)
    assert entry.file_size() == 3
    rheaders = entry.gethttpheaders()
    assert entry.file_get_content() == b"123"
def _headers__get(self):
    """The headers in a dictionary-like object"""
    cached = self._headers
    if cached is None:
        # Build the dict-like view lazily and memoize it on the instance.
        cached = ResponseHeaders.view_list(self.headerlist)
        self._headers = cached
    return cached
def test_headers(self):
    """Test headers from the response."""
    expected = ResponseHeaders([
        ('XDODS-Server', 'pydap/' + __version__),
        ('Content-description', 'dods_ascii'),
        ('Content-type', 'text/plain; charset=ascii'),
        ('Content-Length', '440'),
    ])
    self.assertEqual(self.res.headers, expected)
def custom_start_response(status, headers, exc_info=None):
    # Echo the incoming request id header (if any) back on the response.
    wrapped = ResponseHeaders(headers)
    request_id = request.headers.get(REQUEST_ID_HEADER, None)
    if request_id:
        wrapped[REQUEST_ID_HEADER] = request_id
    return start_response(status, wrapped._items, exc_info)
def test_headers(self):
    """Test the response headers."""
    res = self.app.get('/.html')
    expected = ResponseHeaders([
        ('XDODS-Server', 'pydap/' + __version__),
        ('Content-description', 'dods_form'),
        ('Content-type', 'text/html; charset=utf-8'),
        ('Content-Length', '5864'),
    ])
    self.assertEqual(res.headers, expected)
def custom_start_response(status, headers, exc_info=None):
    """Attach CORS response headers before delegating to start_response."""
    wrapped = ResponseHeaders(headers)
    origin = request.headers.get("Origin")

    # Assemble the allowed origins: configured entries plus dev defaults.
    allowed = OrderedSet(cfg.CONF.api.allow_origin)
    public_api_url = cfg.CONF.auth.api_url
    # Default gulp development server WebUI URL
    allowed.add("http://127.0.0.1:3000")
    # By default WebUI simple http server listens on 8080
    allowed.add("http://localhost:8080")
    allowed.add("http://127.0.0.1:8080")
    if public_api_url:
        # Public API URL
        allowed.add(public_api_url)
    allowed = list(allowed)

    if not origin:
        origin_allowed = allowed[0]
    elif "*" in allowed:
        origin_allowed = origin
    else:
        # See http://www.w3.org/TR/cors/#access-control-allow-origin-response-header
        origin_allowed = origin if origin in allowed else allowed[0]

    methods_allowed = ["GET", "POST", "PUT", "DELETE", "OPTIONS"]
    request_headers_allowed = [
        "Content-Type",
        "Authorization",
        HEADER_ATTRIBUTE_NAME,
        HEADER_API_KEY_ATTRIBUTE_NAME,
        REQUEST_ID_HEADER,
    ]
    response_headers_allowed = [
        "Content-Type",
        "X-Limit",
        "X-Total-Count",
        REQUEST_ID_HEADER,
    ]

    wrapped["Access-Control-Allow-Origin"] = origin_allowed
    wrapped["Access-Control-Allow-Methods"] = ",".join(methods_allowed)
    wrapped["Access-Control-Allow-Headers"] = ",".join(
        request_headers_allowed)
    wrapped["Access-Control-Allow-Credentials"] = "true"
    wrapped["Access-Control-Expose-Headers"] = ",".join(
        response_headers_allowed)
    return start_response(status, wrapped._items, exc_info)
def _headerlist__set(self, value):
    # Setter for the headerlist property: normalizes `value` to a list of
    # pairs, rebuilds the lowercase-keyed header cache, then stores the list.
    self._headers = {}
    if not isinstance(value, list):
        # Accept dict-like inputs by expanding them to (name, value) pairs.
        if hasattr(value, 'items'):
            value = value.items()
        value = list(value)
    # NOTE(review): this view is built from self.headerlist, but
    # self._headerlist is only assigned *after* the loop below — so the
    # cache appears to reflect the OLD header list, not `value`. Looks like
    # it should view `value` instead; confirm against the headerlist
    # property's getter before changing.
    headers = ResponseHeaders.view_list(self.headerlist)
    for hname in headers.keys():
        # Cache maps lowercased name -> (original-cased name, value).
        self._headers[hname.lower()] = (hname, headers[hname])
    self._headerlist = value
def test_headers(self):
    """Test the response headers."""
    expected = ResponseHeaders([
        ('XDODS-Server', 'pydap/' + __version__),
        ('Content-description', 'dods_das'),
        ('Content-type', 'text/plain; charset=ascii'),
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Headers',
         'Origin, X-Requested-With, Content-Type'),
        ('Content-Length', '510'),
    ])
    self.assertEqual(self.res.headers, expected)
def test_headers(self):
    """Test the headers in the response."""
    expected = ResponseHeaders([
        ('XDODS-Server', 'pydap/' + __version__),
        ('Content-description', 'dods_data'),
        ('Content-type', 'application/octet-stream'),
        ('Access-Control-Allow-Origin', '*'),
        ('Access-Control-Allow-Headers',
         'Origin, X-Requested-With, Content-Type'),
        ('Content-Length', '238'),
    ])
    self.assertEqual(self.res.headers, expected)
def clean_response_headers(response):
    """Return a copy of the response headers with hop-by-hop headers removed."""
    # remove hop by hop headers, see:
    # https://www.mnot.net/blog/2011/07/11/what_proxies_must_do
    drop = set(hop_by_hop)
    connection = response.headers.get('connection')
    if connection and connection.lower() != 'close':
        # The Connection header may name additional per-hop headers to drop.
        for token in connection.split(','):
            drop.add(token.strip().lower())
    cleaned = ResponseHeaders()
    for name, value in response.headers.items():
        if name.lower() not in drop:
            cleaned[name] = value
    return cleaned
def test_iterfile_remote_no_headers(self, filestore, httpget, gen, xom):
    """A remote response without any headers is still cached correctly."""
    link = gen.pypi_package_link("pytest-1.8.zip", md5=False)
    entry = filestore.maplink(link, "root", "pypi", "pytest")
    assert not entry.hash_spec
    empty_headers = ResponseHeaders({})
    httpget.url2response[link.url] = dict(
        status_code=200, headers=empty_headers, raw=BytesIO(b"123"))
    # Drain the generator to trigger the download and caching.
    for _ in iter_cache_remote_file(xom, entry):
        pass
    rheaders = entry.gethttpheaders()
    assert rheaders["content-length"] == "3"
    assert rheaders["content-type"] in zip_types
    assert entry.file_get_content() == b"123"
def custom_start_response(status, headers, exc_info=None):
    """Attach CORS response headers before delegating to start_response."""
    wrapped = ResponseHeaders(headers)
    origin = request.headers.get('Origin')

    # Assemble the allowed origins: configured entries plus dev defaults.
    allowed = OrderedSet(cfg.CONF.api.allow_origin)
    public_api_url = cfg.CONF.auth.api_url
    # Default gulp development server WebUI URL
    allowed.add('http://127.0.0.1:3000')
    # By default WebUI simple http server listens on 8080
    allowed.add('http://localhost:8080')
    allowed.add('http://127.0.0.1:8080')
    if public_api_url:
        # Public API URL
        allowed.add(public_api_url)
    allowed = list(allowed)

    if not origin:
        origin_allowed = allowed[0]
    elif '*' in allowed:
        origin_allowed = origin
    else:
        # See http://www.w3.org/TR/cors/#access-control-allow-origin-response-header
        origin_allowed = origin if origin in allowed else allowed[0]

    methods_allowed = ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS']
    request_headers_allowed = [
        'Content-Type',
        'Authorization',
        HEADER_ATTRIBUTE_NAME,
        HEADER_API_KEY_ATTRIBUTE_NAME,
        REQUEST_ID_HEADER,
    ]
    response_headers_allowed = [
        'Content-Type',
        'X-Limit',
        'X-Total-Count',
        REQUEST_ID_HEADER,
    ]

    wrapped['Access-Control-Allow-Origin'] = origin_allowed
    wrapped['Access-Control-Allow-Methods'] = ','.join(methods_allowed)
    wrapped['Access-Control-Allow-Headers'] = ','.join(
        request_headers_allowed)
    wrapped['Access-Control-Allow-Credentials'] = 'true'
    wrapped['Access-Control-Expose-Headers'] = ','.join(
        response_headers_allowed)
    return start_response(status, wrapped._items, exc_info)
def add_metadata(self, environ, identity):
    # Derive a CSRF token from the session id and publish it into both the
    # repoze.who identity and the WSGI environ; on login redirects, also
    # rewrite the Location header to carry the token.
    request = Request(environ)
    log.debug(
        b_('CSRFMetadataProvider.add_metadata(%(r_path)s)')
        % {'r_path': to_bytes(request.path)})
    # Prefer a session id already stashed in the environ; otherwise fall
    # back to the session cookie.
    session_id = environ.get(self.auth_session_id)
    if not session_id:
        session_id = request.cookies.get(self.session_cookie)
    log.debug(b_('session_id = %(s_id)r') % {'s_id': to_bytes(session_id)})
    # 'Set-Cookie:' guards against a malformed cookie header being parsed
    # as the session value.
    if session_id and session_id != 'Set-Cookie:':
        environ[self.auth_session_id] = session_id
        # Token is the SHA-1 digest of the session id.
        # NOTE(review): sha1() over a str only works on Python 2; on
        # Python 3 this would need session_id.encode() — confirm the
        # supported interpreter versions.
        token = sha1(session_id).hexdigest()
        identity.update({self.csrf_token_id: token})
        log.debug(b_('Identity updated with CSRF token'))
        path = self.strip_script(environ, request.path)
        if path == self.login_handler:
            # Login request: mark the auth state and expose our token.
            log.debug(b_('Setting CSRF_AUTH_STATE'))
            environ[self.auth_state] = True
            environ[self.token_env] = token
        else:
            # Ordinary request: expose whatever token the client sent.
            environ[self.token_env] = self.extract_csrf_token(request)
        app = environ.get('repoze.who.application')
        if app:
            # This occurs during login in some application configurations
            if isinstance(app, HTTPFound) and environ.get(self.auth_state):
                log.debug(
                    b_('Got HTTPFound(302) from'
                       ' repoze.who.application'))
                # What possessed people to make this a string or
                # a function?
                location = app.location
                if hasattr(location, '__call__'):
                    location = location()
                # Append the CSRF token to the redirect target's query
                # string and swap in the rewritten Location header.
                loc = update_qs(location, {self.csrf_token_id: str(token)})
                headers = app.headers.items()
                replace_header(headers, 'location', loc)
                app.headers = ResponseHeaders(headers)
                log.debug(
                    b_('Altered headers: %(headers)s')
                    % {'headers': to_bytes(app.headers)})
    else:
        log.warning(
            b_('Invalid session cookie %(s_id)r, not setting CSRF'
               ' token!') % {'s_id': to_bytes(session_id)})
def test_iterfile_remote_error_md5(self, filestore, httpget, gen, xom):
    """A checksum mismatch aborts caching and leaves no file behind."""
    link = gen.pypi_package_link("pytest-3.0.zip")
    entry = filestore.maplink(link, "root", "pypi", "pytest")
    assert entry.hash_spec and entry.hash_spec == link.hash_spec
    remote_headers = ResponseHeaders({
        "content-length": "3",
        "last-modified": "Thu, 25 Nov 2010 20:00:27 GMT",
        "content-type": "application/zip",
    })
    httpget.url2response[link.url_nofrag] = dict(
        status_code=200, headers=remote_headers, raw=BytesIO(b"123"))
    # The body b"123" does not match the link's md5, so draining the
    # generator must raise.
    with pytest.raises(ValueError, match=link.md5):
        for _ in iter_cache_remote_file(xom, entry):
            pass
    assert not entry.file_exists()
def clientside_actions(self, req, resp, log):
    """Collect the clientside actions of every rule that matches this
    request/response pair.

    Mirrors apply_rules' class/rule resolution, but only gathers each
    matching rule's clientside actions; themes and aborts must not occur
    on this path.
    """
    extra_headers = parse_meta_headers(resp.body)
    if extra_headers:
        response_headers = ResponseHeaders(resp.headerlist + extra_headers)
    else:
        response_headers = resp.headers
    try:
        classes = run_matches(self.matchers, req, resp, response_headers, log)
    except AbortTheme:
        assert 0, 'no abort should happen'
    if 'X-Deliverance-Page-Class' in response_headers:
        # Read from response_headers, not resp.headers: the header may
        # have come from a <meta> tag and then exists only in
        # response_headers (resp.headers would raise KeyError).
        classes.extend(
            response_headers['X-Deliverance-Page-Class'].strip().split())
    if 'deliverance.page_classes' in req.environ:
        classes.extend(req.environ['deliverance.page_classes'])
    if not classes:
        classes = ['default']
    rules = []
    for class_name in classes:
        ## FIXME: handle case of unknown classes
        ## Or do that during compilation?
        for rule in self.rules_by_class.get(class_name, []):
            if rule not in rules:
                rules.append(rule)
                if rule.theme:
                    assert 0, 'no rule themes should be present'
    resp = force_charset(resp)
    content_doc = self.parse_document(resp.unicode_body, req.url)
    actions = []
    run_standard = True
    for rule in rules:
        if rule.match is not None:
            matches = rule.match(req, resp, response_headers, log)
            if not matches:
                log.debug(rule, "Skipping <rule>")
                continue
        actions.extend(rule.clientside_actions(content_doc, log))
        if rule.suppress_standard:
            run_standard = False
    if run_standard:
        ## FIXME: should it be possible to put the standard rule in the ruleset?
        actions.extend(standard_rule.clientside_actions(content_doc, log))
    return actions
def test_iterfile_remote_nosize(self, filestore, httpget, gen, xom):
    """Caching works even when the remote reports no content length."""
    link = gen.pypi_package_link("pytest-3.0.zip", md5=False)
    entry = filestore.maplink(link, "root", "pypi", "pytest")
    assert not entry.hash_spec
    remote_headers = ResponseHeaders({
        "last-modified": "Thu, 25 Nov 2010 20:00:27 GMT",
        "content-length": None,
    })
    assert entry.file_size() is None
    httpget.url2response[link.url] = dict(
        status_code=200, headers=remote_headers, raw=BytesIO(b"1"))
    # Drain the generator to trigger the download and caching.
    for _ in iter_cache_remote_file(xom, entry):
        pass
    assert entry.file_get_content() == b"1"
    # The size is derived from the downloaded body.
    entry2 = filestore.get_file_entry(entry.relpath)
    assert entry2.file_size() == 1
    rheaders = entry.gethttpheaders()
    assert rheaders["last-modified"] == remote_headers["last-modified"]
    assert rheaders["content-type"] in zip_types
def test_call_is_untransformed_image(self):
    """A plain image request (no bitblt token) passes through unchanged,
    keeping the original content type and length."""
    captured = []

    def mock_start_response(status, headers, exc_info=None):
        # Capture the status and headers the middleware emits.
        captured.extend((status, headers))

    def mock_app(environ, start_response):
        # No bitblt token was requested, so none should reach the app.
        # (assertNotIn replaces the deprecated failIf alias, which was
        # removed in Python 3.12.)
        self.assertNotIn('bitblt', environ.get('PATH_INFO'))
        app_response = webob.Response(jpeg_image_data,
                                      content_type='image/jpeg')
        return app_response(environ, start_response)

    middleware = self._makeOne(mock_app)
    request = webob.Request.blank('foo.jpg')
    result = middleware(request.environ, mock_start_response)
    status, headers = captured
    headers = ResponseHeaders(headers)
    self.assertEqual(status, '200 OK')
    self.assertEqual(headers['content-type'], 'image/jpeg')
    self.assertEqual(headers['content-length'], str(len(jpeg_image_data)))
def custom_start_response(status, headers, exc_info=None):
    # Force the configured cache-control policy onto every response.
    wrapped = ResponseHeaders(headers)
    wrapped['Cache-Control'] = CACHE_CONTROL_HEADER
    return start_response(status, wrapped._items, exc_info)
def apply_rules(self, req, resp, resource_fetcher, log, default_theme=None):
    """
    Apply the whatever the appropriate rules are to the request/response.
    """
    # Headers declared in <meta> tags count as response headers for
    # matching purposes.
    extra_headers = parse_meta_headers(resp.body)
    if extra_headers:
        response_headers = ResponseHeaders(resp.headerlist + extra_headers)
    else:
        response_headers = resp.headers
    # Determine which page classes apply; an abort means "leave the
    # response untouched".
    try:
        classes = run_matches(self.matchers, req, resp, response_headers, log)
    except AbortTheme:
        return resp
    # Page classes may also be supplied via header or WSGI environ.
    if 'X-Deliverance-Page-Class' in response_headers:
        log.debug(self, "Found page class %s in headers",
                  response_headers['X-Deliverance-Page-Class'].strip())
        classes.extend(
            response_headers['X-Deliverance-Page-Class'].strip().split())
    if 'deliverance.page_classes' in req.environ:
        log.debug(self, "Found page class in WSGI environ: %s",
                  ' '.join(req.environ["deliverance.page_classes"]))
        classes.extend(req.environ['deliverance.page_classes'])
    if not classes:
        classes = ['default']
    # Collect the (deduplicated, ordered) rules for all page classes; the
    # last rule-supplied theme wins.
    rules = []
    theme = None
    for class_name in classes:
        ## FIXME: handle case of unknown classes
        ## Or do that during compilation?
        for rule in self.rules_by_class.get(class_name, []):
            if rule not in rules:
                rules.append(rule)
                if rule.theme:
                    theme = rule.theme
    # Theme fallback chain: rule theme -> ruleset default -> caller default.
    if theme is None:
        theme = self.default_theme
    if theme is None and default_theme is not None:
        theme = Theme(href=default_theme,
                      source_location=self.source_location)
    if theme is None:
        log.error(self, "No theme has been defined for the request")
        return resp
    try:
        # Fetch and parse the theme document.
        theme_href = theme.resolve_href(req, resp, log)
        original_theme_resp = self.get_theme_response(
            theme_href, resource_fetcher, log)
        theme_doc = self.get_theme_doc(
            original_theme_resp, theme_href,
            should_escape_cdata=True,
            should_fix_meta_charset_position=True)
        # Normalize and parse the content document; CDATA is escaped so
        # it survives the lxml round-trip and is restored at the end.
        resp = force_charset(resp)
        body = resp.unicode_body
        body = escape_cdata(body)
        body = fix_meta_charset_position(body)
        content_doc = self.parse_document(body, req.url)
        # Apply each matching rule; any rule may suppress the implicit
        # standard rule.
        run_standard = True
        for rule in rules:
            if rule.match is not None:
                matches = rule.match(req, resp, response_headers, log)
                if not matches:
                    log.debug(rule, "Skipping <rule>")
                    continue
            rule.apply(content_doc, theme_doc, resource_fetcher, log)
            if rule.suppress_standard:
                run_standard = False
        if run_standard:
            ## FIXME: should it be possible to put the standard rule in the ruleset?
            standard_rule.apply(content_doc, theme_doc, resource_fetcher, log)
    except AbortTheme:
        return resp
    remove_content_attribs(theme_doc)
    ## FIXME: handle caching?
    # Pick whichever tree carries the doctype to decide the output
    # serialization method.
    if original_theme_resp.body.strip().startswith("<!DOCTYPE"):
        tree = theme_doc.getroottree()
    else:
        tree = content_doc.getroottree()
    if "XHTML" in tree.docinfo.doctype:
        method = "xml"
    else:
        method = "html"
    # Re-serialize with the doctype prepended, then emit the final body,
    # restoring the CDATA sections escaped earlier.
    theme_str = tostring(theme_doc, include_meta_content_type=True)
    theme_str = tree.docinfo.doctype + theme_str
    theme_doc = document_fromstring(theme_str)
    tree = theme_doc.getroottree()
    resp.body = tostring(tree, method=method,
                         include_meta_content_type=True)
    resp.body = unescape_cdata(resp.body)
    return resp