def testCsp_globalDomainWhiteList(self):
    """Checks the CSP header produced when global domain whitelists apply."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(
        request, '<b>hello</b>', 'text/html', csp_scripts_sha256s=['abcd'])
    expected_csp = (
        "default-src 'none';base-uri 'self';connect-src 'self';"
        "font-src 'self';img-src 'self' data: https://example.com;object-src 'none';"
        "style-src https://www.gstatic.com data: 'unsafe-inline' https://googol.com;"
        "script-src https://tensorflow.org/tensorboard strict-dynamic 'sha256-abcd'"
    )
    self.assertEqual(
        response.headers.get('Content-Security-Policy'), expected_csp)
def testCsp(self):
    """Verifies the default Content-Security-Policy header for HTML."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(
        request, "<b>hello</b>", "text/html",
        csp_scripts_sha256s=["abcdefghi"])
    expected_csp = (
        "default-src 'self';font-src 'self' data:;frame-ancestors *;"
        "frame-src 'self';img-src 'self' data: blob:;object-src 'none';"
        "style-src 'self' https://www.gstatic.com data: 'unsafe-inline';"
        "connect-src 'self';script-src 'self' 'unsafe-eval' 'sha256-abcdefghi'"
    )
    self.assertEqual(
        response.headers.get("Content-Security-Policy"), expected_csp)
def test_accept_mixin():
    """Accept-* headers are parsed into their corresponding Accept classes."""
    environ = {
        'HTTP_ACCEPT': 'text/xml,application/xml,application/xhtml+xml,'
                       'text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5',
        'HTTP_ACCEPT_CHARSET': 'ISO-8859-1,utf-8;q=0.7,*;q=0.7',
        'HTTP_ACCEPT_ENCODING': 'gzip,deflate',
        'HTTP_ACCEPT_LANGUAGE': 'en-us,en;q=0.5',
    }
    request = wrappers.Request(environ)
    assert request.accept_mimetypes == MIMEAccept([
        ('text/xml', 1), ('image/png', 1), ('application/xml', 1),
        ('application/xhtml+xml', 1), ('text/html', 0.9),
        ('text/plain', 0.8), ('*/*', 0.5),
    ])
    strict_eq(request.accept_charsets, CharsetAccept([
        ('ISO-8859-1', 1), ('utf-8', 0.7), ('*', 0.7),
    ]))
    strict_eq(request.accept_encodings,
              Accept([('gzip', 1), ('deflate', 1)]))
    strict_eq(request.accept_languages,
              LanguageAccept([('en-us', 1), ('en', 0.5)]))
    # An empty Accept header parses to an empty MIMEAccept.
    request = wrappers.Request({'HTTP_ACCEPT': ''})
    strict_eq(request.accept_mimetypes, MIMEAccept())
def __call__(self, environ, start_response):
    """WSGI entry point: proxy the request, mapping failures to a 500.

    Args:
        environ: See WSGI spec.
        start_response: See WSGI spec.

    Returns:
        A WSGI response iterable.
    """
    request = wrappers.Request(environ)
    LOG.debug("Request: %s", request)
    try:
        response = self._proxy_request(request.remote_addr,
                                       request.path,
                                       request.query_string)
    except Exception:
        # Log the full traceback, then return a generic 500 so no internal
        # details leak to the caller.
        LOG.exception("Unexpected error.")
        msg = ('An unknown error has occurred. '
               'Please try your request again.')
        # BUG FIX: `unicode` is a Python 2-only builtin; under Python 3 it
        # raised NameError inside this handler, masking the original error.
        # `str` behaves identically here on both versions (ASCII message).
        response = exceptions.InternalServerError(description=str(msg))
    return response(environ, start_response)
def testPrecompressedResponse_streamingDecompression_catchesBadSize(self):
    """A corrupted gzip size trailer surfaces an IOError on iteration."""
    plain = b"hello hello hello world"
    compressed = _gzip(plain)
    # Corrupt the gzipped data's stored content size (last 4 bytes).
    corrupted = compressed[:-4] + _bitflip(compressed[-4:])
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(
        request, corrupted, "text/plain", content_encoding="gzip")
    # Streaming gunzip defers actual unzipping until the response is used;
    # once we iterate over the whole file-wrapper application iterator, the
    # underlying GzipFile should be closed, and throw the size check error.
    with six.assertRaisesRegex(self, IOError, "Incorrect length"):
        _ = list(response.response)
def testCsp_globalDomainWhiteList(self):
    """Whitelisted domains are appended to the relevant CSP directives."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(
        request, "<b>hello</b>", "text/html", csp_scripts_sha256s=["abcd"])
    expected_csp = (
        "default-src 'self';font-src 'self';frame-ancestors *;"
        "frame-src 'self' https://myframe.com;"
        "img-src 'self' data: blob: https://example.com;"
        "object-src 'none';style-src 'self' https://www.gstatic.com data: "
        "'unsafe-inline' https://googol.com;script-src "
        "https://tensorflow.org/tensorboard 'self' 'unsafe-eval' 'sha256-abcd'"
    )
    self.assertEqual(
        response.headers.get("Content-Security-Policy"), expected_csp)
def test_etag_request_mixin():
    """Cache-control and conditional headers parse into structured objects."""
    request = wrappers.Request({
        'HTTP_CACHE_CONTROL': 'no-store, no-cache',
        'HTTP_IF_MATCH': 'W/"foo", bar, "baz"',
        'HTTP_IF_NONE_MATCH': 'W/"foo", bar, "baz"',
        'HTTP_IF_MODIFIED_SINCE': 'Tue, 22 Jan 2008 11:18:44 GMT',
        'HTTP_IF_UNMODIFIED_SINCE': 'Tue, 22 Jan 2008 11:18:44 GMT',
    })
    assert request.cache_control.no_store
    assert request.cache_control.no_cache
    # If-Match and If-None-Match carry identical values; check both alike.
    for etags in (request.if_match, request.if_none_match):
        assert etags('bar')
        assert etags.contains_raw('W/"foo"')
        assert etags.contains_weak('foo')
        assert not etags.contains('foo')
    expected_dt = datetime(2008, 1, 22, 11, 18, 44)
    assert request.if_modified_since == expected_dt
    assert request.if_unmodified_since == expected_dt
def __call__(self, environ, start_response):  # pylint: disable=invalid-name
    """Central entry point for the TensorBoard application.

    This method handles routing to sub-applications. It does simple routing
    using regular expression matching. This __call__ method conforms to the
    WSGI spec, so that instances of this class are WSGI applications.

    Args:
      environ: See WSGI spec.
      start_response: See WSGI spec.

    Returns:
      A werkzeug Response.
    """
    request = wrappers.Request(environ)
    # pylint: disable=too-many-function-args
    clean_path = _clean_path(urlparse.urlparse(request.path).path)
    application = self.data_applications.get(clean_path)
    if application is not None:
        return application(environ, start_response)
    tf.logging.warning('path %s not found, sending 404', clean_path)
    return http_util.Respond(
        request, 'Not found', 'text/plain', code=404)(environ, start_response)
def dispatch_request(self, environ: wsgi_types.Environment,
                     start_response: wsgi_types.StartResponse) -> Response:
    """Handles the routing of requests.

    Args:
        environ (wsgi_types.Environment): Required. The WSGI environment.
        start_response (wsgi_types.StartResponse): Required. The response
            callable provided by the WSGI server.

    Returns:
        A response iterable.
    """
    # Route on the exact request path; unknown paths get a plain 404.
    request = wrappers.Request(environ)
    handler = self._routes.get(request.path)
    if handler is None:
        not_found = wrappers.Response("Not Found", status=404)
        return not_found(environ, start_response)
    return handler(environ, start_response)
def test_etag_request():
    """Conditional request headers parse into cache_control/etag/datetime."""
    request = wrappers.Request({
        "HTTP_CACHE_CONTROL": "no-store, no-cache",
        "HTTP_IF_MATCH": 'W/"foo", bar, "baz"',
        "HTTP_IF_NONE_MATCH": 'W/"foo", bar, "baz"',
        "HTTP_IF_MODIFIED_SINCE": "Tue, 22 Jan 2008 11:18:44 GMT",
        "HTTP_IF_UNMODIFIED_SINCE": "Tue, 22 Jan 2008 11:18:44 GMT",
        "SERVER_NAME": "eggs",
        "SERVER_PORT": "80",
    })
    assert request.cache_control.no_store
    assert request.cache_control.no_cache
    # Both conditional headers carry the same ETag list; verify them alike.
    for etags in (request.if_match, request.if_none_match):
        assert etags("bar")
        assert etags.contains_raw('W/"foo"')
        assert etags.contains_weak("foo")
        assert not etags.contains("foo")
    expected = datetime(2008, 1, 22, 11, 18, 44, tzinfo=timezone.utc)
    assert request.if_modified_since == expected
    assert request.if_unmodified_since == expected
def __call__(self, environ, start_response):
    """Implementation of the WSGI interface.

    Routes the request either to a registered custom handler or to a static
    file under the project root; any uncaught failure is rendered as an
    HTML 500 page.
    """
    request = wrappers.Request(environ)
    try:
        parsed_url = urlparse(request.path)
        # Remove a trailing slash, if present.
        clean_path = parsed_url.path
        if clean_path.endswith('/'):
            clean_path = clean_path[:-1]
        # Custom handlers take precedence over static-file serving.
        if clean_path in self._handlers:
            return self._ServeCustomHandler(request, clean_path)(
                environ, start_response)
        else:
            # An empty path or /index.html maps to the configured index file.
            is_index = not clean_path or clean_path == '/index.html'
            if is_index:
                clean_path = os.path.join(self._project_root, self._index_file)
            else:
                # Strip off the leading forward slash. Don't do it for index
                # because in the vulcanized version we use an absolute path.
                clean_path = os.path.join(
                    self._project_root, clean_path.lstrip('/'))
            response = self._ServeStaticFile(request, clean_path)
    except Exception as e:  # pylint: disable=broad-except
        # Render the error and traceback into the page so failures are
        # visible in the browser as well as in the log.
        errors = (str(e), str(traceback.format_exc()))
        html_response = (
            '<code>Uncaught error: %s <br><br> <code>%s</code></code>' % errors)
        logging.error('Uncaught error: %s \n\n %s', *errors)
        response = self.respond(request, html_response, 'text/html', 500)
    return response(environ, start_response)
def test_user_agent_mixin():
    """User-Agent strings parse into browser/platform/version/language.

    Each tuple is (raw User-Agent header, expected browser, expected
    platform, expected version, expected language); None means "not
    detected" for that field.
    """
    user_agents = [
        (
            "Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.11) "
            "Gecko/20071127 Firefox/2.0.0.11",
            "firefox",
            "macos",
            "2.0.0.11",
            "en-US",
        ),
        (
            "Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; de-DE) Opera 8.54",
            "opera",
            "windows",
            "8.54",
            "de-DE",
        ),
        (
            "Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420 "
            "(KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3",
            "safari",
            "iphone",
            "3.0",
            "en",
        ),
        (
            "Bot Googlebot/2.1 ( http://www.googlebot.com/bot.html)",
            "google",
            None,
            "2.1",
            None,
        ),
        (
            "Mozilla/5.0 (X11; CrOS armv7l 3701.81.0) AppleWebKit/537.31 "
            "(KHTML, like Gecko) Chrome/26.0.1410.57 Safari/537.31",
            "chrome",
            "chromeos",
            "26.0.1410.57",
            None,
        ),
        (
            "Mozilla/5.0 (Windows NT 6.3; Trident/7.0; .NET4.0E; rv:11.0) like Gecko",
            "msie",
            "windows",
            "11.0",
            None,
        ),
        (
            "Mozilla/5.0 (SymbianOS/9.3; Series60/3.2 NokiaE5-00/101.003; "
            "Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 "
            "(KHTML, like Gecko) NokiaBrowser/7.3.1.35 Mobile Safari/533.4 3gpp-gba",
            "safari",
            "symbian",
            "533.4",
            None,
        ),
        (
            "Mozilla/5.0 (X11; OpenBSD amd64; rv:45.0) Gecko/20100101 Firefox/45.0",
            "firefox",
            "openbsd",
            "45.0",
            None,
        ),
        (
            "Mozilla/5.0 (X11; NetBSD amd64; rv:45.0) Gecko/20100101 Firefox/45.0",
            "firefox",
            "netbsd",
            "45.0",
            None,
        ),
        (
            "Mozilla/5.0 (X11; FreeBSD amd64) AppleWebKit/537.36 (KHTML, like Gecko) "
            "Chrome/48.0.2564.103 Safari/537.36",
            "chrome",
            "freebsd",
            "48.0.2564.103",
            None,
        ),
        (
            "Mozilla/5.0 (X11; FreeBSD amd64; rv:45.0) Gecko/20100101 Firefox/45.0",
            "firefox",
            "freebsd",
            "45.0",
            None,
        ),
        (
            "Mozilla/5.0 (X11; U; NetBSD amd64; en-US; rv:) Gecko/20150921 "
            "SeaMonkey/1.1.18",
            "seamonkey",
            "netbsd",
            "1.1.18",
            "en-US",
        ),
        (
            "Mozilla/5.0 (Windows; U; Windows NT 6.2; WOW64; rv:1.8.0.7) "
            "Gecko/20110321 MultiZilla/4.33.2.6a SeaMonkey/8.6.55",
            "seamonkey",
            "windows",
            "8.6.55",
            None,
        ),
        (
            "Mozilla/5.0 (X11; Linux x86_64; rv:12.0) Gecko/20120427 Firefox/12.0 "
            "SeaMonkey/2.9",
            "seamonkey",
            "linux",
            "2.9",
            None,
        ),
        (
            "Mozilla/5.0 (compatible; Baiduspider/2.0; "
            "+http://www.baidu.com/search/spider.html)",
            "baidu",
            None,
            "2.0",
            None,
        ),
        (
            "Mozilla/5.0 (X11; SunOS i86pc; rv:38.0) Gecko/20100101 Firefox/38.0",
            "firefox",
            "solaris",
            "38.0",
            None,
        ),
        (
            "Mozilla/5.0 (X11; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0 "
            "Iceweasel/38.7.1",
            "firefox",
            "linux",
            "38.0",
            None,
        ),
        (
            "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/50.0.2661.75 Safari/537.36",
            "chrome",
            "windows",
            "50.0.2661.75",
            None,
        ),
        (
            "Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)",
            "bing",
            None,
            "2.0",
            None,
        ),
        (
            "Mozilla/5.0 (X11; DragonFly x86_64) AppleWebKit/537.36 "
            "(KHTML, like Gecko) Chrome/47.0.2526.106 Safari/537.36",
            "chrome",
            "dragonflybsd",
            "47.0.2526.106",
            None,
        ),
        (
            "Mozilla/5.0 (X11; U; DragonFly i386; de; rv:1.9.1) "
            "Gecko/20090720 Firefox/3.5.1",
            "firefox",
            "dragonflybsd",
            "3.5.1",
            "de",
        ),
        (
            "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_5) AppleWebKit/537.36"
            "(KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36 OPR/60.0.3255.95",
            "opera",
            "macos",
            "60.0.3255.95",
            None,
        ),
    ]
    for ua, browser, platform, version, lang in user_agents:
        request = wrappers.Request({"HTTP_USER_AGENT": ua})
        strict_eq(request.user_agent.browser, browser)
        strict_eq(request.user_agent.platform, platform)
        strict_eq(request.user_agent.version, version)
        strict_eq(request.user_agent.language, lang)
        assert bool(request.user_agent)
        # The raw header string must round-trip unchanged.
        strict_eq(request.user_agent.to_header(), ua)
        strict_eq(str(request.user_agent), ua)
    # An unrecognized User-Agent yields a falsy user_agent object.
    request = wrappers.Request({"HTTP_USER_AGENT": "foo"})
    assert not request.user_agent
def test_shallow_mode():
    """Shallow requests expose query args but refuse to read the body."""
    request = wrappers.Request({"QUERY_STRING": "foo=bar"}, shallow=True)
    assert request.args["foo"] == "bar"
    with pytest.raises(RuntimeError):
        request.form["foo"]
def test_user_agent_mixin():
    """User-Agent strings parse into browser/platform/version/language.

    Each tuple is (raw User-Agent header, expected browser, expected
    platform, expected version, expected language); None means "not
    detected" for that field.
    """
    user_agents = [
        ('Mozilla/5.0 (Macintosh; U; Intel Mac OS X; en-US; rv:1.8.1.11) '
         'Gecko/20071127 Firefox/2.0.0.11',
         'firefox', 'macos', '2.0.0.11', 'en-US'),
        ('Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; de-DE) Opera 8.54',
         'opera', 'windows', '8.54', 'de-DE'),
        ('Mozilla/5.0 (iPhone; U; CPU like Mac OS X; en) AppleWebKit/420 '
         '(KHTML, like Gecko) Version/3.0 Mobile/1A543a Safari/419.3',
         'safari', 'iphone', '3.0', 'en'),
        ('Bot Googlebot/2.1 ( http://www.googlebot.com/bot.html)',
         'google', None, '2.1', None),
        ('Mozilla/5.0 (X11; CrOS armv7l 3701.81.0) AppleWebKit/537.31 '
         '(KHTML, like Gecko) Chrome/26.0.1410.57 Safari/537.31',
         'chrome', 'chromeos', '26.0.1410.57', None),
        ('Mozilla/5.0 (Windows NT 6.3; Trident/7.0; .NET4.0E; rv:11.0) like Gecko',
         'msie', 'windows', '11.0', None),
        ('Mozilla/5.0 (SymbianOS/9.3; Series60/3.2 NokiaE5-00/101.003; '
         'Profile/MIDP-2.1 Configuration/CLDC-1.1 ) AppleWebKit/533.4 (KHTML, like Gecko) '
         'NokiaBrowser/7.3.1.35 Mobile Safari/533.4 3gpp-gba',
         'safari', 'symbian', '533.4', None),
        ('Mozilla/5.0 (X11; OpenBSD amd64; rv:45.0) Gecko/20100101 Firefox/45.0',
         'firefox', 'openbsd', '45.0', None),
        ('Mozilla/5.0 (X11; NetBSD amd64; rv:45.0) Gecko/20100101 Firefox/45.0',
         'firefox', 'netbsd', '45.0', None),
        ('Mozilla/5.0 (X11; FreeBSD amd64) AppleWebKit/537.36 (KHTML, like Gecko) '
         'Chrome/48.0.2564.103 Safari/537.36',
         'chrome', 'freebsd', '48.0.2564.103', None),
        ('Mozilla/5.0 (X11; FreeBSD amd64; rv:45.0) Gecko/20100101 Firefox/45.0',
         'firefox', 'freebsd', '45.0', None),
        ('Mozilla/5.0 (X11; U; NetBSD amd64; en-US; rv:) Gecko/20150921 SeaMonkey/1.1.18',
         'seamonkey', 'netbsd', '1.1.18', 'en-US'),
        ('Mozilla/5.0 (Windows; U; Windows NT 6.2; WOW64; rv:1.8.0.7) '
         'Gecko/20110321 MultiZilla/4.33.2.6a SeaMonkey/8.6.55',
         'seamonkey', 'windows', '8.6.55', None),
        ('Mozilla/5.0 (X11; Linux x86_64; rv:12.0) Gecko/20120427 Firefox/12.0 SeaMonkey/2.9',
         'seamonkey', 'linux', '2.9', None),
        ('Mozilla/5.0 (compatible; Baiduspider/2.0; +http://www.baidu.com/search/spider.html)',
         'baidu', None, '2.0', None),
        ('Mozilla/5.0 (X11; SunOS i86pc; rv:38.0) Gecko/20100101 Firefox/38.0',
         'firefox', 'solaris', '38.0', None),
        ('Mozilla/5.0 (X11; Linux x86_64; rv:38.0) Gecko/20100101 Firefox/38.0 Iceweasel/38.7.1',
         'firefox', 'linux', '38.0', None),
        ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) '
         'Chrome/50.0.2661.75 Safari/537.36',
         'chrome', 'windows', '50.0.2661.75', None),
        ('Mozilla/5.0 (compatible; bingbot/2.0; +http://www.bing.com/bingbot.htm)',
         'bing', None, '2.0', None),
        ('Mozilla/5.0 (X11; DragonFly x86_64) AppleWebKit/537.36 (KHTML, like Gecko) '
         'Chrome/47.0.2526.106 Safari/537.36',
         'chrome', 'dragonflybsd', '47.0.2526.106', None),
        ('Mozilla/5.0 (X11; U; DragonFly i386; de; rv:1.9.1) Gecko/20090720 Firefox/3.5.1',
         'firefox', 'dragonflybsd', '3.5.1', 'de')
    ]
    for ua, browser, platform, version, lang in user_agents:
        request = wrappers.Request({'HTTP_USER_AGENT': ua})
        strict_eq(request.user_agent.browser, browser)
        strict_eq(request.user_agent.platform, platform)
        strict_eq(request.user_agent.version, version)
        strict_eq(request.user_agent.language, lang)
        assert bool(request.user_agent)
        # The raw header string must round-trip unchanged.
        strict_eq(request.user_agent.to_header(), ua)
        strict_eq(str(request.user_agent), ua)
    # An unrecognized User-Agent yields a falsy user_agent object.
    request = wrappers.Request({'HTTP_USER_AGENT': 'foo'})
    assert not request.user_agent
def test_request_method_case_sensitivity():
    """A lowercase REQUEST_METHOD is normalized to uppercase."""
    environ = {"REQUEST_METHOD": "get"}
    assert wrappers.Request(environ).method == "GET"
def testAcceptGzip_alreadyCompressed_sendsPrecompressedResponse(self):
    """A pre-gzipped payload passes through when the client accepts gzip."""
    payload = _gzip(b'hello hello hello world')
    environ = wtest.EnvironBuilder(
        headers={'Accept-Encoding': 'gzip'}).get_environ()
    request = wrappers.Request(environ)
    response = http_util.Respond(
        request, payload, 'text/plain', content_encoding='gzip')
    # Still singly zipped
    self.assertEqual(response.response, [payload])
def testPlainText_appendsUtf8ToContentType(self):
    """text/plain responses gain an explicit utf-8 charset parameter."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(request, 'hello', 'text/plain')
    self.assertEqual(response.headers.get('Content-Type'),
                     'text/plain; charset=utf-8')
def testJson_getsAutoSerialized(self):
    """Python objects with an application/json type are auto-serialized."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(request, [1, 2, 3], 'application/json')
    self.assertEqual(response.response[0], b'[1, 2, 3]')
def test_request_method_case_sensitivity():
    """A lowercase REQUEST_METHOD is normalized to uppercase."""
    environ = {'REQUEST_METHOD': 'get'}
    assert wrappers.Request(environ).method == 'GET'
def test_shallow_mode(self):
    """Shallow requests expose query args but refuse to parse the form."""
    request = wrappers.Request({'QUERY_STRING': 'foo=bar'}, shallow=True)
    args = request.args
    self.assert_equal(args['foo'], 'bar')
    self.assert_raises(RuntimeError, lambda: request.form['foo'])
def test_request_method_case_sensitivity():
    """A lowercase REQUEST_METHOD is normalized to uppercase."""
    environ = {
        "REQUEST_METHOD": "get",
        "SERVER_NAME": "eggs",
        "SERVER_PORT": "80",
    }
    req = wrappers.Request(environ)
    assert req.method == "GET"
def test_user_agent():
    """The raw User-Agent header round-trips through the user_agent object."""
    ua = "Mozilla/5.0 (X11; Linux x86_64; rv:94.0) Gecko/20100101 Firefox/94.0"
    request = wrappers.Request({"HTTP_USER_AGENT": ua})
    agent = request.user_agent
    assert agent.to_header() == ua
    assert str(agent) == ua
    assert agent.string == ua
def testHelloWorld(self):
    """Respond yields a 200 with the body and a correct Content-Length."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(request, "<b>hello world</b>", "text/html")
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.response, [six.b("<b>hello world</b>")])
    self.assertEqual(response.headers.get("Content-Length"), "18")
def testCsp_badGlobalDomainWhiteList(self):
    """Malformed whitelist entries are rejected for every CSP directive.

    Each whitelist config is patched with hostile or malformed values and
    Respond must raise ValueError rather than emit a weakened policy.
    """
    q = wrappers.Request(wtest.EnvironBuilder().get_environ())
    # All whitelist module-level configs share the same validation rules.
    configs = [
        "_CSP_SCRIPT_DOMAINS_WHITELIST",
        "_CSP_IMG_DOMAINS_WHITELIST",
        "_CSP_STYLE_DOMAINS_WHITELIST",
        "_CSP_FRAME_DOMAINS_WHITELIST",
        "_CSP_FONT_DOMAINS_WHITELIST",
    ]
    for config in configs:
        # Plain http (non-https) URLs must be rejected.
        with mock.patch.object(http_util, config, ["http://tensorflow.org"]):
            with self.assertRaisesRegex(
                    ValueError, "^Expected all whitelist to be a https URL"):
                http_util.Respond(
                    q,
                    "<b>hello</b>",
                    "text/html",
                    csp_scripts_sha256s=["abcd"],
                )
        # Cannot grant more trust to a script from a remote source.
        with mock.patch.object(
                http_util,
                config,
                ["'strict-dynamic' 'unsafe-eval' https://tensorflow.org/"],
        ):
            with self.assertRaisesRegex(
                    ValueError, "^Expected all whitelist to be a https URL"):
                http_util.Respond(
                    q,
                    "<b>hello</b>",
                    "text/html",
                    csp_scripts_sha256s=["abcd"],
                )
        # Attempt to terminate the script-src to specify a new one that
        # allows ALL!
        with mock.patch.object(
                http_util, config, ["https://tensorflow.org;script-src *"]):
            with self.assertRaisesRegex(
                    ValueError,
                    '^Expected whitelist domain to not contain ";"'):
                http_util.Respond(
                    q,
                    "<b>hello</b>",
                    "text/html",
                    csp_scripts_sha256s=["abcd"],
                )
        # Attempt to use whitespace, delimit character, to specify a new one.
        with mock.patch.object(
                http_util, config, ["https://tensorflow.org *"]):
            with self.assertRaisesRegex(
                    ValueError,
                    "^Expected whitelist domain to not contain a whitespace",
            ):
                http_util.Respond(
                    q,
                    "<b>hello</b>",
                    "text/html",
                    csp_scripts_sha256s=["abcd"],
                )
def wsgi_app(self, environ, start_response):
    """Builds a Request, dispatches it, and invokes the resulting response."""
    response = self.dispatch_request(wrappers.Request(environ))
    return response(environ, start_response)
def testExpires_setsCruiseControl(self):
    """An expires argument yields a private max-age Cache-Control header."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(
        request, '<b>hello world</b>', 'text/html', expires=60)
    self.assertEqual(response.headers.get('Cache-Control'),
                     'private, max-age=60')
def test_shallow_mode():
    """Shallow requests expose query args but refuse to read the body."""
    request = wrappers.Request({'QUERY_STRING': 'foo=bar'}, shallow=True)
    assert request.args['foo'] == 'bar'
    with pytest.raises(RuntimeError):
        request.form['foo']
def testHelloWorld(self):
    """Respond yields a 200 whose body is the given HTML."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(request, '<b>hello world</b>', 'text/html')
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.response[0], six.b('<b>hello world</b>'))
def __call__(self, environ, start_response):  # pragma: no cover
    """WSGI entry point delegating to dispatch_request."""
    request = wrappers.Request(environ)
    return self.dispatch_request(request)(environ, start_response)
def testHelloWorld(self):
    """Respond yields a 200 with the body and a correct Content-Length."""
    request = wrappers.Request(wtest.EnvironBuilder().get_environ())
    response = http_util.Respond(request, '<b>hello world</b>', 'text/html')
    self.assertEqual(response.status_code, 200)
    self.assertEqual(response.response, [six.b('<b>hello world</b>')])
    self.assertEqual(response.headers.get('Content-Length'), '18')