def ready(self):
    print("DjangoBokehConfig.ready()")
    os.environ['BOKEH_NODEJS_PATH'] = settings.BOKEH_NODEJS_PATH

    bokeh_app_base_path = os.path.join(settings.BASE_DIR, "djangobokeh", "bokeh_apps")
    path_list = glob.glob(os.path.join(bokeh_app_base_path, "*.py"))
    print(path_list)

    applications = {}
    for path in path_list:
        application = build_single_handler_application(path)
        route = application.handlers[0].url_path()
        if not route:
            if '/' in applications:
                raise RuntimeError("Don't know the URL path to use for %s" % (path,))
            route = '/'
        applications[route] = application

    if callable(applications):
        applications = Application(FunctionHandler(applications))

    if isinstance(applications, Application):
        applications = {'/': applications}

    for k, v in list(applications.items()):
        if callable(v):
            applications[k] = Application(FunctionHandler(v))
        if all(not isinstance(handler, DocumentLifecycleHandler)
               for handler in applications[k]._handlers):
            applications[k].add(DocumentLifecycleHandler())

    self._applications = dict()
    for k, v in applications.items():
        self._applications[k] = ApplicationContext(v, url=k)

    self.routing_config = RoutingConfiguration(self._applications)
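For context, a minimal sketch (not part of the config above) of how one discovered script becomes a route/Application pair. The file name "sliders.py" is a hypothetical example; build_single_handler_application and url_path() are the same Bokeh APIs used in ready():

# Hedged sketch: derive a route from a single Bokeh app script.
from bokeh.command.util import build_single_handler_application

app = build_single_handler_application("djangobokeh/bokeh_apps/sliders.py")  # hypothetical path
route = app.handlers[0].url_path() or "/"   # e.g. "/sliders"
applications = {route: app}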
def __init__(self,
             applications,
             prefix=None,
             extra_websocket_origins=None,
             extra_patterns=None,
             secret_key=settings.secret_key_bytes(),
             sign_sessions=settings.sign_sessions(),
             generate_session_ids=True,
             keep_alive_milliseconds=DEFAULT_KEEP_ALIVE_MS,
             check_unused_sessions_milliseconds=DEFAULT_CHECK_UNUSED_MS,
             unused_session_lifetime_milliseconds=DEFAULT_UNUSED_LIFETIME_MS,
             stats_log_frequency_milliseconds=DEFAULT_STATS_LOG_FREQ_MS,
             mem_log_frequency_milliseconds=DEFAULT_MEM_LOG_FREQ_MS,
             use_index=True,
             redirect_root=True,
             websocket_max_message_size_bytes=DEFAULT_WEBSOCKET_MAX_MESSAGE_SIZE_BYTES,
             websocket_compression_level=None,
             websocket_compression_mem_level=None,
             index=None,
             auth_provider=NullAuth(),
             xsrf_cookies=False,
             include_headers=None,
             include_cookies=None,
             exclude_headers=None,
             exclude_cookies=None,
             session_token_expiration=DEFAULT_SESSION_TOKEN_EXPIRATION,
             **kwargs):

    # This will be set when initialize is called
    self._loop = None

    from bokeh.application.handlers.function import FunctionHandler
    from bokeh.application.handlers.document_lifecycle import DocumentLifecycleHandler

    # Normalize 'applications' to a dict of Application instances keyed by URL path
    if callable(applications):
        applications = Application(FunctionHandler(applications))

    if isinstance(applications, Application):
        applications = {'/': applications}

    for k, v in list(applications.items()):
        if callable(v):
            applications[k] = Application(FunctionHandler(v))
        if all(not isinstance(handler, DocumentLifecycleHandler)
               for handler in applications[k]._handlers):
            applications[k].add(DocumentLifecycleHandler())

    if isinstance(applications, Application):
        applications = {'/': applications}

    if prefix is None:
        prefix = ""
    prefix = prefix.strip("/")
    if prefix:
        prefix = "/" + prefix
    self._prefix = prefix

    self._index = index

    if keep_alive_milliseconds < 0:  # 0 means "disable"
        raise ValueError("keep_alive_milliseconds must be >= 0")
    else:
        if keep_alive_milliseconds == 0:
            log.info("Keep-alive ping disabled")
        elif keep_alive_milliseconds != DEFAULT_KEEP_ALIVE_MS:
            log.info("Keep-alive ping configured every %d milliseconds", keep_alive_milliseconds)
    self._keep_alive_milliseconds = keep_alive_milliseconds

    if check_unused_sessions_milliseconds <= 0:
        raise ValueError("check_unused_sessions_milliseconds must be > 0")
    elif check_unused_sessions_milliseconds != DEFAULT_CHECK_UNUSED_MS:
        log.info("Check for unused sessions every %d milliseconds", check_unused_sessions_milliseconds)
    self._check_unused_sessions_milliseconds = check_unused_sessions_milliseconds

    if unused_session_lifetime_milliseconds <= 0:
        raise ValueError("unused_session_lifetime_milliseconds must be > 0")
    elif unused_session_lifetime_milliseconds != DEFAULT_UNUSED_LIFETIME_MS:
        log.info("Unused sessions last for %d milliseconds", unused_session_lifetime_milliseconds)
    self._unused_session_lifetime_milliseconds = unused_session_lifetime_milliseconds

    if stats_log_frequency_milliseconds <= 0:
        raise ValueError("stats_log_frequency_milliseconds must be > 0")
    elif stats_log_frequency_milliseconds != DEFAULT_STATS_LOG_FREQ_MS:
        log.info("Log statistics every %d milliseconds", stats_log_frequency_milliseconds)
    self._stats_log_frequency_milliseconds = stats_log_frequency_milliseconds

    if mem_log_frequency_milliseconds < 0:  # 0 means "disable"
        raise ValueError("mem_log_frequency_milliseconds must be >= 0")
    elif mem_log_frequency_milliseconds > 0:
        if import_optional('psutil') is None:
            log.warning("Memory logging requested, but is disabled. Optional dependency 'psutil' is missing. "
                        "Try 'pip install psutil' or 'conda install psutil'")
            mem_log_frequency_milliseconds = 0
        elif mem_log_frequency_milliseconds != DEFAULT_MEM_LOG_FREQ_MS:
            log.info("Log memory usage every %d milliseconds", mem_log_frequency_milliseconds)
    self._mem_log_frequency_milliseconds = mem_log_frequency_milliseconds

    if websocket_max_message_size_bytes <= 0:
        raise ValueError("websocket_max_message_size_bytes must be positive")
    elif websocket_max_message_size_bytes != DEFAULT_WEBSOCKET_MAX_MESSAGE_SIZE_BYTES:
        log.info("Tornado websocket_max_message_size set to %d bytes (%0.2f MB)",
                 websocket_max_message_size_bytes,
                 websocket_max_message_size_bytes / 1024.0 ** 2)

    self.auth_provider = auth_provider

    if self.auth_provider.get_user or self.auth_provider.get_user_async:
        log.info("User authentication hooks provided (no default user)")
    else:
        log.info("User authentication hooks NOT provided (default user enabled)")

    kwargs['xsrf_cookies'] = xsrf_cookies
    if xsrf_cookies:
        log.info("XSRF cookie protection enabled")

    if session_token_expiration <= 0:
        raise ValueError("session_token_expiration must be > 0")
    else:
        self._session_token_expiration = session_token_expiration

    if exclude_cookies and include_cookies:
        raise ValueError("Declare either an include or an exclude list "
                         "for the cookies, not both.")
    self._exclude_cookies = exclude_cookies
    self._include_cookies = include_cookies

    if exclude_headers and include_headers:
        raise ValueError("Declare either an include or an exclude list "
                         "for the headers, not both.")
    self._exclude_headers = exclude_headers
    self._include_headers = include_headers

    if extra_websocket_origins is None:
        self._websocket_origins = set()
    else:
        self._websocket_origins = set(extra_websocket_origins)
    self._secret_key = secret_key
    self._sign_sessions = sign_sessions
    self._generate_session_ids = generate_session_ids
    log.debug("These host origins can connect to the websocket: %r", list(self._websocket_origins))

    # Wrap applications in ApplicationContext
    self._applications = dict()
    for k, v in applications.items():
        self._applications[k] = ApplicationContext(v, url=k, logout_url=self.auth_provider.logout_url)

    extra_patterns = extra_patterns or []
    extra_patterns.extend(self.auth_provider.endpoints)

    # Build the Tornado URL patterns for every application
    all_patterns = []
    for key, app in applications.items():
        app_patterns = []
        for p in per_app_patterns:
            if key == "/":
                route = p[0]
            else:
                route = key + p[0]
            context = {"application_context": self._applications[key]}
            if issubclass(p[1], WSHandler):
                context['compression_level'] = websocket_compression_level
                context['mem_level'] = websocket_compression_mem_level
            route = self._prefix + route
            app_patterns.append((route, p[1], context))

        websocket_path = None
        for r in app_patterns:
            if r[0].endswith("/ws"):
                websocket_path = r[0]
        if not websocket_path:
            raise RuntimeError("Couldn't find websocket path")
        for r in app_patterns:
            r[2]["bokeh_websocket_path"] = websocket_path

        all_patterns.extend(app_patterns)

        # add a per-app static path if requested by the application
        if app.static_path is not None:
            if key == "/":
                route = "/static/(.*)"
            else:
                route = key + "/static/(.*)"
            route = self._prefix + route
            all_patterns.append((route, StaticFileHandler, {"path": app.static_path}))

    for p in extra_patterns + toplevel_patterns:
        if p[1] == RootHandler:
            if use_index:
                data = {
                    "applications": self._applications,
                    "prefix": self._prefix,
                    "index": self._index,
                    "use_redirect": redirect_root,
                }
                prefixed_pat = (self._prefix + p[0],) + p[1:] + (data,)
                all_patterns.append(prefixed_pat)
        else:
            prefixed_pat = (self._prefix + p[0],) + p[1:]
            all_patterns.append(prefixed_pat)

    log.debug("Patterns are:")
    for line in pformat(all_patterns, width=60).split("\n"):
        log.debug(" " + line)

    super().__init__(all_patterns,
                     websocket_max_message_size=websocket_max_message_size_bytes,
                     **kwargs)
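A hedged usage sketch for the constructor above, mirroring the call Server.__init__ makes below; the document function, route, and origin list are illustrative assumptions, not required values:

# Hedged sketch: constructing BokehTornado directly.
from bokeh.application import Application
from bokeh.application.handlers.function import FunctionHandler

def make_doc(doc):
    # hypothetical app body: attach models to the Bokeh Document here
    doc.title = "demo"

tornado_app = BokehTornado(
    {"/demo": Application(FunctionHandler(make_doc))},  # apps keyed by URL path
    extra_websocket_origins=["localhost:5006"],          # hosts allowed to open the websocket
    prefix="",                                           # no URL prefix
)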
def __init__(self, applications, io_loop=None, http_server_kwargs=None, **kwargs):
    ''' Create a ``Server`` instance.

    Args:
        applications (dict[str, Application] or Application or callable) :
            A mapping from URL paths to Application instances, or a single
            Application to put at the root URL.

            The Application is a factory for Documents, with a new Document
            initialized for each Session. Each application is identified
            by a path that corresponds to a URL, like "/" or "/myapp".

            If a single Application is provided, it is mapped to the URL
            path "/" automatically.

            As a convenience, a callable may also be provided, in which
            case an Application will be created for it using
            ``FunctionHandler``.

        io_loop (IOLoop, optional) :
            An explicit Tornado ``IOLoop`` to run Bokeh Server code on. If
            None, ``IOLoop.current()`` will be used (default: None)

        http_server_kwargs (dict, optional) :
            Extra arguments passed to ``tornado.httpserver.HTTPServer``.

            E.g. ``max_buffer_size`` to specify the maximum upload size.
            More details can be found at:

            http://www.tornadoweb.org/en/stable/httpserver.html#http-server

            If None, no extra arguments are passed (default: None)

    Additionally, the following options may be passed to configure the
    operation of ``Server``:

    .. bokeh-options:: _ServerOpts
        :module: bokeh.server.server

    Any remaining keyword arguments will be passed as-is to ``BokehTornado``.

    '''
    log.info("Starting Bokeh server version %s (running on Tornado %s)" % (__version__, tornado.version))

    from bokeh.application.handlers.function import FunctionHandler
    from bokeh.application.handlers.document_lifecycle import DocumentLifecycleHandler

    if callable(applications):
        applications = Application(FunctionHandler(applications))

    if isinstance(applications, Application):
        applications = {'/': applications}

    for k, v in list(applications.items()):
        if callable(v):
            applications[k] = Application(FunctionHandler(v))
        if all(not isinstance(handler, DocumentLifecycleHandler)
               for handler in applications[k]._handlers):
            applications[k].add(DocumentLifecycleHandler())

    opts = _ServerOpts(kwargs)
    self._port = opts.port
    self._address = opts.address
    self._prefix = opts.prefix

    if opts.num_procs != 1:
        assert all(app.safe_to_fork for app in applications.values()), (
            'User application code has run before attempting to start '
            'multiple processes. This is considered an unsafe operation.')

    if opts.num_procs > 1 and io_loop is not None:
        raise RuntimeError(
            "Setting both num_procs and io_loop in Server is incompatible. "
            "Use BaseServer to coordinate an explicit IOLoop and multi-process HTTPServer")

    if opts.num_procs > 1 and sys.platform == "win32":
        raise RuntimeError("num_procs > 1 not supported on Windows")

    if http_server_kwargs is None:
        http_server_kwargs = {}
    http_server_kwargs.setdefault('xheaders', opts.use_xheaders)

    sockets, self._port = bind_sockets(self.address, self.port)

    extra_websocket_origins = create_hosts_whitelist(opts.allow_websocket_origin, self.port)
    try:
        tornado_app = BokehTornado(applications,
                                   extra_websocket_origins=extra_websocket_origins,
                                   prefix=self.prefix,
                                   websocket_max_message_size_bytes=opts.websocket_max_message_size,
                                   **kwargs)

        http_server = HTTPServer(tornado_app, **http_server_kwargs)
        http_server.start(opts.num_procs)
        http_server.add_sockets(sockets)

    except Exception:
        for s in sockets:
            s.close()
        raise

    # Can only refer to IOLoop after HTTPServer.start() is called, see #5524
    if io_loop is None:
        io_loop = IOLoop.current()

    super(Server, self).__init__(io_loop, tornado_app, http_server)
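A hedged usage sketch for Server: the route, port, and websocket origin below are assumptions for the example, not required values.

# Hedged sketch: serving a callable at "/app".
from bokeh.server.server import Server

def make_doc(doc):
    # hypothetical app body; Server wraps this callable in a FunctionHandler as above
    doc.title = "demo"

server = Server({"/app": make_doc}, port=5006,
                allow_websocket_origin=["localhost:5006"])
server.start()
# server.io_loop.start()  # blocks the process; uncomment to actually serve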
def _fixup(self, app: Application) -> Application:
    if not any(isinstance(handler, DocumentLifecycleHandler) for handler in app.handlers):
        app.add(DocumentLifecycleHandler())
    return app
def customize_kwargs(self, args, server_kwargs):
    '''Allows subclasses to customize ``server_kwargs``.

    Should modify and return a copy of the ``server_kwargs`` dictionary.
    '''
    kwargs = dict(server_kwargs)
    if 'index' not in kwargs:
        kwargs['index'] = INDEX_HTML

    kwargs['extra_patterns'] = patterns = kwargs.get('extra_patterns', [])

    static_dirs = parse_vars(args.static_dirs) if args.static_dirs else {}
    patterns += get_static_routes(static_dirs)

    files = []
    for f in args.files:
        if args.glob:
            files.extend(glob(f))
        else:
            files.append(f)

    if args.index and not args.index.endswith('.html'):
        found = False
        for ext in self._extensions:
            index = args.index if args.index.endswith(ext) else f'{args.index}{ext}'
            if any(f.endswith(index) for f in files):
                found = True
        if not found:
            raise ValueError(
                "The --index argument must either specify a jinja2 "
                "template with a .html file extension or select one "
                "of the applications being served as the default. "
                f"The specified application {index!r} could not be "
                "found.")

    # Handle tranquilized functions in the supplied functions
    if args.rest_provider in REST_PROVIDERS:
        pattern = REST_PROVIDERS[args.rest_provider](files, args.rest_endpoint)
        patterns.extend(pattern)
    elif args.rest_provider is not None:
        raise ValueError("rest-provider %r not recognized." % args.rest_provider)

    config.autoreload = args.autoreload

    if config.autoreload:
        for f in files:
            watch(f)

    if args.setup:
        setup_path = args.setup
        with open(setup_path) as f:
            setup_source = f.read()
        nodes = ast.parse(setup_source, os.fspath(setup_path))
        code = compile(nodes, filename=setup_path, mode='exec', dont_inherit=True)
        module_name = 'panel_setup_module'
        module = ModuleType(module_name)
        module.__dict__['__file__'] = fullpath(setup_path)
        exec(code, module.__dict__)
        state._setup_module = module

    if args.warm or args.autoreload:
        argvs = {f: args.args for f in files}
        applications = build_single_handler_applications(files, argvs)
        if args.autoreload:
            with record_modules():
                for app in applications.values():
                    doc = app.create_document()
                    with set_curdoc(doc):
                        state._on_load(None)
                    _cleanup_doc(doc)
        else:
            for app in applications.values():
                doc = app.create_document()
                with set_curdoc(doc):
                    state._on_load(None)
                _cleanup_doc(doc)

    prefix = args.prefix
    if prefix is None:
        prefix = ""
    prefix = prefix.strip("/")
    if prefix:
        prefix = "/" + prefix

    config.profiler = args.profiler
    if args.admin:
        from ..io.admin import admin_panel
        from ..io.server import per_app_patterns
        config._admin = True
        app = Application(FunctionHandler(admin_panel))
        unused_timeout = args.check_unused_sessions or 15000
        app_ctx = AdminApplicationContext(app, unused_timeout=unused_timeout, url='/admin')
        if all(not isinstance(handler, DocumentLifecycleHandler) for handler in app._handlers):
            app.add(DocumentLifecycleHandler())
        app_patterns = []
        for p in per_app_patterns:
            route = '/admin' + p[0]
            context = {"application_context": app_ctx}
            route = prefix + route
            app_patterns.append((route, p[1], context))

        websocket_path = None
        for r in app_patterns:
            if r[0].endswith("/ws"):
                websocket_path = r[0]
        if not websocket_path:
            raise RuntimeError("Couldn't find websocket path")
        for r in app_patterns:
            r[2]["bokeh_websocket_path"] = websocket_path

        try:
            import snakeviz
            SNAKEVIZ_PATH = os.path.join(os.path.dirname(snakeviz.__file__), 'static')
            app_patterns.append(
                ('/snakeviz/static/(.*)', StaticFileHandler, dict(path=SNAKEVIZ_PATH)))
        except Exception:
            pass
        patterns.extend(app_patterns)

    config.session_history = args.session_history
    if args.rest_session_info:
        pattern = REST_PROVIDERS['param'](files, 'rest')
        patterns.extend(pattern)
        state.publish('session_info', state, ['session_info'])

    if args.num_threads is not None:
        if config.nthreads is not None:
            raise ValueError(
                "Supply num_threads either using the environment variable "
                "PANEL_NUM_THREADS or as an explicit argument, not both.")
        config.nthreads = args.num_threads

    if args.oauth_provider:
        config.oauth_provider = args.oauth_provider
        config.oauth_expiry = args.oauth_expiry_days
        if config.oauth_key and args.oauth_key:
            raise ValueError(
                "Supply OAuth key either using environment variable "
                "or via explicit argument, not both.")
        elif args.oauth_key:
            config.oauth_key = args.oauth_key
        elif not config.oauth_key:
            raise ValueError(
                "When enabling an OAuth provider you must supply "
                "a valid oauth_key either using the --oauth-key "
                "CLI argument or PANEL_OAUTH_KEY environment "
                "variable.")
        if config.oauth_secret and args.oauth_secret:
            raise ValueError(
                "Supply OAuth secret either using environment variable "
                "or via explicit argument, not both.")
        elif args.oauth_secret:
            config.oauth_secret = args.oauth_secret
        elif not config.oauth_secret:
            raise ValueError(
                "When enabling an OAuth provider you must supply "
                "a valid OAuth secret either using the --oauth-secret "
                "CLI argument or PANEL_OAUTH_SECRET environment "
                "variable.")
        if args.oauth_extra_params:
            config.oauth_extra_params = ast.literal_eval(args.oauth_extra_params)
        if config.oauth_encryption_key and args.oauth_encryption_key:
            raise ValueError(
                "Supply OAuth encryption key either using environment "
                "variable or via explicit argument, not both.")
        elif args.oauth_encryption_key:
            encryption_key = args.oauth_encryption_key.encode('ascii')
            try:
                key = base64.urlsafe_b64decode(encryption_key)
            except Exception:
                raise ValueError(
                    "OAuth encryption key was not a valid base64 "
                    "string. Generate an encryption key with "
                    "`panel oauth-secret` and ensure you did not "
                    "truncate the returned string.")
            if len(key) != 32:
                raise ValueError(
                    "OAuth encryption key must be 32 url-safe "
                    "base64-encoded bytes.")
            config.oauth_encryption_key = encryption_key
        else:
            print("WARNING: OAuth has not been configured with an "
                  "encryption key and will potentially leak "
                  "credentials in cookies and a JWT token embedded "
                  "in the served website. Use at your own risk or "
                  "generate a key with the `panel oauth-secret` CLI "
                  "command and then provide it to `panel serve` "
                  "using the PANEL_OAUTH_ENCRYPTION environment "
                  "variable or the --oauth-encryption-key CLI "
                  "argument.")
        if config.oauth_encryption_key:
            try:
                from cryptography.fernet import Fernet
            except ImportError:
                raise ImportError(
                    "Using OAuth2 provider with Panel requires the "
                    "cryptography library. Install it with `pip install "
                    "cryptography` or `conda install cryptography`.")
            state.encryption = Fernet(config.oauth_encryption_key)
        if args.cookie_secret and config.cookie_secret:
            raise ValueError(
                "Supply cookie secret either using environment "
                "variable or via explicit argument, not both.")
        elif args.cookie_secret:
            config.cookie_secret = args.cookie_secret
        else:
            raise ValueError(
                "When enabling an OAuth provider you must supply "
                "a valid cookie_secret either using the --cookie-secret "
                "CLI argument or the PANEL_COOKIE_SECRET environment "
                "variable.")
        kwargs['auth_provider'] = OAuthProvider(error_template=args.oauth_error_template)

        if args.oauth_redirect_uri and config.oauth_redirect_uri:
            raise ValueError(
                "Supply OAuth redirect URI either using environment "
                "variable or via explicit argument, not both.")
        elif args.oauth_redirect_uri:
            config.oauth_redirect_uri = args.oauth_redirect_uri

        if args.oauth_jwt_user and config.oauth_jwt_user:
            raise ValueError(
                "Supply OAuth JWT user either using environment "
                "variable or via explicit argument, not both.")
        elif args.oauth_jwt_user:
            config.oauth_jwt_user = args.oauth_jwt_user

    if config.cookie_secret:
        kwargs['cookie_secret'] = config.cookie_secret

    return kwargs
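A hedged sketch of the encryption-key check above: a Fernet key is 32 url-safe base64-encoded bytes, which matches what the `panel oauth-secret` command referenced in the error message is meant to produce. The key below is generated locally purely for illustration.

# Hedged sketch: validating and using an OAuth encryption key.
import base64
from cryptography.fernet import Fernet  # optional dependency, as checked above

encryption_key = Fernet.generate_key()          # stand-in for PANEL_OAUTH_ENCRYPTION
key = base64.urlsafe_b64decode(encryption_key)  # must decode cleanly...
assert len(key) == 32                           # ...to exactly 32 bytes
fernet = Fernet(encryption_key)                 # same call as state.encryption above
token = fernet.encrypt(b"oauth credentials")
assert fernet.decrypt(token) == b"oauth credentials"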