def _validate_db_class(self, proposal): value = proposal.value if isinstance(value, str): # if it's a string, import it value = _db_shortcuts.get(value.lower(), value) return import_item(value) return value
def _init_transformers(self): self._transformers = [] for value in self.transformers: if isinstance(value, str): value = import_item(value) self._transformers.append(value())
def __init__(self, package, versionspec):
    """Record whether *package*'s installed version matches *versionspec*.

    Pre-release tags are stripped from the spec so a deprecation like
    '>=4.1.0' already fires during 4.1.0-dev and 4.1.0-rc builds, not
    only once 4.1.0 final is released.
    """
    spec = versionspec.split('-')[0]
    current_version = traitlets.import_item(package + '.__version__')
    self.match = semver.match(current_version, spec)
    self.package = package
    self.spec = spec
def get_pkg_path(data_name, package=None):
    """Return the path of *data_name* inside *package*'s install directory.

    The name is normalized and joined onto the package directory; if the
    result is not an existing directory, its parent directory is
    returned instead.

    NOTE(review): *package* defaults to None but is passed straight to
    ``import_item`` -- callers appear to be expected to always supply
    it; confirm before relying on the default.
    """
    relative = os.path.normpath(data_name)
    pkg_dir = os.path.dirname(import_item(package).__file__)
    full_path = os.path.join(pkg_dir, relative)
    if os.path.isdir(full_path):
        return full_path
    return os.path.dirname(full_path)  # pragma: no cover
def _patch_app_base_handlers(app):
    """Patch Hub Authentication into the base handlers of an app

    Patches HubAuthenticatedHandler into:

    - App.base_handler_class (if defined)
    - jupyter_server's JupyterHandler (if already imported)
    - notebook's IPythonHandler (if already imported)
    """
    base_handlers = []

    app_base_handler = getattr(app, "base_handler_class", None)
    if app_base_handler is not None:
        base_handlers.append(app_base_handler)

    # only pick up jupyter_server / notebook handlers whose module is
    # already loaded -- importing them here would be a new side effect
    for dotted_name in (
        "jupyter_server.base.handlers.JupyterHandler",
        "notebook.base.handlers.IPythonHandler",
    ):
        module_name = dotted_name.rsplit(".", 1)[0]
        if module_name in sys.modules:
            base_handlers.append(import_item(dotted_name))

    if not base_handlers:
        # nothing found yet: fall back to the app's base package
        pkg = detect_base_package(app.__class__)
        if pkg == "jupyter_server":
            fallback = import_item(
                "jupyter_server.base.handlers.JupyterHandler")
        elif pkg == "notebook":
            fallback = import_item("notebook.base.handlers.IPythonHandler")
        else:
            raise ValueError(
                f"{app.__class__.__name__}.base_handler_class must be defined")
        base_handlers.append(fallback)

    # mix HubAuthenticatedHandler into every collected base class
    for handler_cls in base_handlers:
        patch_base_handler(handler_cls)

    # return the first entry
    return base_handlers[0]
def validate(self, obj, value):
    """Trait validation: accept any callable, importing dotted-path strings.

    A string value is resolved with ``traitlets.import_item`` first; the
    final value must be callable, otherwise ``self.error`` is invoked.

    Returns the (possibly imported) callable.
    """
    if isinstance(value, str):
        try:
            value = traitlets.import_item(value)
        except Exception:
            # any import failure means the string was not a valid
            # dotted path -- report it as a trait error
            self.error(obj, value)
    # the builtin callable() replaces the obsolete six.callable shim:
    # callable() has been a builtin again since Python 3.2, and
    # six.callable is deprecated/removed in modern six releases
    if callable(value):
        return value
    else:
        self.error(obj, value)
def __init__(self, **kwargs):
    """Build one contents manager per mount point in ``filesystem_scheme``.

    Raises ValueError when two scheme entries share the same root.
    """
    super(MixedContentsManager, self).__init__(**kwargs)
    self.managers = {}

    ## check consistency of scheme.
    roots = [scheme['root'] for scheme in self.filesystem_scheme]
    if len(set(roots)) != len(roots):
        raise ValueError('Scheme should not mount two contents manager on the same mountpoint')

    # children get this manager as their traitlets parent
    kwargs.update({'parent': self})
    for scheme in self.filesystem_scheme:
        manager_class = import_item(scheme['contents'])
        self.managers[scheme['root']] = manager_class(**kwargs)
def _import_mapping(mapping, original=None):
    """import any string-keys in a type mapping"""
    log = get_logger()
    log.debug("Importing canning map")
    for key in list(mapping):
        if not isinstance(key, str):
            continue
        try:
            cls = import_item(key)
        except Exception:
            # only message on user-added classes
            if original and key not in original:
                log.error("canning class not importable: %r", key, exc_info=True)
            mapping.pop(key)
        else:
            # replace the dotted-path string key with the imported class
            mapping[cls] = mapping.pop(key)
def generate_api(api_path):
    """Populate a spectrochempy sub-package's ``api`` module.

    Walks the packages under the sub-package containing *api_path*,
    copies every name listed in each package's ``__all__`` onto the
    sub-package's already-imported ``api`` module, and attaches any
    ``__dataset_methods__`` entries onto ``NDDataset`` as well.

    Returns the accumulated list of exported names.
    """
    # name of the package
    dirname, name = os.path.split(os.path.split(api_path)[0])
    if not dirname.endswith("spectrochempy"):
        # nested sub-package: prefix the parent directory name
        dirname, _name = os.path.split(dirname)
        name = _name + "." + name
    # both the sub-package and its .api module must already be in sys.modules
    pkgs = sys.modules["spectrochempy.%s" % name]
    api = sys.modules["spectrochempy.%s.api" % name]
    pkgs = list_packages(pkgs)

    __all__ = []

    for pkg in pkgs:
        # skip api modules themselves and anything test-related
        if pkg.endswith("api") or "test" in pkg:
            continue
        try:
            pkg = import_item(pkg)
        except Exception:
            # NOTE(review): when import_item raises, the assignment did not
            # happen, so ``pkg`` is still the dotted-path *string* here; a
            # str never has ``__all__``, so this always continues and the
            # raise below appears unreachable -- confirm whether failed
            # imports are really meant to be skipped silently.
            if not hasattr(pkg, "__all__"):
                continue
            raise ImportError(pkg)
        if not hasattr(pkg, "__all__"):
            continue

        a = getattr(pkg, "__all__", [])
        dmethods = getattr(pkg, "__dataset_methods__", [])

        __all__ += a
        for item in a:
            # set general method for the current package API
            obj = getattr(pkg, item)
            setattr(api, item, obj)

            # some methods are class method of NDDatasets
            if item in dmethods:
                # imported here, presumably to avoid a circular import at
                # module load time -- TODO confirm
                from spectrochempy.core.dataset.nddataset import NDDataset

                setattr(NDDataset, item, getattr(pkg, item))
    return __all__
def __init__(self, **kwargs):
    """Instantiate and configure one sub-manager per mount point.

    Raises ValueError when two scheme entries share the same root.
    """
    super(MixedContentsManager, self).__init__(**kwargs)
    self.managers = {}

    ## check consistency of scheme.
    roots = [entry["root"] for entry in self.filesystem_scheme]
    if len(set(roots)) != len(roots):
        raise ValueError(
            "Scheme should not mount two contents manager on the same mountpoint"
        )

    # children get this manager as their traitlets parent
    kwargs.update({"parent": self})
    for entry in self.filesystem_scheme:
        manager = import_item(entry["class"])(**kwargs)
        self.managers[entry["root"]] = manager
        # apply any per-manager configuration overrides
        if entry["config"]:
            for attr_name, attr_value in entry["config"].items():
                setattr(manager, attr_name, attr_value)
    self.log.debug("MANAGERS: %s", self.managers)
def make_singleuser_app(App):
    """Make and return a singleuser notebook app given existing notebook or jupyter_server Application classes, mix-in jupyterhub auth.

    Instances of App must have the following attributes defining classes:

    - .login_handler_class
    - .logout_handler_class
    - .base_handler_class (only required if not a subclass of the default app
      in jupyter_server or notebook)

    App should be a subclass of `notebook.notebookapp.NotebookApp`
    or `jupyter_server.serverapp.ServerApp`.
    """
    # instantiate the app once only to introspect its handler classes,
    # aliases, flags, and logger
    empty_parent_app = App()
    log = empty_parent_app.log

    # detect base classes
    LoginHandler = empty_parent_app.login_handler_class
    LogoutHandler = empty_parent_app.logout_handler_class
    BaseHandler = getattr(empty_parent_app, "base_handler_class", None)

    if BaseHandler is None:
        # no explicit base handler: derive one from the app's base package
        pkg = detect_base_package(App)
        if pkg == "jupyter_server":
            BaseHandler = import_item(
                "jupyter_server.base.handlers.JupyterHandler")
        elif pkg == "notebook":
            BaseHandler = import_item("notebook.base.handlers.IPythonHandler")
        else:
            raise ValueError("{}.base_handler_class must be defined".format(
                App.__name__))

    # patch-in HubAuthenticatedHandler to BaseHandler,
    # so anything inheriting from BaseHandler uses Hub authentication
    # NOTE: this mutates BaseHandler.__bases__ in place, so the change is
    # visible to *every* user of that class, not just this app
    if HubAuthenticatedHandler not in BaseHandler.__bases__:
        new_bases = (HubAuthenticatedHandler, ) + BaseHandler.__bases__
        log.debug(
            f"Patching {BaseHandler}{BaseHandler.__bases__} -> {BaseHandler}{new_bases}"
        )
        BaseHandler.__bases__ = new_bases

    # We've now inserted our class as a parent of BaseHandler,
    # but we also need to ensure BaseHandler *itself* doesn't
    # override the public tornado API methods we have inserted.
    # If they are defined in BaseHandler, explicitly replace them with our methods.
    for name in ("get_current_user", "get_login_url"):
        if name in BaseHandler.__dict__:
            log.debug(
                f"Overriding {BaseHandler}.{name} with HubAuthenticatedHandler.{name}"
            )
            method = getattr(HubAuthenticatedHandler, name)
            setattr(BaseHandler, name, method)

    # create Handler classes from mixins + bases
    class JupyterHubLoginHandler(JupyterHubLoginHandlerMixin, LoginHandler):
        pass

    class JupyterHubLogoutHandler(JupyterHubLogoutHandlerMixin, LogoutHandler):
        pass

    class OAuthCallbackHandler(OAuthCallbackHandlerMixin, BaseHandler):
        pass

    # create merged aliases & flags
    # module-level aliases/flags take precedence over the app's own
    merged_aliases = {}
    merged_aliases.update(empty_parent_app.aliases or {})
    merged_aliases.update(aliases)

    merged_flags = {}
    merged_flags.update(empty_parent_app.flags or {})
    merged_flags.update(flags)

    # create mixed-in App class, bringing it all together
    class SingleUserNotebookApp(SingleUserNotebookAppMixin, App):
        aliases = merged_aliases
        flags = merged_flags
        classes = empty_parent_app.classes + [HubOAuth]

        login_handler_class = JupyterHubLoginHandler
        logout_handler_class = JupyterHubLogoutHandler
        oauth_callback_handler_class = OAuthCallbackHandler

    return SingleUserNotebookApp
def make_singleuser_app(App):
    """Make and return a singleuser notebook app given existing notebook or jupyter_server Application classes, mix-in jupyterhub auth.

    Instances of App must have the following attributes defining classes:

    - .login_handler_class
    - .logout_handler_class
    - .base_handler_class (only required if not a subclass of the default app
      in jupyter_server or notebook)

    App should be a subclass of `notebook.notebookapp.NotebookApp`
    or `jupyter_server.serverapp.ServerApp`.
    """
    # instantiate once only to introspect handler classes, aliases and flags
    probe_app = App()

    # detect base classes
    LoginHandler = probe_app.login_handler_class
    LogoutHandler = probe_app.logout_handler_class
    BaseHandler = getattr(probe_app, "base_handler_class", None)
    if BaseHandler is None:
        # no explicit base handler: derive one from the app's base package
        pkg = detect_base_package(App)
        if pkg == "jupyter_server":
            BaseHandler = import_item("jupyter_server.base.handlers.JupyterHandler")
        elif pkg == "notebook":
            BaseHandler = import_item("notebook.base.handlers.IPythonHandler")
        else:
            raise ValueError(
                "{}.base_handler_class must be defined".format(App.__name__)
            )

    # create Handler classes from mixins + bases
    class JupyterHubLoginHandler(JupyterHubLoginHandlerMixin, LoginHandler):
        pass

    class JupyterHubLogoutHandler(JupyterHubLogoutHandlerMixin, LogoutHandler):
        pass

    class OAuthCallbackHandler(OAuthCallbackHandlerMixin, BaseHandler):
        pass

    # merge aliases & flags; module-level entries win over the app's own
    merged_aliases = {**(probe_app.aliases or {}), **aliases}
    merged_flags = {**(probe_app.flags or {}), **flags}

    # create mixed-in App class, bringing it all together
    class SingleUserNotebookApp(SingleUserNotebookAppMixin, App):
        aliases = merged_aliases
        flags = merged_flags
        classes = probe_app.classes + [HubOAuth]

        login_handler_class = JupyterHubLoginHandler
        logout_handler_class = JupyterHubLogoutHandler
        oauth_callback_handler_class = OAuthCallbackHandler

    return SingleUserNotebookApp
def from_dict(cls, d, **kwargs):
    """Construct a Cluster from serialized state

    *d* is the dict produced by ``to_dict``; extra ``**kwargs`` are
    passed to the Cluster constructor.  If the serialized state names a
    different Cluster subclass, construction is delegated to that class.

    NOTE(review): the ``cls`` first parameter suggests this is a
    classmethod; the decorator is outside this view -- confirm.
    """
    cluster_info = d["cluster"]
    if cluster_info.get("class"):
        specified_cls = import_item(cluster_info["class"])
        if specified_cls is not cls:
            # specified a custom Cluster class,
            # dispatch to from_dict from that class
            return specified_cls.from_dict(d, **kwargs)

    # restoring from saved state should not shut the cluster down at
    # interpreter exit unless the caller explicitly opts in
    kwargs.setdefault("shutdown_atexit", False)
    self = cls(**kwargs)
    # restore serializable traits: first from the nested "cluster" section...
    for attr in self.traits(to_dict=True):
        if attr in cluster_info:
            setattr(self, attr, cluster_info[attr])
    # ...then top-level keys of *d* take precedence
    for attr in self.traits(to_dict=True):
        if attr in d:
            setattr(self, attr, d[attr])

    cluster_key = ClusterManager._cluster_key(self)

    if d.get("controller"):
        controller_info = d["controller"]
        self.controller_launcher_class = controller_info["class"]
        # after traitlet coercion, which imports strings
        cls = self.controller_launcher_class
        if controller_info["state"]:
            try:
                self.controller = cls.from_dict(
                    controller_info["state"], parent=self
                )
            except launcher.NotRunning as e:
                self.log.error(f"Controller for {cluster_key} not running: {e}")
            else:
                # track controller exit so cluster state stays accurate
                self.controller.on_stop(self._controller_stopped)

    engine_info = d.get("engines")
    if engine_info:
        self.engine_launcher_class = engine_info["class"]
        # after traitlet coercion, which imports strings
        cls = self.engine_launcher_class
        for engine_set_id, engine_state in engine_info.get("sets", {}).items():
            try:
                self.engines[engine_set_id] = engine_set = cls.from_dict(
                    engine_state,
                    engine_set_id=engine_set_id,
                    parent=self,
                )
            except launcher.NotRunning as e:
                self.log.error(
                    f"Engine set {cluster_key}{engine_set_id} not running: {e}"
                )
            else:
                engine_set.on_stop(partial(self._engines_stopped, engine_set_id))

    # check if state changed
    if self.to_dict() != d:
        # if so, update our cluster file
        self.update_cluster_file()
    return self
async def start_python(self, name, starter, path, body):
    """Resolve and await a python starter's configured callable.

    The dotted path in ``starter["callable"]`` is imported via
    ``T.import_item`` and awaited with ``(name, starter, path, body, self)``.
    """
    starter_func = T.import_item(starter["callable"])
    result = await starter_func(name, starter, path, body, self)
    return result
.. versionchanged:: 2.0 Default app changed to launch `jupyter labhub`. Use JUPYTERHUB_SINGLEUSER_APP=notebook.notebookapp.NotebookApp for the legacy 'classic' notebook server. """ import os from traitlets import import_item from .mixins import make_singleuser_app JUPYTERHUB_SINGLEUSER_APP = os.environ.get("JUPYTERHUB_SINGLEUSER_APP") if JUPYTERHUB_SINGLEUSER_APP: App = import_item(JUPYTERHUB_SINGLEUSER_APP) else: App = None _import_error = None for JUPYTERHUB_SINGLEUSER_APP in ( "jupyter_server.serverapp.ServerApp", "notebook.notebookapp.NotebookApp", ): try: App = import_item(JUPYTERHUB_SINGLEUSER_APP) except ImportError as e: continue if _import_error is None: _import_error = e else: break
def init_schedulers(self):
    """Create and register the controller's scheduler/relay devices.

    Builds the IOPub relay, the direct (MUX) queue, the control queue,
    and a task scheduler (pure-ZMQ, none, or Python depending on
    config), appending each device to ``self.children``.  Also launches
    the broadcast schedulers and lifts HWM limits on the relay devices.
    """
    children = self.children
    # self.mq_class is a dotted path; resolve it to the queue class
    mq = import_item(str(self.mq_class))
    # ensure session key is shared across sessions
    self.config.Session.key = self.session.key
    ident = self.session.bsession

    def add_auth(q):
        """Add CURVE auth to a monitored queue"""
        if not self.enable_curve:
            return False
        # both in and out sockets act as CURVE servers
        q.setsockopt_in(zmq.CURVE_SERVER, 1)
        q.setsockopt_in(zmq.CURVE_SECRETKEY, self.curve_secretkey)
        q.setsockopt_out(zmq.CURVE_SERVER, 1)
        q.setsockopt_out(zmq.CURVE_SECRETKEY, self.curve_secretkey)
        # monitor is a client
        pub, secret = zmq.curve_keypair()
        q.setsockopt_mon(zmq.CURVE_SERVERKEY, self.curve_publickey)
        q.setsockopt_mon(zmq.CURVE_SECRETKEY, secret)
        q.setsockopt_mon(zmq.CURVE_PUBLICKEY, pub)

    # disambiguate url, in case of *
    monitor_url = disambiguate_url(self.monitor_url)
    # maybe_inproc = 'inproc://monitor' if self.use_threads else monitor_url
    # IOPub relay (in a Process)
    q = mq(zmq.SUB, zmq.PUB, zmq.PUB, b'iopub', b'N/A')
    add_auth(q)
    q.name = "IOPubScheduler"
    # engines publish, clients subscribe
    q.bind_in(self.engine_url('iopub'))
    q.setsockopt_in(zmq.SUBSCRIBE, b'')
    q.bind_out(self.client_url('iopub'))
    q.setsockopt_out(zmq.IDENTITY, ident + b"_iopub")
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    # Multiplexer Queue (in a Process)
    q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'in', b'out')
    add_auth(q)
    q.name = "DirectScheduler"
    q.bind_in(self.client_url('mux'))
    q.setsockopt_in(zmq.IDENTITY, b'mux_in')
    q.bind_out(self.engine_url('mux'))
    q.setsockopt_out(zmq.IDENTITY, b'mux_out')
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    # Control Queue (in a Process)
    q = mq(zmq.ROUTER, zmq.ROUTER, zmq.PUB, b'incontrol', b'outcontrol')
    add_auth(q)
    q.name = "ControlScheduler"
    q.bind_in(self.client_url('control'))
    q.setsockopt_in(zmq.IDENTITY, b'control_in')
    q.bind_out(self.engine_url('control'))
    q.setsockopt_out(zmq.IDENTITY, b'control_out')
    q.connect_mon(monitor_url)
    q.daemon = True
    children.append(q)

    # pick the task scheduling scheme: explicit config wins over default
    if 'TaskScheduler.scheme_name' in self.config:
        scheme = self.config.TaskScheduler.scheme_name
    else:
        scheme = TaskScheduler.scheme_name.default_value
    # Task Queue (in a Process)
    if scheme == 'pure':
        self.log.warning("task::using pure DEALER Task scheduler")
        q = mq(zmq.ROUTER, zmq.DEALER, zmq.PUB, b'intask', b'outtask')
        add_auth(q)
        q.name = "TaskScheduler(pure)"
        # q.setsockopt_out(zmq.HWM, hub.hwm)
        q.bind_in(self.client_url('task'))
        q.setsockopt_in(zmq.IDENTITY, b'task_in')
        q.bind_out(self.engine_url('task'))
        q.setsockopt_out(zmq.IDENTITY, b'task_out')
        q.connect_mon(monitor_url)
        q.daemon = True
        children.append(q)
    elif scheme == 'none':
        self.log.warning("task::using no Task scheduler")
    else:
        # any other scheme name selects the Python TaskScheduler
        self.log.info("task::using Python %s Task scheduler" % scheme)
        self.launch_python_scheduler(
            'TaskScheduler',
            self.get_python_scheduler_args('task', TaskScheduler, monitor_url),
            children,
        )

    self.launch_broadcast_schedulers(monitor_url, children)

    # set unlimited HWM for all relay devices
    if hasattr(zmq, 'SNDHWM'):
        q = children[0]
        q.setsockopt_in(zmq.RCVHWM, 0)
        q.setsockopt_out(zmq.SNDHWM, 0)

        for q in children[1:]:
            # skip children that are not monitored-queue devices
            if not hasattr(q, 'setsockopt_in'):
                continue
            q.setsockopt_in(zmq.SNDHWM, 0)
            q.setsockopt_in(zmq.RCVHWM, 0)
            q.setsockopt_out(zmq.SNDHWM, 0)
            q.setsockopt_out(zmq.RCVHWM, 0)
            q.setsockopt_mon(zmq.SNDHWM, 0)