def _autodetect_language_servers(self):
    """Yield ``(key, spec)`` pairs from all registered spec-finder entry points.

    A spec finder that fails to load, fails when invoked, or returns specs
    that do not validate against ``LANGUAGE_SERVER_SPEC_MAP`` is logged and
    skipped, so one broken plugin cannot break autodetection for the rest.
    """
    entry_points = []

    try:
        entry_points = list(pkg_resources.iter_entry_points(EP_SPEC_V1))
    except Exception:  # pragma: no cover
        self.log.exception("Failed to load entry_points")

    for ep in entry_points:
        try:
            spec_finder = ep.load()  # type: SpecMaker
        except Exception as err:  # pragma: no cover
            # FIX: Logger.warn is a deprecated alias of Logger.warning
            self.log.warning(
                _("Failed to load language server spec finder `{}`: \n{}").format(
                    ep.name, err
                )
            )
            continue

        try:
            specs = spec_finder(self)
        except Exception as err:  # pragma: no cover
            self.log.warning(
                _(
                    "Failed to fetch commands from language server spec finder"
                    "`{}`:\n{}"
                ).format(ep.name, err)
            )
            continue

        # validate before yielding so downstream consumers only ever see
        # schema-conformant specs
        errors = list(LANGUAGE_SERVER_SPEC_MAP.iter_errors(specs))

        if errors:  # pragma: no cover
            self.log.warning(
                _(
                    "Failed to validate commands from language server spec finder"
                    "`{}`:\n{}"
                ).format(ep.name, errors)
            )
            continue

        for key, spec in specs.items():
            yield key, spec
class Jupyterfs(Configurable):
    """Configurable holding the server-side jupyter-fs settings."""

    # Contents manager class used at the server root; defaults to the
    # notebook server's LargeFileManager and must be a ContentsManager
    # subclass.
    root_manager_class = Type(
        config=True,
        default_value=LargeFileManager,
        help=
        _("the root contents manager class to use. Used by the Jupyterlab default filebrowser and elsewhere"
          ),
        klass=ContentsManager,
    )

    # Server-side fsspec resource definitions; each entry is presumably a
    # dict with "name" and "url" keys (see the commented-out trait below) —
    # TODO confirm against consumers.
    resources = List(
        config=True,
        default_value=[],
        help=_("server-side definitions of fsspec resources for jupyter-fs"),
        # trait=Dict(traits={"name": Unicode, "url": Unicode}),
    )
class ContentsManager(LoggingConfigurable):
    """Base class for serving files and directories.

    This serves any text or binary file,
    as well as directories,
    with special handling for JSON notebook documents.

    Most APIs take a path argument,
    which is always an API-style unicode path,
    and always refers to a directory.

    - unicode, not url-escaped
    - '/'-separated
    - leading and trailing '/' will be stripped
    - if unspecified, path defaults to '',
      indicating the root path.
    """

    root_dir = Unicode('/', config=True)

    allow_hidden = Bool(False, config=True, help="Allow access to hidden files")

    notary = Instance(sign.NotebookNotary)

    def _notary_default(self):
        # Lazily create a notary bound to this manager for notebook signing.
        return sign.NotebookNotary(parent=self)

    hide_globs = List(Unicode(), [
        u'__pycache__', '*.pyc', '*.pyo',
        '.DS_Store', '*.so', '*.dylib', '*~',
    ], config=True, help="""
        Glob patterns to hide in file and directory listings.
    """)

    untitled_notebook = Unicode(
        _("Untitled"), config=True,
        help="The base name used when creating untitled notebooks.")

    untitled_file = Unicode(
        "untitled", config=True,
        help="The base name used when creating untitled files.")

    untitled_directory = Unicode(
        "Untitled Folder", config=True,
        help="The base name used when creating untitled directories.")

    pre_save_hook = Any(None, config=True, allow_none=True,
        help="""Python callable or importstring thereof

        To be called on a contents model prior to save.

        This can be used to process the structure,
        such as removing notebook outputs or other side effects that
        should not be saved.

        It will be called as (all arguments passed by keyword)::

            hook(path=path, model=model, contents_manager=self)

        - model: the model to be saved. Includes file contents.
          Modifying this dict will affect the file that is stored.
        - path: the API path of the save destination
        - contents_manager: this ContentsManager instance
        """)

    @validate('pre_save_hook')
    def _validate_pre_save_hook(self, proposal):
        # Accept either a callable or an import string that resolves to one.
        value = proposal['value']
        if isinstance(value, string_types):
            # NOTE(review): imports self.pre_save_hook rather than the
            # proposed value — presumably equivalent at validation time;
            # confirm against traitlets validation order.
            value = import_item(self.pre_save_hook)
        if not callable(value):
            raise TraitError("pre_save_hook must be callable")
        return value

    def run_pre_save_hook(self, model, path, **kwargs):
        """Run the pre-save hook if defined, and log errors"""
        if self.pre_save_hook:
            try:
                self.log.debug("Running pre-save hook on %s", path)
                self.pre_save_hook(model=model, path=path, contents_manager=self, **kwargs)
            except Exception:
                # Hook failures are logged, not raised, so a misbehaving
                # hook cannot block the save itself.
                self.log.error("Pre-save hook failed on %s", path, exc_info=True)

    checkpoints_class = Type(Checkpoints, config=True)
    checkpoints = Instance(Checkpoints, config=True)
    checkpoints_kwargs = Dict(config=True)

    @default('checkpoints')
    def _default_checkpoints(self):
        return self.checkpoints_class(**self.checkpoints_kwargs)

    @default('checkpoints_kwargs')
    def _default_checkpoints_kwargs(self):
        return dict(
            parent=self,
            log=self.log,
        )

    files_handler_class = Type(
        FilesHandler, klass=RequestHandler, allow_none=True, config=True,
        help="""handler class to use when serving raw file requests.

        Default is a fallback that talks to the ContentsManager API,
        which may be inefficient, especially for large files.

        Local files-based ContentsManagers can use a StaticFileHandler subclass,
        which will be much more efficient.

        Access to these files should be Authenticated.
        """)

    files_handler_params = Dict(
        config=True,
        help="""Extra parameters to pass to files_handler_class.

        For example, StaticFileHandlers generally expect a `path` argument
        specifying the root directory from which to serve files.
        """)

    def get_extra_handlers(self):
        """Return additional handlers

        Default: self.files_handler_class on /files/.*
        """
        handlers = []
        if self.files_handler_class:
            handlers.append(
                (r"/files/(.*)", self.files_handler_class, self.files_handler_params)
            )
        return handlers

    # ContentsManager API part 1: methods that must be
    # implemented in subclasses.

    def dir_exists(self, path):
        """Does a directory exist at the given path?

        Like os.path.isdir

        Override this method in subclasses.

        Parameters
        ----------
        path : string
            The path to check

        Returns
        -------
        exists : bool
            Whether the path does indeed exist.
        """
        raise NotImplementedError

    def is_hidden(self, path):
        """Is path a hidden directory or file?

        Parameters
        ----------
        path : string
            The path to check. This is an API path (`/` separated,
            relative to root dir).

        Returns
        -------
        hidden : bool
            Whether the path is hidden.
        """
        raise NotImplementedError

    def file_exists(self, path=''):
        """Does a file exist at the given path?

        Like os.path.isfile

        Override this method in subclasses.

        Parameters
        ----------
        path : string
            The API path of a file to check for.

        Returns
        -------
        exists : bool
            Whether the file exists.
        """
        raise NotImplementedError('must be implemented in a subclass')

    def exists(self, path):
        """Does a file or directory exist at the given path?

        Like os.path.exists

        Parameters
        ----------
        path : string
            The API path of a file or directory to check for.

        Returns
        -------
        exists : bool
            Whether the target exists.
        """
        return self.file_exists(path) or self.dir_exists(path)

    def get(self, path, content=True, type=None, format=None):
        """Get a file or directory model."""
        raise NotImplementedError('must be implemented in a subclass')

    def save(self, model, path):
        """
        Save a file or directory model to path.

        Should return the saved model with no content.  Save implementations
        should call self.run_pre_save_hook(model=model, path=path)
        prior to writing any data.
        """
        raise NotImplementedError('must be implemented in a subclass')

    def delete_file(self, path):
        """Delete the file or directory at path."""
        raise NotImplementedError('must be implemented in a subclass')

    def rename_file(self, old_path, new_path):
        """Rename a file or directory."""
        raise NotImplementedError('must be implemented in a subclass')

    # ContentsManager API part 2: methods that have useable default
    # implementations, but can be overridden in subclasses.

    def delete(self, path):
        """Delete a file/directory and any associated checkpoints."""
        path = path.strip('/')
        if not path:
            raise HTTPError(400, "Can't delete root")
        self.delete_file(path)
        self.checkpoints.delete_all_checkpoints(path)

    def rename(self, old_path, new_path):
        """Rename a file and any checkpoints associated with that file."""
        self.rename_file(old_path, new_path)
        self.checkpoints.rename_all_checkpoints(old_path, new_path)

    def update(self, model, path):
        """Update the file's path

        For use in PATCH requests, to enable renaming a file without
        re-uploading its contents. Only used for renaming at the moment.
        """
        path = path.strip('/')
        new_path = model.get('path', path).strip('/')
        if path != new_path:
            self.rename(path, new_path)
        # return a content-free model of the (possibly moved) target
        model = self.get(new_path, content=False)
        return model

    def info_string(self):
        # Generic banner; subclasses override this with location details.
        return "Serving contents"

    def get_kernel_path(self, path, model=None):
        """Return the API path for the kernel

        KernelManagers can turn this value into a filesystem path,
        or ignore it altogether.

        The default value here will start kernels in the directory of the
        notebook server. FileContentsManager overrides this to use the
        directory containing the notebook.
        """
        return ''

    def increment_filename(self, filename, path='', insert=''):
        """Increment a filename until it is unique.

        Parameters
        ----------
        filename : unicode
            The name of a file, including extension
        path : unicode
            The API path of the target's directory
        insert: unicode
            The characters to insert after the base filename

        Returns
        -------
        name : unicode
            A filename that is unique, based on the input filename.
        """
        # Extract the full suffix from the filename (e.g. .tar.gz)
        path = path.strip('/')
        basename, dot, ext = filename.rpartition('.')
        if ext != 'ipynb':
            # For anything but notebooks, split at the FIRST dot so that
            # multi-part extensions (e.g. .tar.gz) stay with the suffix.
            basename, dot, ext = filename.partition('.')

        suffix = dot + ext

        # Try name, name<insert>1, name<insert>2, ... until unused.
        for i in itertools.count():
            if i:
                insert_i = '{}{}'.format(insert, i)
            else:
                insert_i = ''
            name = u'{basename}{insert}{suffix}'.format(basename=basename,
                insert=insert_i, suffix=suffix)
            if not self.exists(u'{}/{}'.format(path, name)):
                break
        return name

    def validate_notebook_model(self, model):
        """Add failed-validation message to model"""
        try:
            validate_nb(model['content'])
        except ValidationError as e:
            model['message'] = u'Notebook validation failed: {}:\n{}'.format(
                e.message, json.dumps(e.instance, indent=1, default=lambda obj: '<UNKNOWN>'),
            )
        return model

    def new_untitled(self, path='', type='', ext=''):
        """Create a new untitled file or directory in path

        path must be a directory

        File extension can be specified.

        Use `new` to create files with a fully specified path (including filename).
        """
        path = path.strip('/')
        if not self.dir_exists(path):
            raise HTTPError(404, 'No such directory: %s' % path)

        model = {}
        if type:
            model['type'] = type

        # extension wins only when no explicit type was given
        if ext == '.ipynb':
            model.setdefault('type', 'notebook')
        else:
            model.setdefault('type', 'file')

        insert = ''
        if model['type'] == 'directory':
            untitled = self.untitled_directory
            insert = ' '
        elif model['type'] == 'notebook':
            untitled = self.untitled_notebook
            ext = '.ipynb'
        elif model['type'] == 'file':
            untitled = self.untitled_file
        else:
            raise HTTPError(400, "Unexpected model type: %r" % model['type'])

        name = self.increment_filename(untitled + ext, path, insert=insert)
        path = u'{0}/{1}'.format(path, name)
        return self.new(model, path)

    def new(self, model=None, path=''):
        """Create a new file or directory and return its model with no content.

        To create a new untitled entity in a directory, use `new_untitled`.
        """
        path = path.strip('/')
        if model is None:
            model = {}

        if path.endswith('.ipynb'):
            model.setdefault('type', 'notebook')
        else:
            model.setdefault('type', 'file')

        # no content, not a directory, so fill out new-file model
        if 'content' not in model and model['type'] != 'directory':
            if model['type'] == 'notebook':
                model['content'] = new_notebook()
                model['format'] = 'json'
            else:
                model['content'] = ''
                model['type'] = 'file'
                model['format'] = 'text'
        model = self.save(model, path)
        return model

    def copy(self, from_path, to_path=None):
        """Copy an existing file and return its new model.

        If to_path not specified, it will be the parent directory of from_path.
        If to_path is a directory, filename will increment `from_path-Copy#.ext`.
        Considering multi-part extensions, the Copy# part will be placed before the first dot for all the extensions except `ipynb`.
        For easier manual searching in case of notebooks, the Copy# part will be placed before the last dot.

        from_path must be a full path to a file.
        """
        path = from_path.strip('/')
        if to_path is not None:
            to_path = to_path.strip('/')

        if '/' in path:
            from_dir, from_name = path.rsplit('/', 1)
        else:
            from_dir = ''
            from_name = path

        model = self.get(path)
        # drop identity fields so save() fills them in for the new location
        model.pop('path', None)
        model.pop('name', None)
        if model['type'] == 'directory':
            raise HTTPError(400, "Can't copy directories")

        if to_path is None:
            to_path = from_dir
        if self.dir_exists(to_path):
            name = copy_pat.sub(u'.', from_name)
            to_name = self.increment_filename(name, to_path, insert='-Copy')
            to_path = u'{0}/{1}'.format(to_path, to_name)

        model = self.save(model, to_path)
        return model

    def log_info(self):
        self.log.info(self.info_string())

    def trust_notebook(self, path):
        """Explicitly trust a notebook

        Parameters
        ----------
        path : string
            The path of a notebook
        """
        model = self.get(path)
        nb = model['content']
        self.log.warning("Trusting notebook %s", path)
        self.notary.mark_cells(nb, True)
        self.check_and_sign(nb, path)

    def check_and_sign(self, nb, path=''):
        """Check for trusted cells, and sign the notebook.

        Called as a part of saving notebooks.

        Parameters
        ----------
        nb : dict
            The notebook dict
        path : string
            The notebook's path (for logging)
        """
        if self.notary.check_cells(nb):
            self.notary.sign(nb)
        else:
            self.log.warning("Notebook %s is not trusted", path)

    def mark_trusted_cells(self, nb, path=''):
        """Mark cells as trusted if the notebook signature matches.

        Called as a part of loading notebooks.

        Parameters
        ----------
        nb : dict
            The notebook object (in current nbformat)
        path : string
            The notebook's path (for logging)
        """
        trusted = self.notary.check_signature(nb)
        if not trusted:
            self.log.warning("Notebook %s is not trusted", path)
        self.notary.mark_cells(nb, trusted)

    def should_list(self, name):
        """Should this file/directory name be displayed in a listing?"""
        return not any(fnmatch(name, glob) for glob in self.hide_globs)

    # Part 3: Checkpoints API

    def create_checkpoint(self, path):
        """Create a checkpoint."""
        return self.checkpoints.create_checkpoint(self, path)

    def restore_checkpoint(self, checkpoint_id, path):
        """
        Restore a checkpoint.
        """
        self.checkpoints.restore_checkpoint(self, checkpoint_id, path)

    def list_checkpoints(self, path):
        return self.checkpoints.list_checkpoints(path)

    def delete_checkpoint(self, checkpoint_id, path):
        return self.checkpoints.delete_checkpoint(checkpoint_id, path)
def info_string(self):
    """Describe, for logging, where notebooks are being served from."""
    serving_from = self.root_dir
    return _("Serving notebooks from local directory: %s") % serving_from
class LanguageServerManager(LanguageServerManagerAPI):
    """ Manage language servers

    Builds the final language-server configuration (config + autodetected),
    owns one session per language set, and routes client/server messages
    through registered listeners.
    """

    # validated dict of language server specs, keyed by implementation name
    language_servers = Schema(
        validator=LANGUAGE_SERVER_SPEC_MAP,
        help=_("a dict of language server specs, keyed by implementation"),
    ).tag(config=True)  # type: KeyedLanguageServerSpecs

    autodetect = Bool(
        True,
        help=_("try to find known language servers in sys.prefix (and elsewhere)"),
    ).tag(config=True)  # type: bool

    sessions = Dict_(
        trait=Instance(LanguageServerSession),
        default_value={},
        help="sessions keyed by languages served",
    )  # type: Dict[Tuple[Text], LanguageServerSession]

    # traitlets-configured message listeners, by scope
    all_listeners = List_(trait=LoadableCallable).tag(config=True)
    server_listeners = List_(trait=LoadableCallable).tag(config=True)
    client_listeners = List_(trait=LoadableCallable).tag(config=True)

    @default("language_servers")
    def _default_language_servers(self):
        return {}

    def __init__(self, **kwargs):
        """ Before starting, perform all necessary configuration
        """
        super().__init__(**kwargs)

    def initialize(self, *args, **kwargs):
        """Run all initialization steps in dependency order."""
        self.init_language_servers()
        self.init_listeners()
        self.init_sessions()

    def init_language_servers(self) -> None:
        """ determine the final language server configuration.
        """
        language_servers = {}  # type: KeyedLanguageServerSpecs

        # copy the language servers before anybody monkeys with them
        language_servers_from_config = dict(self.language_servers)

        if self.autodetect:
            language_servers.update(self._autodetect_language_servers())

        # restore config (explicit configuration wins over autodetection)
        language_servers.update(language_servers_from_config)

        # coalesce the servers, allowing a user to opt-out by specifying `[]`
        self.language_servers = {
            key: spec
            for key, spec in language_servers.items()
            if spec.get("argv") and spec.get("languages")
        }

    def init_sessions(self):
        """ create, but do not initialize all sessions
        """
        sessions = {}
        for spec in self.language_servers.values():
            # key each session by the sorted tuple of languages it serves
            sessions[tuple(sorted(spec["languages"]))] = LanguageServerSession(
                spec=spec, parent=self
            )
        self.sessions = sessions

    def init_listeners(self):
        """ register traitlets-configured listeners
        """
        scopes = {
            MessageScope.ALL: [self.all_listeners, EP_LISTENER_ALL_V1],
            MessageScope.CLIENT: [self.client_listeners, EP_LISTENER_CLIENT_V1],
            MessageScope.SERVER: [self.server_listeners, EP_LISTENER_SERVER_V1],
        }
        for scope, trt_ep in scopes.items():
            listeners, entry_point = trt_ep

            for ept in pkg_resources.iter_entry_points(
                    entry_point):  # pragma: no cover
                try:
                    # BUG FIX: load the discovered entry point object `ept`;
                    # `entry_point` here is only the group-name string and
                    # has no .load() method.
                    listeners.append(ept.load())
                except Exception as err:
                    self.log.warning("Failed to load entry point %s: %s", ept,
                                     err)

            for listener in listeners:
                self.__class__.register_message_listener(
                    scope=scope.value)(listener)

    def subscribe(self, handler):
        """ subscribe a handler to session, or sta
        """
        sessions = []
        for languages, candidate_session in self.sessions.items():
            if handler.language in languages:
                sessions.append(candidate_session)
        if sessions:
            for session in sessions:
                # set-union so double subscription is a no-op
                session.handlers = set([handler]) | session.handlers

    async def on_client_message(self, message, handler):
        """Forward a client message to its sessions after listeners run."""
        await self.wait_for_listeners(MessageScope.CLIENT, message,
                                      [handler.language])

        for session in self.sessions_for_handler(handler):
            session.write(message)

    async def on_server_message(self, message, session):
        """Broadcast a server message to subscribed handlers after listeners run."""
        await self.wait_for_listeners(MessageScope.SERVER, message,
                                      session.spec["languages"])

        for handler in session.handlers:
            handler.write_message(message)

    def unsubscribe(self, handler):
        """Detach a handler from every session it was subscribed to."""
        for session in self.sessions_for_handler(handler):
            session.handlers = [h for h in session.handlers if h != handler]

    def sessions_for_handler(self, handler):
        """Yield each session that the given handler is subscribed to."""
        for session in self.sessions.values():
            if handler in session.handlers:
                yield session

    def _autodetect_language_servers(self):
        """Yield ``(key, spec)`` pairs from registered spec-finder entry points.

        Broken finders are logged and skipped; specs are validated before
        being yielded.
        """
        entry_points = []

        try:
            entry_points = list(pkg_resources.iter_entry_points(EP_SPEC_V1))
        except Exception:  # pragma: no cover
            self.log.exception("Failed to load entry_points")

        for ep in entry_points:
            try:
                spec_finder = ep.load()  # type: SpecMaker
            except Exception as err:  # pragma: no cover
                # FIX: Logger.warn is a deprecated alias of Logger.warning
                self.log.warning(
                    _("Failed to load language server spec finder `{}`: \n{}").
                    format(ep.name, err))
                continue

            try:
                specs = spec_finder(self)
            except Exception as err:  # pragma: no cover
                self.log.warning(
                    _("Failed to fetch commands from language server spec finder"
                      "`{}`:\n{}").format(ep.name, err))
                continue

            errors = list(LANGUAGE_SERVER_SPEC_MAP.iter_errors(specs))

            if errors:  # pragma: no cover
                self.log.warning(
                    _("Failed to validate commands from language server spec finder"
                      "`{}`:\n{}").format(ep.name, errors))
                continue

            for key, spec in specs.items():
                yield key, spec
class LanguageServerManagerAPI(LoggingConfigurable):
    """ Public API that can be used for python-based spec finders
    """

    nodejs = Unicode(help=_("path to nodejs executable")).tag(config=True)

    node_roots = List_([], help=_("absolute paths in which to seek node_modules")).tag(
        config=True
    )

    extra_node_roots = List_(
        [], help=_("additional absolute paths to seek node_modules first")
    ).tag(config=True)

    def find_node_module(self, *path_frag):
        """ look through the node_module roots to find the given node module

        ``extra_node_roots`` are searched before ``node_roots``; returns the
        first existing candidate as a string, or None if nothing matched.
        """
        all_roots = self.extra_node_roots + self.node_roots
        found = None

        for candidate_root in all_roots:
            candidate = pathlib.Path(candidate_root, "node_modules", *path_frag)
            self.log.debug("Checking for %s", candidate)

            if candidate.exists():
                found = str(candidate)
                break

        if found is None:  # pragma: no cover
            # BUG FIX: path_frag is a tuple; pathlib.Path requires str /
            # os.PathLike segments, so it must be splatted or the debug
            # logging itself raises TypeError.
            self.log.debug(
                "%s not found in node_modules of %s", pathlib.Path(*path_frag), all_roots
            )

        return found

    @default("nodejs")
    def _default_nodejs(self):
        # first available node executable wins
        return (
            shutil.which("node") or shutil.which("nodejs") or shutil.which("nodejs.exe")
        )

    @default("node_roots")
    def _default_node_roots(self):
        """ get the "usual suspects" for where `node_modules` may be found

        - where this was launch (usually the same as NotebookApp.notebook_dir)
        - the JupyterLab staging folder (if available)
        - wherever conda puts it
        - wherever some other conventions put it
        """

        # check where the server was started first
        roots = [pathlib.Path.cwd()]

        # try jupyterlab staging next
        try:
            from jupyterlab import commands

            roots += [pathlib.Path(commands.get_app_dir()) / "staging"]
        except ImportError:  # pragma: no cover
            pass

        # conda puts stuff in $PREFIX/lib on POSIX systems
        roots += [pathlib.Path(sys.prefix) / "lib"]

        # ... but right in %PREFIX% on nt
        roots += [pathlib.Path(sys.prefix)]

        return roots
class LanguageServerManager(LanguageServerManagerAPI):
    """ Manage language servers

    Coalesces configured and autodetected server specs, then owns one
    session per language set and routes messages between handlers and
    sessions.
    """

    language_servers = Dict_(
        trait=Dict_,
        # BUG FIX: a Dict trait must default to a dict, not a list
        default_value={},
        help=_("a dict of language server specs, keyed by implementation"),
    ).tag(
        config=True
    )  # type: KeyedLanguageServerSpecs

    autodetect = Bool(
        True, help=_("try to find known language servers in sys.prefix (and elsewhere)")
    ).tag(
        config=True
    )  # type: bool

    sessions = Dict_(
        trait=Instance(LanguageServerSession),
        default_value={},
        help="sessions keyed by languages served",
    )  # type: typing.Dict[typing.Tuple[typing.Text], LanguageServerSession]

    def __init__(self, **kwargs):
        """ Before starting, perform all necessary configuration
        """
        super().__init__(**kwargs)

    def initialize(self, *args, **kwargs):
        """Run all initialization steps in dependency order."""
        self.init_language_servers()
        self.init_sessions()

    def init_language_servers(self) -> None:
        """ determine the final language server configuration.
        """
        language_servers = {}  # type: KeyedLanguageServerSpecs

        # copy the language servers before anybody monkeys with them
        language_servers_from_config = dict(self.language_servers)

        if self.autodetect:
            language_servers.update(self._autodetect_language_servers())

        # restore config (explicit configuration wins over autodetection)
        language_servers.update(language_servers_from_config)

        # coalesce the servers, allowing a user to opt-out by specifying `[]`
        self.language_servers = {
            key: spec
            for key, spec in language_servers.items()
            if spec.get("argv") and spec.get("languages")
        }

    def init_sessions(self):
        """ create, but do not initialize all sessions
        """
        sessions = {}
        for spec in self.language_servers.values():
            # key each session by the sorted tuple of languages it serves
            sessions[tuple(sorted(spec["languages"]))] = LanguageServerSession(
                argv=spec["argv"], languages=spec["languages"]
            )
        self.sessions = sessions

    def subscribe(self, handler):
        """ subscribe a handler to session, or sta
        """
        sessions = []
        for languages, candidate_session in self.sessions.items():
            if handler.language in languages:
                sessions.append(candidate_session)
        if sessions:
            for session in sessions:
                # set-union so double subscription is a no-op
                session.handlers = set([handler]) | session.handlers

    def on_message(self, message, handler):
        """Forward a client message to every session the handler belongs to."""
        for session in self.sessions_for_handler(handler):
            session.write(message)

    def unsubscribe(self, handler):
        """Detach a handler from every session it was subscribed to."""
        for session in self.sessions_for_handler(handler):
            session.handlers = [h for h in session.handlers if h != handler]

    def sessions_for_handler(self, handler):
        """Yield each session that the given handler is subscribed to."""
        for session in self.sessions.values():
            if handler in session.handlers:
                yield session

    def _autodetect_language_servers(self):
        """Yield ``(key, spec)`` pairs from registered spec-finder entry points.

        Broken finders are logged and skipped.
        """
        entry_points = []

        try:
            entry_points = list(pkg_resources.iter_entry_points(EP_SPEC_V0))
        except Exception:  # pragma: no cover
            self.log.exception("Failed to load entry_points")

        for ep in entry_points:
            try:
                spec_finder = ep.load()  # type: SpecMaker
            except Exception as err:  # pragma: no cover
                # FIX: Logger.warn is a deprecated alias of Logger.warning
                self.log.warning(
                    _("Failed to load language server spec finder `{}`: \n{}").format(
                        ep.name, err
                    )
                )
                continue

            try:
                for key, spec in spec_finder(self).items():
                    yield key, spec
            except Exception as err:  # pragma: no cover
                self.log.warning(
                    _(
                        "Failed to fetch commands from language server spec finder"
                        "`{}`:\n{}"
                    ).format(ep.name, err)
                )
                continue
class GenericContentsManager(ContentsManager, HasTraits):
    """ContentsManager backed by a MongoFS filesystem abstraction."""

    # This makes the checkpoints get saved on this directory
    root_dir = Unicode("./", config=True)

    hide_globs = List(Unicode(), [
        u'__pycache__', '*.pyc', '*.pyo', '.DS_Store', '*.so', '*.dylib', '*~',
    ], config=True, help="""
        Glob patterns to hide in file and directory listings.
    """)

    untitled_notebook = Unicode(
        _("Untitled"), config=True,
        help="The base name used when creating untitled notebooks.")

    untitled_file = Unicode(
        "untitled", config=True,
        help="The base name used when creating untitled files.")

    untitled_directory = Unicode(
        "Untitled Folder", config=True,
        help="The base name used when creating untitled directories.")

    def __init__(self, *args, **kwargs):
        super(GenericContentsManager, self).__init__(*args, **kwargs)
        # BUG FIX: the backing filesystem must live on the instance; the
        # original assigned to a discarded local (`fs = MongoFS()`), so every
        # later `self.fs` access would raise AttributeError.
        self.fs = MongoFS()

    def _checkpoints_class_default(self):
        return GenericFileCheckpoints

    def do_error(self, msg, code=500):
        """Raise an HTTPError with the given message and status code."""
        raise HTTPError(code, msg)

    def no_such_entity(self, path):
        """Raise a 404 for a missing file or directory."""
        self.do_error("No such entity: [{path}]".format(path=path), 404)

    def already_exists(self, path):
        """Raise a 409 when the target path is already occupied."""
        thing = "File" if self.file_exists(path) else "Directory"
        self.do_error(
            u"{thing} already exists: [{path}]".format(thing=thing, path=path),
            409)

    def guess_type(self, path, allow_directory=True):
        """
        Guess the type of a file.

        If allow_directory is False, don't consider the possibility that the
        file is a directory.

        Parameters
        ----------
        path: string
            The API path to classify.
        """
        if path.endswith(".ipynb"):
            return "notebook"
        elif allow_directory and self.dir_exists(path):
            return "directory"
        else:
            return "file"

    def file_exists(self, path):
        """Does a file exist at the given path?"""
        self.log.debug("MongoContents.GenericManager.file_exists: ('%s')",
                       path)
        return self.fs.isfile(path)

    def dir_exists(self, path):
        """Does a directory exist at the given path?"""
        self.log.debug("MongoContents.GenericManager.dir_exists: path('%s')",
                       path)
        # check both the raw and the slash-stripped form of the path
        return self.fs.isdir(path) or self.fs.isdir(path.strip("/"))

    def get(self, path, content=True, type=None, format=None):
        """Get a file or directory model."""
        self.log.debug(
            "MongoContents.GenericManager.get] path('%s') type(%s) format(%s)",
            path, type, format)
        path = path.strip('/')

        if type is None:
            type = self.guess_type(path)
        try:
            # dispatch table keyed by model type
            func = {
                "directory": self._get_directory,
                "notebook": self._get_notebook,
                "file": self._get_file,
            }[type]
        except KeyError:
            raise ValueError("Unknown type passed: '{}'".format(type))
        return func(path=path, content=content, format=format)

    def _get_directory(self, path, content=True, format=None):
        self.log.debug(
            "MongoContents.GenericManager.get_directory: path('%s') content(%s) format(%s)",
            path, content, format)
        return self._directory_model_from_path(path, content=content)

    def _get_notebook(self, path, content=True, format=None):
        self.log.debug(
            "MongoContents.GenericManager.get_notebook: path('%s') type(%s) format(%s)",
            path, content, format)
        return self._notebook_model_from_path(path, content=content,
                                              format=format)

    def _get_file(self, path, content=True, format=None):
        self.log.debug(
            "MongoContents.GenericManager.get_file: path('%s') type(%s) format(%s)",
            path, content, format)
        return self._file_model_from_path(path, content=content,
                                          format=format)

    def _directory_model_from_path(self, path, content=False):
        self.log.debug(
            "MongoContents.GenericManager._directory_model_from_path: path('%s') type(%s)",
            path, content)
        model = base_directory_model(path)
        if content:
            if not self.dir_exists(path):
                self.no_such_entity(path)
            model["format"] = "json"
            dir_content = self.fs.ls(path=path)
            model["content"] = self._convert_file_records(dir_content)
        return model

    def _notebook_model_from_path(self, path, content=False, format=None):
        """
        Build a notebook model from database record.
        """
        model = base_model(path)
        model["type"] = "notebook"
        # NOTE(review): real mtimes from fs.lstat are disabled; a dummy
        # timestamp is used instead — confirm whether lstat is reliable here.
        model["last_modified"] = model["created"] = DUMMY_CREATED_DATE
        if content:
            if not self.fs.isfile(path):
                self.no_such_entity(path)
            file_content = self.fs.read(path)
            nb_content = reads(file_content, as_version=NBFORMAT_VERSION)
            self.mark_trusted_cells(nb_content, path)
            model["format"] = "json"
            model["content"] = nb_content
            self.validate_notebook_model(model)
        return model

    def _file_model_from_path(self, path, content=False, format=None):
        """
        Build a file model from database record.
        """
        model = base_model(path)
        model["type"] = "file"
        if self.fs.isfile(path):
            model["last_modified"] = model["created"] = self.fs.lstat(
                path)["ST_MTIME"]
        else:
            model["last_modified"] = model["created"] = DUMMY_CREATED_DATE
        if content:
            try:
                content = self.fs.read(path)
            except NoSuchFile as e:
                self.no_such_entity(e.path)
            except GenericFSError as e:
                self.do_error(str(e), 500)
            model["format"] = format or "text"
            model["content"] = content
            model["mimetype"] = mimetypes.guess_type(path)[0] or "text/plain"
            if format == "base64":
                model["format"] = format or "base64"
                from base64 import b64decode
                model["content"] = b64decode(content)
        return model

    def _convert_file_records(self, paths):
        """
        Build child models for each entry of `paths`, dispatching on the
        result of `guess_type`.
        """
        ret = []
        for path in paths:
            type_ = self.guess_type(path, allow_directory=True)
            if type_ == "notebook":
                ret.append(self._notebook_model_from_path(path, False))
            elif type_ == "file":
                ret.append(self._file_model_from_path(path, False, None))
            elif type_ == "directory":
                ret.append(self._directory_model_from_path(path, False))
            else:
                self.do_error(
                    "Unknown file type %s for file '%s'" % (type_, path), 500)
        return ret

    def save(self, model, path):
        """Save a file or directory model to path.
        """
        self.log.debug("MongoContents.GenericManager: save %s: '%s'", model,
                       path)
        # validate the model before touching the backend
        if "type" not in model:
            self.do_error("No model type provided", 400)
        if "content" not in model and model["type"] != "directory":
            self.do_error("No file content provided", 400)
        if model["type"] not in ("file", "directory", "notebook"):
            self.do_error("Unhandled contents type: %s" % model["type"], 400)
        try:
            if model["type"] == "notebook":
                validation_message = self._save_notebook(model, path)
            elif model["type"] == "file":
                validation_message = self._save_file(model, path)
            else:
                validation_message = self._save_directory(path)
        except Exception as e:
            self.log.error("Error while saving file: %s %s", path, e,
                           exc_info=True)
            self.do_error(
                "Unexpected error while saving file: %s %s" % (path, e), 500)
        # return a content-free model of what was just written
        model = self.get(path, type=model["type"], content=False)
        if validation_message is not None:
            model["message"] = validation_message
        return model

    def new_untitled(self, path='', type='', ext=''):
        """Create a new untitled file or directory in path

        path must be a directory

        File extension can be specified.

        Use `new` to create files with a fully specified path (including filename).
        """
        # NOTE(review): unlike the base class, path is deliberately NOT
        # stripped of slashes here — confirm this is required by MongoFS.
        if not self.dir_exists(path):
            raise HTTPError(404, 'No such directory: %s' % path)

        model = {}
        if type:
            model['type'] = type

        if ext == '.ipynb':
            model.setdefault('type', 'notebook')
        else:
            model.setdefault('type', 'file')

        insert = ''
        if model['type'] == 'directory':
            untitled = self.untitled_directory
            insert = ' '
        elif model['type'] == 'notebook':
            untitled = self.untitled_notebook
            ext = '.ipynb'
        elif model['type'] == 'file':
            untitled = self.untitled_file
        else:
            raise HTTPError(400, "Unexpected model type: %r" % model['type'])

        name = self.increment_filename(untitled + ext, path, insert=insert)
        path = u'{0}/{1}'.format(path, name)
        return self.new(model, path)

    def new(self, model=None, path=''):
        """Create a new file or directory and return its model with no content.

        To create a new untitled entity in a directory, use `new_untitled`.
        """
        path = path.strip('/')
        if model is None:
            model = {}

        if path.endswith('.ipynb'):
            model.setdefault('type', 'notebook')
        else:
            model.setdefault('type', 'file')

        # no content, not a directory, so fill out new-file model
        if 'content' not in model and model['type'] != 'directory':
            if model['type'] == 'notebook':
                model['content'] = new_notebook()
                model['format'] = 'json'
            else:
                model['content'] = ''
                model['type'] = 'file'
                model['format'] = 'text'
        model = self.save(model, path)
        return model

    def increment_filename(self, filename, path='', insert=''):
        """Increment a filename until it is unique.

        Parameters
        ----------
        filename : unicode
            The name of a file, including extension
        path : unicode
            The API path of the target's directory
        insert: unicode
            The characters to insert after the base filename

        Returns
        -------
        name : unicode
            A filename that is unique, based on the input filename.
        """
        path = path.strip('/')
        # unlike the base class, always split at the FIRST dot, so the whole
        # multi-part extension (e.g. .tar.gz) stays with the suffix
        basename, dot, ext = filename.partition('.')
        suffix = dot + ext

        for i in itertools.count():
            if i:
                insert_i = '{}{}'.format(insert, i)
            else:
                insert_i = ''
            name = u'{basename}{insert}{suffix}'.format(basename=basename,
                                                        insert=insert_i,
                                                        suffix=suffix)
            if not self.exists(u'{}/{}'.format(path, name)):
                break
        return name

    def _save_notebook(self, model, path):
        """Sign and write a notebook model; return any validation message."""
        nb_contents = from_dict(model['content'])
        self.check_and_sign(nb_contents, path)
        # NOTE(review): the content dict is written as-is (not json.dumps'd);
        # presumably MongoFS serializes it — confirm against MongoFS.write.
        file_contents = model["content"]
        self.fs.write(path, file_contents)
        self.validate_notebook_model(model)
        return model.get("message")

    def _save_file(self, model, path):
        """Write a plain file's content in the model's declared format."""
        file_contents = model["content"]
        file_format = model.get('format')
        self.fs.write(path, file_contents, file_format)

    def _save_directory(self, path):
        """Create a directory at path."""
        self.fs.mkdir(path)

    def rename_file(self, old_path, new_path):
        """Rename a file or directory.

        NOTE: This method is unfortunately named on the base class. It
        actually moves a file or a directory.
        """
        self.log.debug(
            "MongoContents.GenericManager: Init rename of '%s' to '%s'",
            old_path, new_path)
        if self.file_exists(new_path) or self.dir_exists(new_path):
            self.already_exists(new_path)
        elif self.file_exists(old_path) or self.dir_exists(old_path):
            self.log.debug(
                "MongoContents.GenericManager: Actually renaming '%s' to '%s'",
                old_path, new_path)
            self.fs.mv(old_path, new_path)
        else:
            self.no_such_entity(old_path)

    def delete_file(self, path):
        """Delete the file or directory at path.
        """
        self.log.debug("MongoContents.GenericManager: delete_file '%s'", path)
        if self.file_exists(path):
            self.fs.rm(path, type="file")
        elif self.dir_exists(path):
            self.fs.rm(path, type="dir")
        else:
            self.no_such_entity(path)

    def is_hidden(self, path):
        """Is path a hidden directory or file?
        """
        self.log.debug("MongoContents.GenericManager: is_hidden '%s'", path)
        # this backend has no notion of hidden entries
        return False
def info_string(self): return _("Serving notebooks from local directory: %s") % self.root_dir
class LanguageServerManager(LanguageServerManagerAPI):
    """Manage language servers"""

    language_servers = Schema(
        validator=LANGUAGE_SERVER_SPEC_MAP,
        help=_("a dict of language server specs, keyed by implementation"),
    ).tag(config=True)  # type: KeyedLanguageServerSpecs

    autodetect = Bool(
        True,
        help=_(
            "try to find known language servers in sys.prefix (and elsewhere)")
    ).tag(config=True)  # type: bool

    sessions = Dict_(
        trait=Instance(LanguageServerSession),
        default_value={},
        help="sessions keyed by language server name",
    )  # type: Dict[Tuple[Text], LanguageServerSession]

    virtual_documents_dir = Unicode(
        help="""Path to virtual documents relative to the content manager root
        directory.

        Its default value can be set with JP_LSP_VIRTUAL_DIR and fallback to
        '.virtual_documents'.
        """).tag(config=True)

    all_listeners = List_(trait=LoadableCallable).tag(config=True)
    server_listeners = List_(trait=LoadableCallable).tag(config=True)
    client_listeners = List_(trait=LoadableCallable).tag(config=True)

    @default("language_servers")
    def _default_language_servers(self):
        return {}

    @default("virtual_documents_dir")
    def _default_virtual_documents_dir(self):
        return os.getenv("JP_LSP_VIRTUAL_DIR", ".virtual_documents")

    def __init__(self, **kwargs):
        """Before starting, perform all necessary configuration"""
        super().__init__(**kwargs)

    def initialize(self, *args, **kwargs):
        """Run the one-time setup steps, in dependency order."""
        self.init_language_servers()
        self.init_listeners()
        self.init_sessions()

    def init_language_servers(self) -> None:
        """determine the final language server configuration."""
        language_servers = {}  # type: KeyedLanguageServerSpecs

        # copy the language servers before anybody monkeys with them
        language_servers_from_config = dict(self.language_servers)

        if self.autodetect:
            language_servers.update(self._autodetect_language_servers())

        # restore config (explicit configuration wins over autodetection)
        language_servers.update(language_servers_from_config)

        # coalesce the servers, allowing a user to opt-out by specifying `[]`
        self.language_servers = {
            key: spec
            for key, spec in language_servers.items()
            if spec.get("argv")
        }

    def init_sessions(self):
        """create, but do not initialize all sessions"""
        sessions = {}
        for language_server, spec in self.language_servers.items():
            sessions[language_server] = LanguageServerSession(
                language_server=language_server, spec=spec, parent=self)
        self.sessions = sessions

    def init_listeners(self):
        """register traitlets-configured listeners"""
        scopes = {
            MessageScope.ALL: [self.all_listeners, EP_LISTENER_ALL_V1],
            MessageScope.CLIENT:
                [self.client_listeners, EP_LISTENER_CLIENT_V1],
            MessageScope.SERVER:
                [self.server_listeners, EP_LISTENER_SERVER_V1],
        }
        for scope, trt_ep in scopes.items():
            listeners, entry_point = trt_ep

            for ep_name, ept in entrypoints.get_group_named(
                    entry_point).items():  # pragma: no cover
                try:
                    listeners.append(ept.load())
                except Exception as err:
                    self.log.warning("Failed to load entry point %s: %s",
                                     ep_name, err)

            for listener in listeners:
                self.__class__.register_message_listener(
                    scope=scope.value)(listener)

    def subscribe(self, handler):
        """subscribe a handler to session, or sta"""
        session = self.sessions.get(handler.language_server)

        if session is None:
            self.log.error(
                "[{}] no session: handler subscription failed".format(
                    handler.language_server))
            return

        session.handlers = {handler} | session.handlers

    async def on_client_message(self, message, handler):
        """Forward a client message to its session, after listeners run."""
        await self.wait_for_listeners(MessageScope.CLIENT, message,
                                      handler.language_server)

        session = self.sessions.get(handler.language_server)

        if session is None:
            self.log.error("[{}] no session: client message dropped".format(
                handler.language_server))
            return

        session.write(message)

    async def on_server_message(self, message, session):
        """Fan a server message out to listeners and subscribed handlers."""
        language_servers = [
            ls_key for ls_key, sess in self.sessions.items()
            if sess == session
        ]

        # the loop variable previously rebound the *list's* own name
        # (``for language_servers in language_servers``); use a distinct
        # singular name to avoid the shadowing
        for language_server in language_servers:
            await self.wait_for_listeners(MessageScope.SERVER, message,
                                          language_server)

        for handler in session.handlers:
            handler.write_message(message)

    def unsubscribe(self, handler):
        """Remove a handler from its session, if the session exists."""
        session = self.sessions.get(handler.language_server)

        if session is None:
            self.log.error(
                "[{}] no session: handler unsubscription failed".format(
                    handler.language_server))
            return

        # keep ``handlers`` a set, consistent with ``subscribe``
        session.handlers = {h for h in session.handlers if h != handler}

    def _autodetect_language_servers(self):
        """Yield ``(key, spec)`` pairs from installed spec-finder entry points.

        Each entry point is loaded, invoked, and schema-validated; failures
        are logged and skipped so one bad finder cannot break detection.
        """
        # must be a dict: ``.items()`` is called below even when
        # ``get_group_named`` raises and we fall through with the default
        entry_points = {}

        try:
            entry_points = entrypoints.get_group_named(EP_SPEC_V1)
        except Exception:  # pragma: no cover
            self.log.exception("Failed to load entry_points")

        for ep_name, ep in entry_points.items():
            try:
                spec_finder = ep.load()  # type: SpecMaker
            except Exception as err:  # pragma: no cover
                # ``log.warn`` is a deprecated alias of ``log.warning``
                self.log.warning(
                    _("Failed to load language server spec finder `{}`: \n{}").
                    format(ep_name, err))
                continue

            try:
                specs = spec_finder(self)
            except Exception as err:  # pragma: no cover
                self.log.warning(
                    _("Failed to fetch commands from language server spec finder"
                      "`{}`:\n{}").format(ep_name, err))
                traceback.print_exc()
                continue

            errors = list(LANGUAGE_SERVER_SPEC_MAP.iter_errors(specs))

            if errors:  # pragma: no cover
                self.log.warning(
                    _("Failed to validate commands from language server spec finder"
                      "`{}`:\n{}").format(ep_name, errors))
                continue

            for key, spec in specs.items():
                yield key, spec