class PythonLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md Based on: https://github.com/palantir/python-language-server/blob/develop/pyls/python_ls.py """ def __init__(self, rx, tx): self.workspace = None self.config = None self.root_uri = None self.watching_thread = None self.workspaces = {} self.uri_workspace_mapper = {} self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def _match_uri_to_workspace(self, uri): workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces) return self.workspaces.get(workspace_uri, self.workspace) def capabilities(self): server_capabilities = { "codeActionProvider": False, # "codeLensProvider": { # "resolveProvider": False, # We may need to make this configurable # }, # "completionProvider": { # "resolveProvider": False, # We know everything ahead of time # "triggerCharacters": ["."], # }, "documentFormattingProvider": False, "documentHighlightProvider": False, "documentRangeFormattingProvider": False, "documentSymbolProvider": False, "definitionProvider": False, "executeCommandProvider": { "commands": [] }, "hoverProvider": False, "referencesProvider": False, "renameProvider": False, "foldingRangeProvider": False, # "signatureHelpProvider": { # 'triggerCharacters': ['(', ',', '='] # }, "textDocumentSync": { "change": lsp.TextDocumentSyncKind.INCREMENTAL, "save": { "includeText": True, }, "openClose": True, }, "workspace": { "workspaceFolders": { "supported": True, "changeNotifications": True } }, } log.info("Server capabilities: %s", server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.debug( "Language server initialized with:\n processId: %s\n rootUri: %s\n rootPath: %s\n initializationOptions: %s", processId, rootUri, rootPath, initializationOptions, ) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else "" self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri self.config = config.Config( rootUri, initializationOptions or {}, processId, _kwargs.get("capabilities", {}), ) self.workspace = Workspace(rootUri, self._endpoint, self.config) self.workspaces[rootUri] = self.workspace if processId not in (None, -1, 0) and self.watching_thread is None: def watch_parent_process(pid): # exit when the given pid is not alive while True: if not _utils.is_process_alive(pid): # Note: just exit since the parent process already # exited. 
log.info( "Force-quit process: %s", os.getpid(), ) os._exit(0) time.sleep(PARENT_PROCESS_WATCH_INTERVAL) self.watching_thread = threading.Thread( target=watch_parent_process, args=(processId, )) self.watching_thread.daemon = True self.watching_thread.start() # Get our capabilities return {"capabilities": self.capabilities()} def m_initialized(self, **_kwargs): pass def lint(self, doc_uri, is_saved): raise NotImplementedError("Subclasses must override.") def m_text_document__did_close(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument["uri"]) workspace.rm_document(textDocument["uri"]) def m_text_document__did_open(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument["uri"]) if workspace is None: log.critical("Unable to find workspace for: %s", (textDocument, )) return workspace.put_document( textDocument["uri"], textDocument["text"], version=textDocument.get("version"), ) self.lint(textDocument["uri"], is_saved=True) def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument["uri"]) if workspace is None: log.critical("Unable to find workspace for: %s", (textDocument, )) return for change in contentChanges: workspace.update_document(textDocument["uri"], change, version=textDocument.get("version")) self.lint(textDocument["uri"], is_saved=False) def m_text_document__did_save(self, textDocument=None, **_kwargs): self.lint(textDocument["uri"], is_saved=True) def m_workspace__did_change_configuration(self, settings=None): self.config.update((settings or {}).get("robot", {})) for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] workspace.update_config(self.config) for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) def m_workspace__did_change_workspace_folders(self, added=None, removed=None, **_kwargs): for removed_info in removed: removed_uri = removed_info["uri"] self.workspaces.pop(removed_uri) for added_info in added: added_uri = added_info["uri"] self.workspaces[added_uri] = Workspace(added_uri, self._endpoint, self.config) # Migrate documents that are on the root workspace and have a better # match now doc_uris = list(self.workspace._docs.keys()) for uri in doc_uris: doc = self.workspace._docs.pop(uri) new_workspace = self._match_uri_to_workspace(uri) new_workspace._docs[uri] = doc def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set() config_changed = False for d in changes or []: if d["uri"].endswith(PYTHON_FILE_EXTENSIONS): changed_py_files.add(d["uri"]) elif d["uri"].endswith(CONFIG_FILEs): config_changed = True if config_changed: self.config.settings.cache_clear() elif not changed_py_files: # Only externally changed python files and lint configs may result in changed diagnostics. return for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] for doc_uri in workspace.documents: # Changes in doc_uri are already handled by m_text_document__did_save if doc_uri not in changed_py_files: self.lint(doc_uri, is_saved=False)
class PythonDaemon(MethodDispatcher): """Base Python Daemon with simple methods to check if a module exists, get version info and the like. To add additional methods, please create a separate class based off this and pass in the arg `--daemon-module` to `vscode_datascience_helpers.daemon`. """ def __init__(self, rx, tx): self.log = logging.getLogger("{0}.{1}".format( self.__class__.__module__, self.__class__.__name__)) self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._shutdown = False def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != "exit": # exit is the only allowed method during shutdown self.log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError self.log.info("Execute rpc method %s from %s", item, sys.executable) return super().__getitem__(item) def start(self): """Entry point for the server.""" self._shutdown = False self._jsonrpc_stream_reader.listen(self._endpoint.consume) def m_ping(self, data): """ping & pong (check if daemon is alive).""" self.log.info("pinged with %s", data) return {"pong": data} def _execute_and_capture_output(self, func): fout = io.StringIO() ferr = io.StringIO() with redirect_stdout(fout): with redirect_stderr(ferr): func() output = {} if fout.tell(): output["stdout"] = fout.getvalue() if ferr.tell(): output["stderr"] = ferr.getvalue() return output def close(self): self.log.info("Closing rpc channel") self._shutdown = True self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def m_exit(self, **_kwargs): self.close() @error_decorator def m_exec_file(self, file_name, args=[], cwd=None, env=None): args = [] if args is None else args self.log.info("Exec file %s with args %s", file_name, args) def exec_file(): self.log.info("execute file %s", file_name) runpy.run_path(file_name, globals()) with change_exec_context(args, cwd, env): return self._execute_and_capture_output(exec_file) @error_decorator def m_exec_file_observable(self, file_name, args=[], cwd=None, env=None): args = [] if args is None else args old_argv, sys.argv = sys.argv, [""] + args self.log.info("Exec file (observale) %s with args %s", file_name, args) with change_exec_context(args, cwd, env): runpy.run_path(file_name, globals()) @error_decorator def m_exec_module(self, module_name, args=[], cwd=None, env=None): args = [] if args is None else args self.log.info("Exec module %s with args %s", module_name, args) if args[-1] == "--version": return self._get_module_version(module_name, args) def exec_module(): self.log.info("execute module %s", module_name) runpy.run_module(module_name, globals(), run_name="__main__") with change_exec_context(args, cwd, env): return self._execute_and_capture_output(exec_module) @error_decorator def m_exec_module_observable(self, module_name, args=None, cwd=None, env=None): args = [] if args is None else args self.log.info("Exec module (observable) %s with args %s", module_name, args) with change_exec_context(args, cwd, env): runpy.run_module(module_name, globals(), run_name="__main__") def _get_module_version(self, module_name, args): """We handle `-m pip --version` as a special case. As this causes the current process to die. These CLI commands are meant for CLI (i.e. kill process once done). 
""" args = [] if args is None else args if module_name == "jupyter" and args[0] != "--version": # This means we're trying to get a version of a sub command. # E.g. python -m jupyter notebook --version. # In such cases, use the subcommand. We can ignore jupyter. module_name = args[0] try: self.log.info("getting module_version %s", module_name) m = importlib.import_module(module_name) return {"stdout": m.__version__} except Exception: return {"error": traceback.format_exc()} def m_get_executable(self): return {"path": sys.executable} def m_get_interpreter_information(self): return { "versionInfo": tuple(sys.version_info), "sysPrefix": sys.prefix, "version": sys.version, } def m_is_module_installed(self, module_name=None): return {"exists": self._is_module_installed(module_name)} def _is_module_installed(self, module_name=None): try: importlib.import_module(module_name) return True except Exception: return False @classmethod def start_daemon(cls, logging_queue_handler=None): """ Starts the daemon. """ if not issubclass(cls, PythonDaemon): raise ValueError( "Handler class must be an instance of PythonDaemon") log.info("Starting %s Daemon", cls.__name__) def on_write_stdout(output): server._endpoint.notify("output", { "source": "stdout", "out": output }) def on_write_stderr(output): server._endpoint.notify("output", { "source": "stderr", "out": output }) stdin, stdout = get_io_buffers() server = cls(stdin, stdout) redirect_output(on_write_stdout, on_write_stderr) # Set up the queue handler that'll send log messages over to the client. if logging_queue_handler is not None: logging_queue_handler.set_server(server) server.start()
class PythonLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ # pylint: disable=too-many-public-methods,redefined-builtin def __init__(self, rx, tx, check_parent_process=False): self.workspace = None self.config = None self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._dispatchers = [] self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != 'exit': # exit is the only allowed method during shutdown log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError try: return super(PythonLanguageServer, self).__getitem__(item) except KeyError: # Fallback through extra dispatchers for dispatcher in self._dispatchers: try: return dispatcher[item] except KeyError: continue raise KeyError() def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def _hook(self, hook_name, doc_uri=None, **kwargs): """Calls hook_name and returns a list of results from all registered handlers""" doc = self.workspace.get_document(doc_uri) if doc_uri else None hook_handlers = self.config.plugin_manager.subset_hook_caller( hook_name, self.config.disabled_plugins) return hook_handlers(config=self.config, workspace=self.workspace, document=doc, **kwargs) def capabilities(self): server_capabilities = { 'codeActionProvider': True, 'codeLensProvider': { 'resolveProvider': False, # We may need to make this configurable }, 'completionProvider': { 'resolveProvider': False, # We know everything ahead of time 'triggerCharacters': ['.'] }, 'documentFormattingProvider': True, 'documentHighlightProvider': True, 'documentRangeFormattingProvider': True, 'documentSymbolProvider': True, 'definitionProvider': True, 'executeCommandProvider': { 'commands': flatten(self._hook('pyls_commands')) }, 'hoverProvider': True, 'referencesProvider': True, 'renameProvider': True, 'signatureHelpProvider': { 'triggerCharacters': ['(', ','] }, 'textDocumentSync': lsp.TextDocumentSyncKind.INCREMENTAL, 'experimental': merge(self._hook('pyls_experimental_capabilities')) } log.info('Server capabilities: %s', server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else '' self.workspace = Workspace(rootUri, self._endpoint) self.config = config.Config(rootUri, initializationOptions or {}, processId) self._dispatchers = self._hook('pyls_dispatchers') self._hook('pyls_initialize') if self._check_parent_process and processId is not None: def watch_parent_process(pid): # exist when the given pid is not alive if not _utils.is_process_alive(pid): log.info("parent process %s is not alive", pid) self.m_exit() log.debug("parent process %s is still alive", pid) 
threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() watching_thread = threading.Thread(target=watch_parent_process, args=(processId, )) watching_thread.daemon = True watching_thread.start() # Get our capabilities return {'capabilities': self.capabilities()} def m_initialized(self, **_kwargs): pass def code_actions(self, doc_uri, range, context): return flatten( self._hook('pyls_code_actions', doc_uri, range=range, context=context)) def code_lens(self, doc_uri): return flatten(self._hook('pyls_code_lens', doc_uri)) def completions(self, doc_uri, position): completions = self._hook('pyls_completions', doc_uri, position=position) return {'isIncomplete': False, 'items': flatten(completions)} def definitions(self, doc_uri, position): return flatten( self._hook('pyls_definitions', doc_uri, position=position)) def document_symbols(self, doc_uri): return flatten(self._hook('pyls_document_symbols', doc_uri)) def execute_command(self, command, arguments): return self._hook('pyls_execute_command', command=command, arguments=arguments) def format_document(self, doc_uri): return self._hook('pyls_format_document', doc_uri) def format_range(self, doc_uri, range): return self._hook('pyls_format_range', doc_uri, range=range) def highlight(self, doc_uri, position): return flatten( self._hook('pyls_document_highlight', doc_uri, position=position)) or None def hover(self, doc_uri, position): return self._hook('pyls_hover', doc_uri, position=position) or { 'contents': '' } @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri') def lint(self, doc_uri): # Since we're debounced, the document may no longer be open if doc_uri in self.workspace.documents: self.workspace.publish_diagnostics( doc_uri, flatten(self._hook('pyls_lint', doc_uri))) def references(self, doc_uri, position, exclude_declaration): return flatten( self._hook('pyls_references', doc_uri, position=position, exclude_declaration=exclude_declaration)) def rename(self, doc_uri, position, new_name): return self._hook('pyls_rename', doc_uri, position=position, new_name=new_name) def signature_help(self, doc_uri, position): return self._hook('pyls_signature_help', doc_uri, position=position) def m_text_document__did_close(self, textDocument=None, **_kwargs): self.workspace.rm_document(textDocument['uri']) def m_text_document__did_open(self, textDocument=None, **_kwargs): self.workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) self._hook('pyls_document_did_open', textDocument['uri']) self.lint(textDocument['uri']) def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): for change in contentChanges: self.workspace.update_document(textDocument['uri'], change, version=textDocument.get('version')) self.lint(textDocument['uri']) def m_text_document__did_save(self, textDocument=None, **_kwargs): self.lint(textDocument['uri']) def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs): return self.code_actions(textDocument['uri'], range, context) def m_text_document__code_lens(self, textDocument=None, **_kwargs): return self.code_lens(textDocument['uri']) def m_text_document__completion(self, textDocument=None, position=None, **_kwargs): return self.completions(textDocument['uri'], position) def m_text_document__definition(self, textDocument=None, position=None, **_kwargs): return self.definitions(textDocument['uri'], position) def m_text_document__document_highlight(self, textDocument=None, position=None, 
**_kwargs): return self.highlight(textDocument['uri'], position) def m_text_document__hover(self, textDocument=None, position=None, **_kwargs): return self.hover(textDocument['uri'], position) def m_text_document__document_symbol(self, textDocument=None, **_kwargs): return self.document_symbols(textDocument['uri']) def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs): # For now we're ignoring formatting options. return self.format_document(textDocument['uri']) def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs): return self.rename(textDocument['uri'], position, newName) def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs): # Again, we'll ignore formatting options for now. return self.format_range(textDocument['uri'], range) def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs): exclude_declaration = not context['includeDeclaration'] return self.references(textDocument['uri'], position, exclude_declaration) def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs): return self.signature_help(textDocument['uri'], position) def m_workspace__did_change_configuration(self, settings=None): self.config.update((settings or {}).get('pyls', {})) for doc_uri in self.workspace.documents: self.lint(doc_uri) def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set(d['uri'] for d in changes if d['uri'].endswith(PYTHON_FILE_EXTENSIONS)) # Only externally changed python files and lint configs may result in changed diagnostics. if not changed_py_files: return # TODO: We currently don't cache settings therefor we can just lint again. # Here would be the right point to update the settings after a change to config files. for doc_uri in self.workspace.documents: # Changes in doc_uri are already handled by m_text_document__did_save if doc_uri not in changed_py_files: self.lint(doc_uri) def m_workspace__execute_command(self, command=None, arguments=None): return self.execute_command(command, arguments)
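

# --- Illustrative plugin sketch (registration details are assumptions) ---
# Each _hook() call above fans out through pluggy to registered plugins. A
# minimal linter hook would look roughly like this; the diagnostic content is
# made up, and the setuptools entry-point wiring is omitted.
from pyls import hookimpl


@hookimpl
def pyls_lint(document):
    # Return LSP Diagnostic dicts; the server flattens the results from all
    # plugins and publishes them for the document.
    return [{
        "source": "example-plugin",
        "range": {
            "start": {"line": 0, "character": 0},
            "end": {"line": 0, "character": 1},
        },
        "message": "example diagnostic",
        "severity": 2,  # Warning
    }]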
class PythonLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ # pylint: disable=too-many-public-methods,redefined-builtin def __init__(self, rx, tx, check_parent_process=False): self.workspace = None self.config = None self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != 'exit': # exit is the only allowed method during shutdown log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError return super(PythonLanguageServer, self).__getitem__(item) def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def capabilities(self): from . import mypy_server is_patched_mypy = mypy_server.is_patched_mypy() if not is_patched_mypy: log.info( 'Using non-patched mypy, rich language features not available.' ) python_38 = sys.version_info >= (3, 8) if not python_38: log.info( 'Using Python before 3.8, rich language features not available.' ) rich_analysis_available = is_patched_mypy and python_38 server_capabilities = { 'definitionProvider': rich_analysis_available, 'hoverProvider': rich_analysis_available, 'textDocumentSync': lsp.TextDocumentSyncKind.INCREMENTAL } log.info('Server capabilities: %s', server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else '' self.workspace = Workspace(rootUri, self._endpoint) self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {})) try: import mypy except ImportError: self.workspace.show_message( 'Mypy is not installed. Follow mypy-vscode installation instructions.', lsp.MessageType.Warning) log.error(f'mypy is not installed. 
sys.path:\n{sys.path}') return {'capabilities': None} if self._check_parent_process and processId is not None: def watch_parent_process(pid): # exist when the given pid is not alive if not _utils.is_process_alive(pid): log.info("parent process %s is not alive", pid) self.m_exit() log.debug("parent process %s is still alive", pid) threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() watching_thread = threading.Thread(target=watch_parent_process, args=(processId, )) watching_thread.daemon = True watching_thread.start() # Get our capabilities return {'capabilities': self.capabilities()} def m_initialized(self, **_kwargs): pass def get_document(self, doc_uri): return self.workspace.get_document(doc_uri) if doc_uri else None def m_text_document__did_close(self, textDocument=None, **_kwargs): self.workspace.rm_document(textDocument['uri']) def m_text_document__did_open(self, textDocument=None, **_kwargs): self.workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): for change in contentChanges: self.workspace.update_document(textDocument['uri'], change, version=textDocument.get('version')) def m_text_document__did_save(self, textDocument=None, **_kwargs): from . import mypy_server mypy_server.mypy_check(self.workspace, self.config) def m_text_document__definition(self, textDocument=None, position=None, **_kwargs): from . import mypy_definition return mypy_definition.get_definitions( self.config, self.workspace, self.get_document(textDocument['uri']), position) def m_text_document__hover(self, textDocument=None, position=None, **_kwargs): from . import mypy_hover return mypy_hover.hover(self.workspace, self.get_document(textDocument['uri']), position) def m_workspace__did_change_configuration(self, settings=None): from . import mypy_server self.config.update((settings or {}).get('mypy', {})) mypy_server.configuration_changed(self.config, self.workspace)
class PythonLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ # pylint: disable=too-many-public-methods,redefined-builtin def __init__(self, rx, tx, check_parent_process=False): self.workspace = None self.config = None self.root_uri = None self.watching_thread = None self.workspaces = {} self.uri_workspace_mapper = {} self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._dispatchers = [] self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != 'exit': # exit is the only allowed method during shutdown log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError try: return super(PythonLanguageServer, self).__getitem__(item) except KeyError: # Fallback through extra dispatchers for dispatcher in self._dispatchers: try: return dispatcher[item] except KeyError: continue raise KeyError() def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def _match_uri_to_workspace(self, uri): workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces) return self.workspaces.get(workspace_uri, self.workspace) def _hook(self, hook_name, doc_uri=None, **kwargs): """Calls hook_name and returns a list of results from all registered handlers""" workspace = self._match_uri_to_workspace(doc_uri) doc = workspace.get_document(doc_uri) if doc_uri else None hook_handlers = self.config.plugin_manager.subset_hook_caller( hook_name, self.config.disabled_plugins) return hook_handlers(config=self.config, workspace=workspace, document=doc, **kwargs) def capabilities(self): server_capabilities = { 'codeActionProvider': True, 'codeLensProvider': { 'resolveProvider': False, # We may need to make this configurable }, 'completionProvider': { 'resolveProvider': False, # We know everything ahead of time 'triggerCharacters': ['.'] }, 'documentFormattingProvider': True, 'documentHighlightProvider': True, 'documentRangeFormattingProvider': True, 'documentSymbolProvider': True, 'definitionProvider': True, 'executeCommandProvider': { 'commands': flatten(self._hook('pyls_commands')) }, 'hoverProvider': True, 'referencesProvider': True, 'renameProvider': True, 'foldingRangeProvider': True, 'signatureHelpProvider': { 'triggerCharacters': ['(', ',', '='] }, 'textDocumentSync': { 'change': lsp.TextDocumentSyncKind.INCREMENTAL, 'save': { 'includeText': True, }, 'openClose': True, }, 'workspace': { 'workspaceFolders': { 'supported': True, 'changeNotifications': True } }, 'experimental': merge(self._hook('pyls_experimental_capabilities')) } log.info('Server capabilities: %s', server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else '' 
self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {})) self.workspace = Workspace(rootUri, self._endpoint, self.config) self.workspaces[rootUri] = self.workspace self._dispatchers = self._hook('pyls_dispatchers') self._hook('pyls_initialize') if self._check_parent_process and processId is not None and self.watching_thread is None: def watch_parent_process(pid): # exit when the given pid is not alive if not _utils.is_process_alive(pid): log.info("parent process %s is not alive, exiting!", pid) self.m_exit() else: threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() self.watching_thread = threading.Thread( target=watch_parent_process, args=(processId, )) self.watching_thread.daemon = True self.watching_thread.start() # Get our capabilities return {'capabilities': self.capabilities()} def m_initialized(self, **_kwargs): self._hook('pyls_initialized') def code_actions(self, doc_uri, range, context): return flatten( self._hook('pyls_code_actions', doc_uri, range=range, context=context)) def code_lens(self, doc_uri): return flatten(self._hook('pyls_code_lens', doc_uri)) def completions(self, doc_uri, position): completions = self._hook('pyls_completions', doc_uri, position=position) return {'isIncomplete': False, 'items': flatten(completions)} def definitions(self, doc_uri, position): return flatten( self._hook('pyls_definitions', doc_uri, position=position)) def document_symbols(self, doc_uri): return flatten(self._hook('pyls_document_symbols', doc_uri)) def execute_command(self, command, arguments): return self._hook('pyls_execute_command', command=command, arguments=arguments) def format_document(self, doc_uri): return self._hook('pyls_format_document', doc_uri) def format_range(self, doc_uri, range): return self._hook('pyls_format_range', doc_uri, range=range) def highlight(self, doc_uri, position): return flatten( self._hook('pyls_document_highlight', doc_uri, position=position)) or None def hover(self, doc_uri, position): return self._hook('pyls_hover', doc_uri, position=position) or { 'contents': '' } @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri') def lint(self, doc_uri, is_saved): # Since we're debounced, the document may no longer be open workspace = self._match_uri_to_workspace(doc_uri) if doc_uri in workspace.documents: workspace.publish_diagnostics( doc_uri, flatten(self._hook('pyls_lint', doc_uri, is_saved=is_saved))) def references(self, doc_uri, position, exclude_declaration): return flatten( self._hook('pyls_references', doc_uri, position=position, exclude_declaration=exclude_declaration)) def rename(self, doc_uri, position, new_name): return self._hook('pyls_rename', doc_uri, position=position, new_name=new_name) def signature_help(self, doc_uri, position): return self._hook('pyls_signature_help', doc_uri, position=position) def folding(self, doc_uri): return self._hook('pyls_folding_range', doc_uri) def m_text_document__did_close(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) workspace.rm_document(textDocument['uri']) def m_text_document__did_open(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) self._hook('pyls_document_did_open', textDocument['uri']) self.lint(textDocument['uri'], is_saved=True) def 
m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) for change in contentChanges: workspace.update_document(textDocument['uri'], change, version=textDocument.get('version')) self.lint(textDocument['uri'], is_saved=False) def m_text_document__did_save(self, textDocument=None, **_kwargs): self.lint(textDocument['uri'], is_saved=True) def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs): return self.code_actions(textDocument['uri'], range, context) def m_text_document__code_lens(self, textDocument=None, **_kwargs): return self.code_lens(textDocument['uri']) def m_text_document__completion(self, textDocument=None, position=None, **_kwargs): return self.completions(textDocument['uri'], position) def m_text_document__definition(self, textDocument=None, position=None, **_kwargs): return self.definitions(textDocument['uri'], position) def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs): return self.highlight(textDocument['uri'], position) def m_text_document__hover(self, textDocument=None, position=None, **_kwargs): return self.hover(textDocument['uri'], position) def m_text_document__document_symbol(self, textDocument=None, **_kwargs): return self.document_symbols(textDocument['uri']) def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs): # For now we're ignoring formatting options. return self.format_document(textDocument['uri']) def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs): return self.rename(textDocument['uri'], position, newName) def m_text_document__folding_range(self, textDocument=None, **_kwargs): return self.folding(textDocument['uri']) def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs): # Again, we'll ignore formatting options for now. 
return self.format_range(textDocument['uri'], range) def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs): exclude_declaration = not context['includeDeclaration'] return self.references(textDocument['uri'], position, exclude_declaration) def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs): return self.signature_help(textDocument['uri'], position) def m_workspace__did_change_configuration(self, settings=None): self.config.update((settings or {}).get('pyls', {})) for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] workspace.update_config(settings) for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs): # pylint: disable=too-many-locals if event is None: return added = event.get('added', []) removed = event.get('removed', []) for removed_info in removed: if 'uri' in removed_info: removed_uri = removed_info['uri'] self.workspaces.pop(removed_uri, None) for added_info in added: if 'uri' in added_info: added_uri = added_info['uri'] workspace_config = config.Config(added_uri, self.config._init_opts, self.config._process_id, self.config._capabilities) self.workspaces[added_uri] = Workspace(added_uri, self._endpoint, workspace_config) root_workspace_removed = any(removed_info['uri'] == self.root_uri for removed_info in removed) workspace_added = len(added) > 0 and 'uri' in added[0] if root_workspace_removed and workspace_added: added_uri = added[0]['uri'] self.root_uri = added_uri new_root_workspace = self.workspaces[added_uri] self.config = new_root_workspace._config self.workspace = new_root_workspace elif root_workspace_removed: # NOTE: Removing the root workspace can only happen when the server # is closed, thus the else condition of this if can never happen. if self.workspaces: log.debug('Root workspace deleted!') available_workspaces = sorted(self.workspaces) first_workspace = available_workspaces[0] new_root_workspace = self.workspaces[first_workspace] self.root_uri = first_workspace self.config = new_root_workspace._config self.workspace = new_root_workspace # Migrate documents that are on the root workspace and have a better # match now doc_uris = list(self.workspace._docs.keys()) for uri in doc_uris: doc = self.workspace._docs.pop(uri) new_workspace = self._match_uri_to_workspace(uri) new_workspace._docs[uri] = doc def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set() config_changed = False for d in (changes or []): if d['uri'].endswith(PYTHON_FILE_EXTENSIONS): changed_py_files.add(d['uri']) elif d['uri'].endswith(CONFIG_FILEs): config_changed = True if config_changed: self.config.settings.cache_clear() elif not changed_py_files: # Only externally changed python files and lint configs may result in changed diagnostics. return for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] for doc_uri in workspace.documents: # Changes in doc_uri are already handled by m_text_document__did_save if doc_uri not in changed_py_files: self.lint(doc_uri, is_saved=False) def m_workspace__execute_command(self, command=None, arguments=None): return self.execute_command(command, arguments)
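

# --- Simplified sketch (assumption about _utils.match_uri_to_workspace) ---
# The workspace routing above matches a document URI to the workspace folder
# whose root URI is its longest prefix, falling back to the root workspace
# when nothing matches. Roughly:
def _match_longest_prefix(uri, workspaces):
    if uri is None:
        return None
    candidates = [root for root in workspaces if uri.startswith(root)]
    return max(candidates, key=len) if candidates else None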
class RstLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ def capabilities(self) -> dict: server_capabilities = { # Defines how text documents are synced "textDocumentSync": { "change": constants.TextDocumentSyncKind.INCREMENTAL, "save": { "includeText": True }, "openClose": True, }, "workspace": { "workspaceFolders": { "supported": True, "changeNotifications": True } }, # features provided # "codeActionProvider": True, "codeLensProvider": { # Code lens has a resolve provider as well "resolveProvider": False }, "completionProvider": { "resolveProvider": False, "triggerCharacters": [], # [":"], }, # "documentFormattingProvider": True, # "documentHighlightProvider": True, # "documentRangeFormattingProvider": True, "documentSymbolProvider": True, "definitionProvider": True, "executeCommandProvider": { "commands": utils.flatten(self.call_plugins( PluginTypes.rst_commands.value)) }, "hoverProvider": True, "referencesProvider": True, # "renameProvider": True, "foldingRangeProvider": True, # "signatureHelpProvider": {"triggerCharacters": []}, # "experimental": any, } logger.info("Server capabilities: %s", server_capabilities) return server_capabilities def __init__(self, rx, tx, check_parent_process=False): """Initialise the server.""" self.root_uri = None self.config = None # type: Optional[Config] self.workspaces = {} # type: Dict[str, Workspace] self.watching_thread = None self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._dispatchers = [] self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def show_message(self, message: str, msg_type: int = constants.MessageType.Info): """Request the client show a pop-up message.""" self._endpoint.notify("window/showMessage", params={ "type": msg_type, "message": message }) def log_message(self, message: str, msg_type: int = constants.MessageType.Info): """Request the client log a message (in the servers output space).""" self._endpoint.notify("window/logMessage", params={ "type": msg_type, "message": str(message) }) def show_message_request( self, message: str, actions: List[dict] = (), msg_type: int = constants.MessageType.Info, ) -> Future: """Request the client show a pop-up message, with action buttons. Parameters ---------- actions: list[dict] e.g. [{"title": "A"}, {"title": "B"}] """ # for use see: https://github.com/Microsoft/language-server-protocol/issues/230 return self._endpoint.request( "window/showMessageRequest", params={ "type": msg_type, "message": message, "actions": list(actions) }, ) def request_config(self, items: List[dict]) -> Future: """Request configuration settings from the client. Parameters ---------- items : list[dict] e.g. 
[{"section": "rst_lsp"}] """ return self._endpoint.request("workspace/configuration", params={"items": items}) def publish_diagnostics(self, doc_uri: str, diagnostics: List[dict]): """Request configuration settings from the client.""" self._endpoint.notify( "textDocument/publishDiagnostics", params={ "uri": doc_uri, "diagnostics": diagnostics }, ) def apply_workspace_edit(self, edit: WorkspaceEdit): """Request to modify resource on the client side.""" return self._endpoint.request("workspace/applyEdit", params={"edit": edit}) def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != "exit": # exit is the only allowed method during shutdown logger.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError try: return super(RstLanguageServer, self).__getitem__(item) except KeyError: # Fallback through extra dispatchers for dispatcher in self._dispatchers: try: return dispatcher[item] except KeyError: continue raise KeyError() def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): # Note: LSP protocol indicates that the server process should remain alive after # the client's Shutdown request, and wait for the client's Exit notification. for workspace in self.workspaces.values(): workspace.close() # TODO remove root cache? self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def match_uri_to_workspace(self, uri: str) -> Workspace: return uri2workspace(uri, self.workspaces, self.workspace) def match_uri_to_document(self, uri: str) -> Document: workspace = uri2workspace(uri, self.workspaces, self.workspace) return workspace.get_document(uri) def call_plugins(self, hook_name, doc_uri: Optional[str] = None, **kwargs): """Calls hook_name and returns a list of results from all registered handlers""" logger.debug("calling plugins") workspace = self.match_uri_to_workspace(doc_uri) doc = workspace.get_document(doc_uri) if doc_uri else None hook_handlers = self.config.plugin_manager.subset_hook_caller( hook_name, self.config.disabled_plugins) return hook_handlers(config=self.config, workspace=workspace, document=doc, **kwargs) @debounce(LINT_DEBOUNCE, keyed_by="doc_uri") def lint(self, doc_uri, is_saved): workspace = self.match_uri_to_workspace(doc_uri) if doc_uri in workspace.documents: self.publish_diagnostics( doc_uri, utils.flatten( self.call_plugins("rst_lint", doc_uri, is_saved=is_saved)), ) def m_initialize( self, processId: Optional[int] = None, rootUri: Optional[int] = None, rootPath: Optional[str] = None, initializationOptions: Optional[Any] = None, **_kwargs, ): logger.debug( "Language server initialized with %s %s %s %s", processId, rootUri, rootPath, initializationOptions, ) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else "" self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri self.config = Config( rootUri, initializationOptions or {}, processId, _kwargs.get("capabilities", {}), ) self.workspace = Workspace(rootUri, server=self, config=self.config) self.workspaces[rootUri] = self.workspace if (self._check_parent_process and processId is not None and self.watching_thread is None): def watch_parent_process(pid): # exit when the given pid is not alive if not utils.is_process_alive(pid): logger.info("parent process %s is not alive, exiting!", pid) self.m_exit() else: threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() self.watching_thread = 
threading.Thread( target=watch_parent_process, args=(processId, )) self.watching_thread.daemon = True self.watching_thread.start() return {"capabilities": self.capabilities()} def m_initialized(self, **_kwargs): pass def m_workspace__did_change_configuration(self, settings=None): self.config.update((settings or {}).get(CONFIG_NAMESPACE, {})) for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] # TODO debounce update_config (since requires read of all files) workspace.update_config(self.config) for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) def m_workspace__did_change_workspace_folders(self, added=None, removed=None, **_kwargs): for removed_info in removed: removed_uri = removed_info["uri"] self.workspaces.pop(removed_uri) for added_info in added: added_uri = added_info["uri"] self.workspaces[added_uri] = Workspace(added_uri, server=self, config=self.config) # Migrate documents that are on the root workspace and have a better match now doc_uris = list(self.workspace.documents.keys()) for uri in doc_uris: doc = self.workspace._open_docs.pop(uri) new_workspace = self.match_uri_to_workspace(uri) new_workspace._docs[uri] = doc def m_workspace__did_change_watched_files(self, changes: List[FileEvent], **_kwargs): self.log_message(f"didChangeWatchedFile {changes}") # TODO use to remove deleted files from the database? # not working at moment, need to watch RST on client? def m_text_document__did_open(self, textDocument: TextDocument, **_kwargs): workspace = self.match_uri_to_workspace(textDocument["uri"]) workspace.put_document(textDocument) self.lint(textDocument["uri"], is_saved=False) def m_text_document__did_close(self, textDocument: TextDocument, **_kwargs): workspace = self.match_uri_to_workspace(textDocument["uri"]) workspace.rm_document(textDocument["uri"]) def m_text_document__did_save(self, textDocument: TextDocument, **_kwargs): self.lint(textDocument["uri"], is_saved=False) def m_text_document__did_change(self, contentChanges: List[TextEdit], textDocument: TextDocument, **_kwargs): workspace = self.match_uri_to_workspace(textDocument["uri"]) for change in contentChanges: workspace.update_document(textDocument["uri"], change, version=textDocument.get("version")) self.lint(textDocument["uri"], is_saved=False) # FEATURES # -------- def m_text_document__code_lens(self, textDocument: TextDocument, **_kwargs): return utils.flatten( self.call_plugins(PluginTypes.rst_code_lens.value, textDocument["uri"])) def m_text_document__completion(self, textDocument: TextDocument, position: Position, **_kwargs) -> CompletionList: completions = self.call_plugins(PluginTypes.rst_completions.value, textDocument["uri"], position=position) return {"isIncomplete": False, "items": utils.flatten(completions)} def m_text_document__definition(self, textDocument: TextDocument, position: Position, **_kwargs) -> List[Location]: # TODO can also return LinkLocation return utils.flatten( self.call_plugins( PluginTypes.rst_definitions.value, textDocument["uri"], position=position, )) def m_text_document__document_symbol(self, textDocument: TextDocument, **_kwargs) -> List[DocumentSymbol]: return utils.flatten( self.call_plugins(PluginTypes.rst_document_symbols.value, textDocument["uri"])) def m_text_document__folding_range(self, textDocument: TextDocument, **_kwargs): return self.call_plugins(PluginTypes.rst_folding_range.value, textDocument["uri"]) def m_text_document__hover(self, textDocument: TextDocument, position: Position, **_kwargs): return 
self.call_plugins(PluginTypes.rst_hover.value, textDocument["uri"], position=position) or { "contents": "" } def m_text_document__references(self, textDocument: TextDocument, position: Position, context=None, **_kwargs) -> List[Location]: return utils.flatten( self.call_plugins( PluginTypes.rst_references.value, textDocument["uri"], position=position, # Include the declaration of the current symbol exclude_declaration=not context["includeDeclaration"], )) def m_workspace__execute_command(self, command: str, arguments: Optional[List[Any]] = None): """The workspace/executeCommand request is sent from the client to the server, to trigger command execution on the server. In most cases the server creates a WorkspaceEdit structure and applies the changes to the workspace using the request workspace/applyEdit, which is sent from the server to the client. """ edit = self.call_plugins(PluginTypes.rst_execute_command.value, command=command, arguments=arguments) self.apply_workspace_edit(edit)
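

# --- Illustrative usage sketch (not part of the original source) ---
# show_message_request returns the Endpoint's Future, so a caller can react to
# whichever button the user picked; the action titles and callback here are
# made up for the example.
def _ask_to_reindex(server: "RstLanguageServer"):
    future = server.show_message_request(
        "Re-index the workspace now?",
        actions=[{"title": "Yes"}, {"title": "No"}],
    )
    future.add_done_callback(
        lambda fut: logger.info("user selected: %s", fut.result()))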
class TestValidProject(TestCase): params = { "rootUri": uris.from_fs_path(TEST_PROJECT), "initializationOptions": {"project_file": "config.json"}, } def setUp(self): setupTestSuport(TEST_TEMP_PATH) _logger.debug("Creating server") tx_r, tx_w = os.pipe() self.tx_stream_reader = JsonRpcStreamReader(os.fdopen(tx_r, "rb")) rx_stream = MagicMock() rx_stream.closed = False self.server = lsp.HdlCheckerLanguageServer(rx_stream, os.fdopen(tx_w, "wb")) # Initialize server _logger.info("Calling m_initialize") self.assertEqual( self.server.m_initialize(**(self.params or {})), { "capabilities": { "textDocumentSync": 1, "definitionProvider": True, "hoverProvider": True, "referencesProvider": True, } }, ) _logger.info("Calling m_initialized") with patch("hdl_checker.lsp.onNewReleaseFound"): self.assertIsNone(self.server.m_initialized()) def teardown(self): _logger.debug("Shutting down server") msg = LSP_MSG_TEMPLATE.copy() msg.update({"method": "exit"}) self.server._endpoint.consume(msg) # Close the pipe from the server to stdout and empty any pending # messages self.tx_stream_reader.close() self.tx_stream_reader.listen(_logger.fatal) del self.server def _checkLintFileOnOpen(self, source): return self._checkLintFileOnMethod(source, "m_text_document__did_open") def _checkLintFileOnSave(self, source): return self._checkLintFileOnMethod(source, "m_text_document__did_save") def _checkLintFileOnMethod(self, source, method): with patch.object(self.server.workspace, "publish_diagnostics"): _logger.info("Sending %s request", method) getattr(self.server, method)( textDocument={"uri": unicode(uris.from_fs_path(source)), "text": None} ) mock_call = _waitOnMockCall(self.server.workspace.publish_diagnostics) doc_uri, diagnostics = mock_call[1] _logger.info("doc_uri: %s", doc_uri) _logger.info("diagnostics: %s", diagnostics) self.assertEqual(doc_uri, uris.from_fs_path(source)) return diagnostics def test_LintFileOnOpening(self): source = p.join(TEST_PROJECT, "basic_library", "clk_en_generator.vhd") with patch( "hdl_checker.lsp.Server.getMessagesByPath", return_value=[CheckerDiagnostic(filename=Path(source), text="some text")], ) as meth: self.assertCountEqual( self._checkLintFileOnOpen(source), [lsp.checkerDiagToLspDict(CheckerDiagnostic(text="some text"))], ) meth.assert_called_once_with(Path(source)) def runTestBuildSequenceTable(self, tablefmt): very_common_pkg = Path( p.join(TEST_PROJECT, "basic_library", "very_common_pkg.vhd") ) clk_en_generator = Path( p.join(TEST_PROJECT, "basic_library", "clk_en_generator.vhd") ) expected = [ "Build sequence for %s is" % str(clk_en_generator), "", tabulate( [ (1, "basic_library", str(very_common_pkg)), (2, DEFAULT_LIBRARY.name, str(clk_en_generator)), ], headers=("#", "Library", "Path"), tablefmt=tablefmt, ), ] self.assertEqual( self.server._getBuildSequenceForHover(clk_en_generator), "\n".join(expected) ) @patch("hdl_checker.lsp.HdlCheckerLanguageServer._use_markdown_for_hover", 0) @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_ReportBuildSequencePlain(self): self.runTestBuildSequenceTable(tablefmt="plain") @patch("hdl_checker.lsp.HdlCheckerLanguageServer._use_markdown_for_hover", 1) @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_ReportBuildSequenceMarkdown(self): self.runTestBuildSequenceTable(tablefmt="github") @patch.object( hdl_checker.base_server.BaseServer, "resolveDependencyToPath", lambda self, _: None, ) def 
test_DependencyInfoForPathNotFound(self): path = Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")) dependency = RequiredDesignUnit( name=Identifier("clock_divider"), library=Identifier("basic_library"), owner=path, locations=(), ) self.assertEqual( self.server._getDependencyInfoForHover(dependency), "Couldn't find a source defining 'basic_library.clock_divider'", ) @patch.object( hdl_checker.base_server.BaseServer, "resolveDependencyToPath", lambda self, _: (Path("some_path"), Identifier("some_library")), ) def test_ReportDependencyInfo(self): path = Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")) dependency = RequiredDesignUnit( name=Identifier("clock_divider"), library=Identifier("basic_library"), owner=path, locations=(), ) self.assertEqual( self.server._getDependencyInfoForHover(dependency), 'Path "some_path", library "some_library"', ) def test_ReportDesignUnitAccordingToPosition(self): UNIT_A = VhdlDesignUnit( owner=Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")), type_=DesignUnitType.entity, name="unit_a", locations=(Location(line=1, column=2), Location(line=3, column=4)), ) UNIT_B = VerilogDesignUnit( owner=Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")), type_=DesignUnitType.package, name="unit_b", locations=(Location(line=5, column=6), Location(line=7, column=8)), ) DEP_A = RequiredDesignUnit( name=Identifier("dep_a"), library=Identifier("lib_a"), owner=Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")), locations=(Location(line=9, column=10), Location(line=11, column=12)), ) DEP_B = RequiredDesignUnit( name=Identifier("dep_a"), library=Identifier("lib_a"), owner=Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")), locations=(Location(line=13, column=14), Location(line=15, column=16)), ) def getDesignUnitsByPath(self, path): # pylint: disable=unused-argument if path != Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")): it.fail("Expected foo.vhd but got %s" % path) return {UNIT_A, UNIT_B} def getDependenciesByPath(self, path): # pylint: disable=unused-argument if path != Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")): it.fail("Expected foo.vhd but got %s" % path) return {DEP_A, DEP_B} patches = ( patch.object( hdl_checker.database.Database, "getDesignUnitsByPath", getDesignUnitsByPath, ), patch.object( hdl_checker.database.Database, "getDependenciesByPath", getDependenciesByPath, ), ) path = Path(p.join(TEST_PROJECT, "another_library", "foo.vhd")) for _patch in patches: _patch.start() # Check locations outside return nothing self.assertIsNone(self.server._getElementAtPosition(path, Location(0, 0))) # Check design units are found, ensure boundaries match self.assertIsNone(self.server._getElementAtPosition(path, Location(1, 1))) self.assertIs(self.server._getElementAtPosition(path, Location(1, 2)), UNIT_A) self.assertIs(self.server._getElementAtPosition(path, Location(1, 7)), UNIT_A) self.assertIsNone(self.server._getElementAtPosition(path, Location(1, 8))) self.assertIsNone(self.server._getElementAtPosition(path, Location(3, 3))) self.assertIs(self.server._getElementAtPosition(path, Location(3, 4)), UNIT_A) self.assertIs(self.server._getElementAtPosition(path, Location(3, 9)), UNIT_A) self.assertIsNone(self.server._getElementAtPosition(path, Location(3, 10))) self.assertIsNone(self.server._getElementAtPosition(path, Location(5, 5))) self.assertIs(self.server._getElementAtPosition(path, Location(5, 6)), UNIT_B) self.assertIs(self.server._getElementAtPosition(path, Location(5, 11)), UNIT_B) 
self.assertIsNone(self.server._getElementAtPosition(path, Location(5, 12))) self.assertIsNone(self.server._getElementAtPosition(path, Location(7, 7))) self.assertIs(self.server._getElementAtPosition(path, Location(7, 8)), UNIT_B) self.assertIs(self.server._getElementAtPosition(path, Location(7, 13)), UNIT_B) self.assertIsNone(self.server._getElementAtPosition(path, Location(7, 14))) # Now check dependencies self.assertIsNone(self.server._getElementAtPosition(path, Location(9, 9))) self.assertIs(self.server._getElementAtPosition(path, Location(9, 10)), DEP_A) self.assertIs(self.server._getElementAtPosition(path, Location(9, 20)), DEP_A) self.assertIsNone(self.server._getElementAtPosition(path, Location(9, 21))) self.assertIsNone(self.server._getElementAtPosition(path, Location(11, 11))) self.assertIs(self.server._getElementAtPosition(path, Location(11, 12)), DEP_A) self.assertIs(self.server._getElementAtPosition(path, Location(11, 22)), DEP_A) self.assertIsNone(self.server._getElementAtPosition(path, Location(11, 23))) self.assertIsNone(self.server._getElementAtPosition(path, Location(13, 13))) self.assertIs(self.server._getElementAtPosition(path, Location(13, 14)), DEP_B) self.assertIs(self.server._getElementAtPosition(path, Location(13, 24)), DEP_B) self.assertIsNone(self.server._getElementAtPosition(path, Location(13, 25))) self.assertIsNone(self.server._getElementAtPosition(path, Location(15, 15))) self.assertIs(self.server._getElementAtPosition(path, Location(15, 16)), DEP_B) self.assertIs(self.server._getElementAtPosition(path, Location(15, 26)), DEP_B) self.assertIsNone(self.server._getElementAtPosition(path, Location(15, 27))) for _patch in patches: _patch.stop() @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_HoverOnInvalidRange(self): path = p.join(TEST_PROJECT, "another_library", "foo.vhd") self.assertIsNone( self.server.hover(uris.from_fs_path(path), {"line": 0, "character": 0}) ) @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_HoverOnDesignUnit(self): path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd") very_common_pkg = p.join(TEST_PROJECT, "basic_library", "very_common_pkg.vhd") package_with_constants = p.join( TEST_PROJECT, "basic_library", "package_with_constants.vhd" ) clock_divider = p.join(TEST_PROJECT, "basic_library", "clock_divider.vhd") expected = [ "Build sequence for %s is" % str(path_to_foo), "", tabulate( [ (1, "basic_library", str(very_common_pkg)), (2, "basic_library", str(package_with_constants)), (3, "basic_library", str(clock_divider)), (4, DEFAULT_LIBRARY.name, str(path_to_foo)), ], headers=("#", "Library", "Path"), tablefmt="plain", ), ] self.assertDictEqual( self.server.hover( uris.from_fs_path(path_to_foo), {"line": 7, "character": 7} ), {"contents": "\n".join(expected)}, ) @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_HoverOnDependency(self): path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd") clock_divider = p.join(TEST_PROJECT, "basic_library", "clock_divider.vhd") self.assertDictEqual( self.server.hover( uris.from_fs_path(path_to_foo), {"line": 32, "character": 32} ), {"contents": 'Path "%s", library "basic_library"' % clock_divider}, ) @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_GetDefinitionMatchingDependency(self): source = p.join(TEST_PROJECT, "basic_library", 
"use_entity_a_and_b.vhd") target = p.join(TEST_PROJECT, "basic_library", "two_entities_one_file.vhd") definitions = self.server.definitions( uris.from_fs_path(source), {"line": 1, "character": 9} ) self.assertIn( { "uri": uris.from_fs_path(target), "range": { "start": {"line": 1, "character": 7}, "end": {"line": 1, "character": 15}, }, }, definitions, ) self.assertIn( { "uri": uris.from_fs_path(target), "range": { "start": {"line": 4, "character": 7}, "end": {"line": 4, "character": 15}, }, }, definitions, ) @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_GetDefinitionBuiltInLibrary(self): path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd") self.assertEqual( self.server.definitions( uris.from_fs_path(path_to_foo), {"line": 3, "character": 15} ), [], ) @patch( "hdl_checker.builders.base_builder.BaseBuilder.builtin_libraries", (Identifier("ieee"),), ) def test_GetDefinitionNotKnown(self): path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd") self.assertEqual( self.server.definitions( uris.from_fs_path(path_to_foo), {"line": 0, "character": 0} ), [], ) @patch.object( hdl_checker.database.Database, "getReferencesToDesignUnit", return_value=[ RequiredDesignUnit( name=Identifier("clock_divider"), library=Identifier("basic_library"), owner=Path("some_path"), locations=(Location(1, 2), Location(3, 4)), ) ], ) def test_ReferencesOfAValidElement(self, get_references): path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd") # Make sure we picked up an existing element unit = self.server._getElementAtPosition(Path(path_to_foo), Location(7, 7)) self.assertIsNotNone(unit) self.assertCountEqual( self.server.references( doc_uri=uris.from_fs_path(path_to_foo), position={"line": 7, "character": 7}, exclude_declaration=True, ), ( { "uri": uris.from_fs_path("some_path"), "range": { "start": {"line": 1, "character": 2}, "end": {"line": 1, "character": 2}, }, }, { "uri": uris.from_fs_path("some_path"), "range": { "start": {"line": 3, "character": 4}, "end": {"line": 3, "character": 4}, }, }, ), ) get_references.assert_called_once() get_references.reset_mock() self.assertCountEqual( self.server.references( doc_uri=uris.from_fs_path(path_to_foo), position={"line": 7, "character": 7}, exclude_declaration=False, ), ( { "uri": uris.from_fs_path(path_to_foo), "range": { "start": {"line": 7, "character": 7}, "end": {"line": 7, "character": 7}, }, }, { "uri": uris.from_fs_path("some_path"), "range": { "start": {"line": 1, "character": 2}, "end": {"line": 1, "character": 2}, }, }, { "uri": uris.from_fs_path("some_path"), "range": { "start": {"line": 3, "character": 4}, "end": {"line": 3, "character": 4}, }, }, ), ) def test_ReferencesOfAnInvalidElement(self): path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd") # Make sure there's no element at this location unit = self.server._getElementAtPosition(Path(path_to_foo), Location(0, 0)) self.assertIsNone(unit) for exclude_declaration in (True, False): self.assertIsNone( self.server.references( doc_uri=uris.from_fs_path(path_to_foo), position={"line": 0, "character": 0}, exclude_declaration=exclude_declaration, ) )
from io import BytesIO

from pyls_jsonrpc.streams import JsonRpcStreamReader

# A single JSON-RPC message framed the way the protocol expects: a
# Content-Length header (49 bytes, matching the body exactly), an optional
# Content-Type header, a blank line, then the JSON body.
lines = [
    b'Content-Length: 49\r\n'
    b'Content-Type: application/vscode-jsonrpc; charset=utf8\r\n'
    b'\r\n'
    b'{"id": "hello", "method": "method", "params": {}}'
]

o = BytesIO()
for line in lines:
    o.write(line)
o.seek(0)

r = JsonRpcStreamReader(o)


def consume(d):
    # Called once per decoded message; just echo it.
    print("!", d, "!")


r.listen(consume)  # reads until EOF, invoking consume for each framed message
r.close()
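# Complementary sketch (assuming the pyls_jsonrpc API used throughout this
# document): JsonRpcStreamWriter produces the same Content-Length framing that
# JsonRpcStreamReader consumes above, so the header never has to be counted by
# hand.
from io import BytesIO

from pyls_jsonrpc.streams import JsonRpcStreamWriter

out = BytesIO()
writer = JsonRpcStreamWriter(out)
writer.write({"id": "hello", "method": "method", "params": {}})
print(out.getvalue())  # framed bytes: Content-Length header, blank line, JSON body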
class PythonLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ # pylint: disable=too-many-public-methods,redefined-builtin def __init__(self, rx, tx, check_parent_process=False): self.workspace = None self.config = None self.root_uri = None self.watching_thread = None self.workspaces = {} self.uri_workspace_mapper = {} self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._dispatchers = [] self._shutdown = False validator = ConfigValidator(None, True, False) self.config_spec = validator.get_config_spec() def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != 'exit': # exit is the only allowed method during shutdown log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError try: return super(PythonLanguageServer, self).__getitem__(item) except KeyError: # Fallback through extra dispatchers for dispatcher in self._dispatchers: try: return dispatcher[item] except KeyError: continue raise KeyError() def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def _match_uri_to_workspace(self, uri): workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces) return self.workspaces.get(workspace_uri, self.workspace) def capabilities(self): server_capabilities = { 'codeActionProvider': True, 'codeLensProvider': { 'resolveProvider': False, # We may need to make this configurable }, 'completionProvider': { 'resolveProvider': False, # We know everything ahead of time 'triggerCharacters': ['.'] }, 'documentFormattingProvider': True, 'documentHighlightProvider': True, 'documentRangeFormattingProvider': True, 'documentSymbolProvider': True, 'definitionProvider': True, 'executeCommandProvider': { 'commands': [] }, 'hoverProvider': True, 'referencesProvider': True, 'renameProvider': True, 'signatureHelpProvider': { 'triggerCharacters': ['(', ',', '='] }, 'textDocumentSync': { 'change': lsp.TextDocumentSyncKind.INCREMENTAL, 'save': { 'includeText': True, }, 'openClose': True, }, 'workspace': { 'workspaceFolders': { 'supported': True, 'changeNotifications': True } }, 'experimental': [] } log.info('Server capabilities: %s', server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else '' self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri self.workspace = Workspace(rootUri, self._endpoint) self.workspaces[rootUri] = self.workspace self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {})) if self._check_parent_process and processId is not None and self.watching_thread is None: def watch_parent_process(pid): # exit when the given pid is not alive if not _utils.is_process_alive(pid): 
log.info("parent process %s is not alive", pid) self.m_exit() else: threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() self.watching_thread = threading.Thread( target=watch_parent_process, args=(processId, )) self.watching_thread.daemon = True self.watching_thread.start() # Get our capabilities return {'capabilities': self.capabilities()} def m_initialized(self, **_kwargs): pass def code_actions(self, doc_uri, range, context): return [] def code_lens(self, doc_uri): return [] def _get_position_path(self, config, position): line = position['line'] character = position["character"] candidate_key = None if hasattr(config, "lc"): for key, lc in config.lc.data.items(): if len(lc) >= 4 and ((lc[0] <= line and lc[3] <= character) or (lc[1] < character and lc[2] < line)): candidate_key = key if candidate_key is not None: return [candidate_key] + self._get_position_path( config[candidate_key], position) else: return [] def _get_settings_suggestion(self, settings_name): suggestions = [] for key, value in self.config_spec.get(settings_name, {}).items(): if key.startswith("__"): continue if value[1].startswith("subconfig") or value[0] in ("list", "dict"): insert_text = key + ":\n " else: insert_text = key + ": " suggestions.append((key, insert_text, "")) return suggestions def _get_settings_value_suggestions(self, config, settings): if settings[1].startswith("enum"): values = settings[1][5:-1].split(",") suggestions = [(value, value + "\n", "") for value in values] elif settings[1].startswith("machine"): device = settings[1][8:-1] devices = self.workspace.get_complete_config().get(device, {}) suggestions = [(device, device + "\n", "") for device in devices] elif settings[1].startswith("subconfig"): settings_name = settings[1][10:-1] suggestions = self._get_settings_suggestion(settings_name) elif settings[1] == "bool": suggestions = [("True", "True\n", "(Default)" if "True" == settings[2] else ""), ("False", "False\n", "(Default)" if "False" == settings[2] else "")] else: suggestions = [] return suggestions def completions(self, doc_uri, position): completions = [] if position["line"] == 0 and position["character"] == 0: return { 'isIncomplete': False, 'items': [{ 'label': "#config_version=5", 'kind': lsp.CompletionItemKind.Text, 'detail': "", 'documentation': "", 'sortText': "#config_version=5", 'insertText': "#config_version=5\n" }, { 'label': "#show_version=5", 'kind': lsp.CompletionItemKind.Text, 'detail': "", 'documentation': "", 'sortText': "#show_version=5", 'insertText': "#show_version=5\n" }] } document = self.workspace.get_document(doc_uri) path = self._get_position_path(document.config_roundtrip, position) if len(path) == 0: # global level -> all devices are valid # TODO: check if this is a mode or machine file suggestions = [(key, key + ":\n ", "") for key, value in self.config_spec.items() if "machine" in value.get("__valid_in__", [])] elif len(path) == 1: # device name level -> no suggestions suggestions = [] elif len(path) == 2: # device level -> suggest config options suggestions = self._get_settings_suggestion(path[0]) elif len(path) == 3: # settings level device_settings = self.config_spec.get(path[0], {}) attribute_settings = device_settings.get(path[2], ["", "", ""]) suggestions = self._get_settings_value_suggestions( config, attribute_settings) elif len(path) >= 3: device_settings = self.config_spec.get(path[0], {}) for i in range(2, len(path) - 1): attribute_settings = device_settings.get(path[i], ["", "", ""]) if 
attribute_settings[1].startswith("subconfig"): settings_name = attribute_settings[1][10:-1] device_settings = self.config_spec.get(settings_name, {}) else: return [] attribute_settings = device_settings.get(path[len(path) - 1], ["", "", ""]) suggestions = self._get_settings_value_suggestions( config, attribute_settings) else: suggestions = [] for key, insertText, value in suggestions: completions.append({ 'label': key, 'kind': lsp.CompletionItemKind.Property, 'detail': "{}".format(value), 'documentation': "{} {}".format(key, value), 'sortText': key, 'insertText': insertText }) return {'isIncomplete': False, 'items': completions} def definitions(self, doc_uri, position): return [] def document_symbols(self, doc_uri): return [] def execute_command(self, command, arguments): return None def format_document(self, doc_uri): return None def format_range(self, doc_uri, range): return None def highlight(self, doc_uri, position): return None def hover(self, doc_uri, position): return {'contents': ''} @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri') def lint(self, doc_uri, is_saved): # Since we're debounced, the document may no longer be open workspace = self._match_uri_to_workspace(doc_uri) if doc_uri in workspace.documents: workspace.publish_diagnostics(doc_uri, []) def references(self, doc_uri, position, exclude_declaration): return [] def rename(self, doc_uri, position, new_name): return None def signature_help(self, doc_uri, position): return None def m_text_document__did_close(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) workspace.rm_document(textDocument['uri']) def m_text_document__did_open(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) self.lint(textDocument['uri'], is_saved=True) def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) for change in contentChanges: workspace.update_document(textDocument['uri'], change, version=textDocument.get('version')) self.lint(textDocument['uri'], is_saved=False) def m_text_document__did_save(self, textDocument=None, **_kwargs): self.lint(textDocument['uri'], is_saved=True) def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs): return self.code_actions(textDocument['uri'], range, context) def m_text_document__code_lens(self, textDocument=None, **_kwargs): return self.code_lens(textDocument['uri']) def m_text_document__completion(self, textDocument=None, position=None, **_kwargs): return self.completions(textDocument['uri'], position) def m_text_document__definition(self, textDocument=None, position=None, **_kwargs): return self.definitions(textDocument['uri'], position) def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs): return self.highlight(textDocument['uri'], position) def m_text_document__hover(self, textDocument=None, position=None, **_kwargs): return self.hover(textDocument['uri'], position) def m_text_document__document_symbol(self, textDocument=None, **_kwargs): return self.document_symbols(textDocument['uri']) def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs): # For now we're ignoring formatting options. 
return self.format_document(textDocument['uri']) def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs): return self.rename(textDocument['uri'], position, newName) def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs): # Again, we'll ignore formatting options for now. return self.format_range(textDocument['uri'], range) def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs): exclude_declaration = not context['includeDeclaration'] return self.references(textDocument['uri'], position, exclude_declaration) def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs): return self.signature_help(textDocument['uri'], position) def m_workspace__did_change_configuration(self, settings=None): self.config.update((settings or {}).get('pyls', {})) for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) def m_workspace__did_change_workspace_folders(self, added=None, removed=None, **_kwargs): for removed_info in removed: removed_uri = removed_info['uri'] self.workspaces.pop(removed_uri) for added_info in added: added_uri = added_info['uri'] self.workspaces[added_uri] = Workspace(added_uri, self._endpoint) # Migrate documents that are on the root workspace and have a better # match now doc_uris = list(self.workspace._docs.keys()) for uri in doc_uris: doc = self.workspace._docs.pop(uri) new_workspace = self._match_uri_to_workspace(uri) new_workspace._docs[uri] = doc def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set() config_changed = False for d in (changes or []): if d['uri'].endswith(MPF_FILE_EXTENSIONS): changed_py_files.add(d['uri']) elif d['uri'].endswith(CONFIG_FILEs): config_changed = True if config_changed: self.config.settings.cache_clear() elif not changed_py_files: # Only externally changed python files and lint configs may result in changed diagnostics. return for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] for doc_uri in workspace.documents: # Changes in doc_uri are already handled by m_text_document__did_save if doc_uri not in changed_py_files: self.lint(doc_uri, is_saved=False) def m_workspace__execute_command(self, command=None, arguments=None): return self.execute_command(command, arguments)
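# Hedged sketch of how a server like the one above is typically wired to a
# client over stdio (mirroring the usual pyls entry point rather than any
# specific project's CLI); the binary buffers are what JsonRpcStreamReader and
# JsonRpcStreamWriter expect.
import sys


def main():
    server = PythonLanguageServer(sys.stdin.buffer, sys.stdout.buffer,
                                  check_parent_process=True)
    server.start()  # blocks, dispatching incoming JSON-RPC calls to the m_* handlers


if __name__ == '__main__':
    main()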
class MesonLanguageServer(MethodDispatcher):

    workspace: Optional[Workspace]
    config: Optional[Config]

    def __init__(self, rx, tx):
        self.workspace = None
        self.config = None
        self.rpc_reader = JsonRpcStreamReader(rx)
        self.rpc_writer = JsonRpcStreamWriter(tx)
        self.endpoint = Endpoint(self, self.rpc_writer.write, max_workers=64)
        self.shutdown = False

    def start(self):
        logger.info('Starting')
        self.rpc_reader.listen(self.endpoint.consume)

    @staticmethod
    def capabilities():
        capabilities = {
            'completionProvider': True,
            'textDocumentSync': consts.TextDocumentSyncKind.INCREMENTAL
        }
        return capabilities

    def m_initialize(self, **kwargs):
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Initializing: %s", repr(kwargs))
        else:
            logger.info('Server initializing: %s', repr(kwargs))
        if 'rootUri' not in kwargs:
            root_uri = Path(kwargs.get('rootPath')).as_uri()
        else:
            root_uri = kwargs.get('rootUri')
        self.workspace = Workspace(root_uri, self.endpoint)
        self.config = Config(root_uri, kwargs.get('initializationOptions', {}),
                             kwargs.get('processId'), kwargs.get('capabilities'))
        return dict(capabilities=self.capabilities())

    def m_initialized(self, **_kwargs):
        pass

    def m_text_document__did_open(self, textDocument: dict):
        self.workspace.update(
            textDocument,
            dict(text=textDocument.get('text'),
                 version=textDocument.get('version')))
        self.workspace.build_ast()

    def m_text_document__did_close(self, textDocument):
        self.workspace.pop_document(textDocument)
        self.workspace.build_ast()

    def m_text_document__did_change(self, textDocument, contentChanges):
        for change in contentChanges:
            self.workspace.update(textDocument, change)
        self.workspace.build_ast()

    def m_text_document__did_save(self, textDocument):
        self.workspace.documents.get(textDocument.get('uri')).refresh()

    def m_workspace__did_change_watched_files(self, changes):
        self.workspace.build_ast()

    def m_text_document__hover(self, textDocument, position):
        doc = self.workspace.get_document(textDocument.get('uri'))
        start_pos, end_pos, word = doc.get_word_at_position(**position)
        start_posd = doc.get_char_count_position(start_pos)
        end_posd = doc.get_char_count_position(end_pos)
        return dict(
            contents=f"{word} (from {start_posd[0]}:{start_posd[1]} to {end_posd[0]}:{end_posd[1]})"
        )

    def m_text_document__completion(self, **kwargs):
        return self.workspace.symbols

    def m_shutdown(self, **_kwargs):
        logger.warning('Shutting down')
        self.shutdown = True

    def m_exit(self, **_kwargs):
        self.endpoint.shutdown()
        self.rpc_reader.close()
        self.rpc_writer.close()
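# Hedged sketch: driving MesonLanguageServer in memory with a single framed
# "initialize" request, reusing the BytesIO framing shown earlier. The rootUri
# value is illustrative, and Content-Length is computed so it always matches
# the body; the project's Workspace/Config classes are assumed to be importable.
import json
from io import BytesIO

body = json.dumps({
    "jsonrpc": "2.0",
    "id": 1,
    "method": "initialize",
    "params": {"processId": None,
               "rootUri": "file:///tmp/project",
               "capabilities": {}},
}).encode("utf-8")

rx = BytesIO(b"Content-Length: %d\r\n\r\n%s" % (len(body), body))
tx = BytesIO()

server = MesonLanguageServer(rx, tx)
server.start()        # consumes the single request, then returns on EOF
print(tx.getvalue())  # framed response carrying the capabilities dict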
class PythonLanguageServer(MethodDispatcher):

    def __init__(self, reader, writer):
        self._jsonrpcstreamReader = JsonRpcStreamReader(reader)
        self._jsonrpcstreamWriter = JsonRpcStreamWriter(writer)
        self._endpoint = Endpoint(self, self._jsonrpcstreamWriter.write)
        self.isrunning = True
        self.workspace = None
        self.config = None

    def run(self):
        self._jsonrpcstreamReader.listen(self._endpoint.consume)

    def m_shutdown(self):
        self.isrunning = False

    def m_exit(self):
        self._jsonrpcstreamReader.close()
        self._jsonrpcstreamWriter.close()
        self._endpoint.shutdown()
        self.workspace = None

    def __getitem__(self, item):
        log.info(item)
        return super(PythonLanguageServer, self).__getitem__(item)

    def capabilities(self):
        import mypy_server
        is_patched_mypy = mypy_server.is_patched_mypy()
        if not is_patched_mypy:
            log.info('Using non-patched mypy, rich language features not available.')
        python_38 = sys.version_info >= (3, 8)
        if not python_38:
            log.info('Using Python before 3.8, rich language features not available.')
        rich_analysis_available = is_patched_mypy and python_38
        # Not sure what these three capabilities are actually used for.
        server_capabilities = {
            'textDocumentSync': lsp.TextDocumentSyncKind.FULL,  # full document text
            'definitionProvider': rich_analysis_available,
            'hoverProvider': rich_analysis_available
        }
        return server_capabilities

    def m_initialize(self, processId=None, rootUri=None, rootPath=None,
                     initializationOptions=None, **_kwargs):
        log.info('Language server initialized with %s %s %s %s', processId,
                 rootUri, rootPath, initializationOptions)
        self.workspace = WorkSpace(rootUri, self._endpoint)
        try:
            import mypy
        except ImportError:
            log.error('The mypy module is not installed!')
            self.workspace.show_message('Mypy is not installed.',
                                        lsp.MessageType.Warning)
            return {'capabilities': None}
        self.mypyserver = mypy_server.Server(mypy_server.options,
                                             mypy_server.DEFAULT_STATUS_FILE)
        return {'capabilities': self.capabilities()}

    def m_initialized(self, **_kwargs):
        pass

    def m_text_document__did_open(self, textDocument=None, **_kwargs):
        self.workspace.put_document(textDocument['uri'], textDocument['text'],
                                    version=textDocument.get('version'))

    def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs):
        log.info(contentChanges)
        for change in contentChanges:
            self.workspace.update_document(textDocument['uri'], change,
                                           version=textDocument.get('version'))
        log.info(self.workspace._docs.items())
        # Re-check every open document with mypy.
        to_check = []
        for uri, doc in self.workspace._docs.items():
            fspath = uris.to_fs_path(uri)
            to_check.append(fspath)
        if mypy_server.mypy_version > "0.720":
            result = self.mypyserver.cmd_check(to_check, False, 80)
        else:
            result = self.mypyserver.cmd_check(to_check)
        diags = mypy_server.parse_mypy_out(result['out'])
        # Publish the parsed diagnostics (uri here is the last one visited in
        # the loop above).
        diagsparams = PublishDiagnosticParams(uri, diags).getDict()
        log.info(diagsparams)
        self.workspace.publish_diagnostics(diagsparams['uri'],
                                           diagsparams['diagnostics'])

    def m_text_document__did_save(self, textDocument=None, **_kwargs):
        import mypy_server
        mypy_server.mypy_check(self.workspace, self.config)
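# For reference, a hedged sketch of the payload shape the mypy-backed server
# above ultimately publishes: PublishDiagnosticParams(...).getDict() is assumed
# to produce a dict matching the LSP "textDocument/publishDiagnostics" params.
# The path and message below are illustrative only.
example_publish_diagnostics_params = {
    "uri": "file:///path/to/module.py",
    "diagnostics": [
        {
            "range": {
                "start": {"line": 9, "character": 0},
                "end": {"line": 9, "character": 14},
            },
            "severity": 1,  # 1=Error, 2=Warning, 3=Information, 4=Hint
            "source": "mypy",
            "message": 'Incompatible return value type (got "str", expected "int")',
        }
    ],
}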