def __init__(self, rx, tx):
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
    self._shutdown = False
def __init__(self, reader, writer):
    self._jsonrpcstreamReader = JsonRpcStreamReader(reader)
    self._jsonrpcstreamWriter = JsonRpcStreamWriter(writer)
    self._endpoint = Endpoint(self, self._jsonrpcstreamWriter.write)
    self.isrunning = True
    self.workspace = None
    self.config = None
def __enter__(self):
    """Context manager entrypoint.

    shell=True needed for pytest-cov to work in subprocess.
    """
    # pylint: disable=consider-using-with
    self._sub = subprocess.Popen(
        [
            sys.executable,
            os.path.join(os.path.dirname(__file__), "lsp_run.py"),
        ],
        stdout=subprocess.PIPE,
        stdin=subprocess.PIPE,
        bufsize=0,
        cwd=self.cwd,
        env=os.environ,
        shell="WITH_COVERAGE" in os.environ,
    )

    self._writer = JsonRpcStreamWriter(os.fdopen(self._sub.stdin.fileno(), "wb"))
    self._reader = JsonRpcStreamReader(os.fdopen(self._sub.stdout.fileno(), "rb"))

    dispatcher = {
        PUBLISH_DIAGNOSTICS: self._publish_diagnostics,
        WINDOW_SHOW_MESSAGE: self._window_show_message,
        WINDOW_LOG_MESSAGE: self._window_log_message,
    }
    self._endpoint = Endpoint(dispatcher, self._writer.write)
    self._thread_pool.submit(self._reader.listen, self._endpoint.consume)
    return self
def __init__(self, rx, tx):
    self.root_path = None
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write)
    self._dispatchers = []
    self._shutdown = False
def __init__(self, rx, tx):
    self.workspace = None
    self.config = None
    self.rpc_reader = JsonRpcStreamReader(rx)
    self.rpc_writer = JsonRpcStreamWriter(tx)
    self.endpoint = Endpoint(self, self.rpc_writer.write, max_workers=64)
    self.shutdown = False
def __init__(self, rx, tx):
    self.log = logging.getLogger("{0}.{1}".format(self.__class__.__module__, self.__class__.__name__))
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._endpoint = Endpoint(
        self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS
    )
    self._shutdown = False
def __init__(self, rx, tx, check_parent_process=False):
    self.workspace = None
    self.config = None
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._check_parent_process = check_parent_process
    self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
    self._dispatchers = []
    self._shutdown = False
def __init__(self, rx, tx):
    self.workspace = None
    self.config = None
    self.root_uri = None
    self.watching_thread = None
    self.workspaces = {}
    self.uri_workspace_mapper = {}
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
    self._shutdown = False
def __init__(self, rx, tx, check_parent_process=False):
    """Initialise the server."""
    self.root_uri = None
    self.config = None  # type: Optional[Config]
    self.workspaces = {}  # type: Dict[str, Workspace]
    self.watching_thread = None
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._check_parent_process = check_parent_process
    self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
    self._dispatchers = []
    self._shutdown = False
def __init__(self, rx, tx, check_parent_process=False):
    self.workspace = None
    self.config = None
    self.root_uri = None
    self.watching_thread = None
    self.workspaces = {}
    self.uri_workspace_mapper = {}
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._check_parent_process = check_parent_process
    self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
    self._dispatchers = []
    self._shutdown = False
    validator = ConfigValidator(None, True, False)
    self.config_spec = validator.get_config_spec()
def __init__(self, rx, tx, check_parent_process=False, sync_kind=0):
    self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
    self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
    self._check_parent_process = check_parent_process
    self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=self.MAX_WORKERS)
    # the synchronisation kind between the language server and client
    self._sync_kind = sync_kind if sync_kind else self.SYNC_INCREMENTAL
    # the actual MRO analyser
    self._analyser = None
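All of the constructors above share the same wiring: wrap the receive/transmit streams in JsonRpcStreamReader/JsonRpcStreamWriter, then build an Endpoint whose dispatcher is the server object itself and whose consumer is the writer's write method. A minimal, self-contained sketch of that pattern follows; the class name and the m_initialize stub are illustrative, not taken from any of the snippets.

import sys

from pyls_jsonrpc.dispatchers import MethodDispatcher
from pyls_jsonrpc.endpoint import Endpoint
from pyls_jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter


class MinimalServer(MethodDispatcher):
    """MethodDispatcher routes a JSON-RPC method such as 'textDocument/didOpen'
    to a handler named m_text_document__did_open ('/' becomes '__',
    camelCase becomes snake_case)."""

    def __init__(self, rx, tx):
        self._reader = JsonRpcStreamReader(rx)
        self._writer = JsonRpcStreamWriter(tx)
        # The server instance is the dispatcher; outgoing messages go to the writer.
        self._endpoint = Endpoint(self, self._writer.write)

    def start(self):
        # Blocks, feeding every incoming message to the endpoint.
        self._reader.listen(self._endpoint.consume)

    def m_initialize(self, **_kwargs):
        return {"capabilities": {"textDocumentSync": 1}}


if __name__ == "__main__":
    MinimalServer(sys.stdin.buffer, sys.stdout.buffer).start()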
class PythonLanguageServer(MethodDispatcher):
    """ Implementation of the Microsoft VSCode Language Server Protocol
    https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md
    """

    # pylint: disable=too-many-public-methods,redefined-builtin

    def __init__(self, rx, tx, check_parent_process=False):
        self.workspace = None
        self.config = None
        self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
        self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
        self._check_parent_process = check_parent_process
        self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
        self._dispatchers = []
        self._shutdown = False

    def start(self):
        """Entry point for the server."""
        self._jsonrpc_stream_reader.listen(self._endpoint.consume)

    def __getitem__(self, item):
        """Override getitem to fallback through multiple dispatchers."""
        if self._shutdown and item != 'exit':
            # exit is the only allowed method during shutdown
            log.debug("Ignoring non-exit method during shutdown: %s", item)
            raise KeyError

        try:
            return super(PythonLanguageServer, self).__getitem__(item)
        except KeyError:
            # Fallback through extra dispatchers
            for dispatcher in self._dispatchers:
                try:
                    return dispatcher[item]
                except KeyError:
                    continue

        raise KeyError()

    def m_shutdown(self, **_kwargs):
        self._shutdown = True
        return None

    def m_exit(self, **_kwargs):
        self._endpoint.shutdown()
        self._jsonrpc_stream_reader.close()
        self._jsonrpc_stream_writer.close()

    def _hook(self, hook_name, doc_uri=None, **kwargs):
        """Calls hook_name and returns a list of results from all registered handlers"""
        doc = self.workspace.get_document(doc_uri) if doc_uri else None
        hook_handlers = self.config.plugin_manager.subset_hook_caller(hook_name, self.config.disabled_plugins)
        return hook_handlers(config=self.config, workspace=self.workspace, document=doc, **kwargs)

    def capabilities(self):
        server_capabilities = {
            'codeActionProvider': True,
            'codeLensProvider': {
                'resolveProvider': False,  # We may need to make this configurable
            },
            'completionProvider': {
                'resolveProvider': False,  # We know everything ahead of time
                'triggerCharacters': ['.']
            },
            'documentFormattingProvider': True,
            'documentHighlightProvider': True,
            'documentRangeFormattingProvider': True,
            'documentSymbolProvider': True,
            'definitionProvider': True,
            'executeCommandProvider': {
                'commands': flatten(self._hook('pyls_commands'))
            },
            'hoverProvider': True,
            'referencesProvider': True,
            'renameProvider': True,
            'signatureHelpProvider': {
                'triggerCharacters': ['(', ',']
            },
            'textDocumentSync': lsp.TextDocumentSyncKind.INCREMENTAL,
            'experimental': merge(self._hook('pyls_experimental_capabilities'))
        }
        log.info('Server capabilities: %s', server_capabilities)
        return server_capabilities

    def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs):
        log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions)
        if rootUri is None:
            rootUri = uris.from_fs_path(rootPath) if rootPath is not None else ''

        self.workspace = Workspace(rootUri, self._endpoint)
        self.config = config.Config(rootUri, initializationOptions or {}, processId)
        self._dispatchers = self._hook('pyls_dispatchers')
        self._hook('pyls_initialize')

        if self._check_parent_process and processId is not None:
            def watch_parent_process(pid):
                # exit when the given pid is not alive
                if not _utils.is_process_alive(pid):
                    log.info("parent process %s is not alive", pid)
                    self.m_exit()
                log.debug("parent process %s is still alive", pid)
                threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start()

            watching_thread = threading.Thread(target=watch_parent_process, args=(processId,))
            watching_thread.daemon = True
            watching_thread.start()

        # Get our capabilities
        return {'capabilities': self.capabilities()}

    def m_initialized(self, **_kwargs):
        pass

    def code_actions(self, doc_uri, range, context):
        return flatten(self._hook('pyls_code_actions', doc_uri, range=range, context=context))

    def code_lens(self, doc_uri):
        return flatten(self._hook('pyls_code_lens', doc_uri))

    def completions(self, doc_uri, position):
        completions = self._hook('pyls_completions', doc_uri, position=position)
        return {'isIncomplete': False, 'items': flatten(completions)}

    def definitions(self, doc_uri, position):
        return flatten(self._hook('pyls_definitions', doc_uri, position=position))

    def document_symbols(self, doc_uri):
        return flatten(self._hook('pyls_document_symbols', doc_uri))

    def execute_command(self, command, arguments):
        return self._hook('pyls_execute_command', command=command, arguments=arguments)

    def format_document(self, doc_uri):
        return self._hook('pyls_format_document', doc_uri)

    def format_range(self, doc_uri, range):
        return self._hook('pyls_format_range', doc_uri, range=range)

    def highlight(self, doc_uri, position):
        return flatten(self._hook('pyls_document_highlight', doc_uri, position=position)) or None

    def hover(self, doc_uri, position):
        return self._hook('pyls_hover', doc_uri, position=position) or {'contents': ''}

    @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri')
    def lint(self, doc_uri):
        # Since we're debounced, the document may no longer be open
        if doc_uri in self.workspace.documents:
            self.workspace.publish_diagnostics(doc_uri, flatten(self._hook('pyls_lint', doc_uri)))

    def references(self, doc_uri, position, exclude_declaration):
        return flatten(self._hook('pyls_references', doc_uri, position=position,
                                  exclude_declaration=exclude_declaration))

    def rename(self, doc_uri, position, new_name):
        return self._hook('pyls_rename', doc_uri, position=position, new_name=new_name)

    def signature_help(self, doc_uri, position):
        return self._hook('pyls_signature_help', doc_uri, position=position)

    def m_text_document__did_close(self, textDocument=None, **_kwargs):
        self.workspace.rm_document(textDocument['uri'])

    def m_text_document__did_open(self, textDocument=None, **_kwargs):
        self.workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version'))
        self._hook('pyls_document_did_open', textDocument['uri'])
        self.lint(textDocument['uri'])

    def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs):
        for change in contentChanges:
            self.workspace.update_document(textDocument['uri'], change, version=textDocument.get('version'))
        self.lint(textDocument['uri'])

    def m_text_document__did_save(self, textDocument=None, **_kwargs):
        self.lint(textDocument['uri'])

    def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs):
        return self.code_actions(textDocument['uri'], range, context)

    def m_text_document__code_lens(self, textDocument=None, **_kwargs):
        return self.code_lens(textDocument['uri'])

    def m_text_document__completion(self, textDocument=None, position=None, **_kwargs):
        return self.completions(textDocument['uri'], position)

    def m_text_document__definition(self, textDocument=None, position=None, **_kwargs):
        return self.definitions(textDocument['uri'], position)

    def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs):
        return self.highlight(textDocument['uri'], position)

    def m_text_document__hover(self, textDocument=None, position=None, **_kwargs):
        return self.hover(textDocument['uri'], position)

    def m_text_document__document_symbol(self, textDocument=None, **_kwargs):
        return self.document_symbols(textDocument['uri'])

    def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs):
        # For now we're ignoring formatting options.
        return self.format_document(textDocument['uri'])

    def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs):
        return self.rename(textDocument['uri'], position, newName)

    def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs):
        # Again, we'll ignore formatting options for now.
        return self.format_range(textDocument['uri'], range)

    def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs):
        exclude_declaration = not context['includeDeclaration']
        return self.references(textDocument['uri'], position, exclude_declaration)

    def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs):
        return self.signature_help(textDocument['uri'], position)

    def m_workspace__did_change_configuration(self, settings=None):
        self.config.update((settings or {}).get('pyls', {}))
        for doc_uri in self.workspace.documents:
            self.lint(doc_uri)

    def m_workspace__did_change_watched_files(self, changes=None, **_kwargs):
        changed_py_files = set(d['uri'] for d in changes if d['uri'].endswith(PYTHON_FILE_EXTENSIONS))
        # Only externally changed python files and lint configs may result in changed diagnostics.
        if not changed_py_files:
            return
        # TODO: We currently don't cache settings, therefore we can just lint again.
        # Here would be the right point to update the settings after a change to config files.
        for doc_uri in self.workspace.documents:
            # Changes in doc_uri are already handled by m_text_document__did_save
            if doc_uri not in changed_py_files:
                self.lint(doc_uri)

    def m_workspace__execute_command(self, command=None, arguments=None):
        return self.execute_command(command, arguments)
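The lint method above is wrapped in _utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri'), and several servers further down reuse the same decorator. A sketch of what such a keyed debounce can look like is shown below; this is an approximation for illustration, not necessarily pyls's exact implementation.

import functools
import inspect
import threading


def debounce(interval_s, keyed_by=None):
    """Delay calls to the wrapped function by interval_s seconds, keeping at most
    one pending timer per value of the keyed_by argument (here: doc_uri)."""
    def wrapper(func):
        timers = {}
        lock = threading.Lock()

        @functools.wraps(func)
        def debounced(*args, **kwargs):
            # Bind the call so the key works for positional and keyword arguments alike.
            call_args = inspect.getcallargs(func, *args, **kwargs)
            key = call_args[keyed_by] if keyed_by else None

            def run():
                with lock:
                    del timers[key]
                return func(*args, **kwargs)

            with lock:
                old_timer = timers.get(key)
                if old_timer:
                    old_timer.cancel()  # a newer call supersedes the pending one
                timer = threading.Timer(interval_s, run)
                timers[key] = timer
                timer.start()

        return debounced

    return wrapper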
class MesonLanguageServer(MethodDispatcher):
    workspace: Optional[Workspace]
    config: Optional[Config]

    def __init__(self, rx, tx):
        self.workspace = None
        self.config = None
        self.rpc_reader = JsonRpcStreamReader(rx)
        self.rpc_writer = JsonRpcStreamWriter(tx)
        self.endpoint = Endpoint(self, self.rpc_writer.write, max_workers=64)
        self.shutdown = False

    def start(self):
        logger.info('Starting')
        self.rpc_reader.listen(self.endpoint.consume)

    @staticmethod
    def capabilities():
        capabilities = {
            'completionProvider': True,
            'textDocumentSync': consts.TextDocumentSyncKind.INCREMENTAL
        }
        return capabilities

    def m_initialize(self, **kwargs):
        if logger.isEnabledFor(logging.DEBUG):
            logger.debug("Initializing: %s", repr(kwargs))
        else:
            logger.info('Server initializing: %s', repr(kwargs))

        if 'rootUri' not in kwargs:
            root_uri = Path(kwargs.get('rootPath')).as_uri()
        else:
            root_uri = kwargs.get('rootUri')

        self.workspace = Workspace(root_uri, self.endpoint)
        self.config = Config(root_uri, kwargs.get('initializationOptions', {}),
                             kwargs.get('processId'), kwargs.get('capabilities'))
        return dict(capabilities=self.capabilities())

    def m_initialized(self, **_kwargs):
        pass

    def m_text_document__did_open(self, textDocument: dict):
        self.workspace.update(
            textDocument,
            dict(text=textDocument.get('text'), version=textDocument.get('version')))
        self.workspace.build_ast()

    def m_text_document__did_close(self, textDocument):
        self.workspace.pop_document(textDocument)
        self.workspace.build_ast()

    def m_text_document__did_change(self, textDocument, contentChanges):
        for change in contentChanges:
            self.workspace.update(textDocument, change)
        self.workspace.build_ast()

    def m_text_document__did_save(self, textDocument):
        self.workspace.documents.get(textDocument.get('uri')).refresh()

    def m_workspace__did_change_watched_files(self, changes):
        self.workspace.build_ast()

    def m_text_document__hover(self, textDocument, position):
        doc = self.workspace.get_document(textDocument.get('uri'))
        start_pos, end_pos, word = doc.get_word_at_position(**position)
        start_posd = doc.get_char_count_position(start_pos)
        end_posd = doc.get_char_count_position(end_pos)
        return dict(
            contents=f"{word} (from {start_posd[0]}:{start_posd[1]} to {end_posd[0]}:{end_posd[1]})"
        )

    def m_text_document__completion(self, **kwargs):
        return self.workspace.symbols

    def m_shutdown(self, **_kwargs):
        logger.warning('Shutting down')
        self.shutdown = True

    def m_exit(self, **_kwargs):
        self.endpoint.shutdown()
        self.rpc_reader.close()
        self.rpc_writer.close()
class LangServer(MethodDispatcher):
    """ Language server for coala based on JSON RPC. """

    def __init__(self, rx, tx):
        self.root_path = None
        self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
        self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
        self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write)
        self._dispatchers = []
        self._shutdown = False

    def start(self):
        self._jsonrpc_stream_reader.listen(self._endpoint.consume)

    def m_initialize(self, **params):
        """ Serve the initialize request. """
        # Notice that the root_path could be None.
        if 'rootUri' in params:
            self.root_path = path_from_uri(params['rootUri'])
        elif 'rootPath' in params:
            self.root_path = path_from_uri(params['rootPath'])
        return {'capabilities': {'textDocumentSync': 1}}

    def m_text_document__did_save(self, **params):
        """ Serve the did_save request. """
        uri = params['textDocument']['uri']
        path = path_from_uri(uri)
        diagnostics = output_to_diagnostics(
            run_coala_with_specific_file(self.root_path, path))
        self.send_diagnostics(path, diagnostics)

    def m_shutdown(self, **_kwargs):
        self._shutdown = True

    # TODO: Support did_change and did_change_watched_files.
    # def serve_change(self, request):
    #     """Serve for the request of documentation changed."""
    #     params = request['params']
    #     uri = params['textDocument']['uri']
    #     path = path_from_uri(uri)
    #     diagnostics = output_to_diagnostics(
    #         run_coala_with_specific_file(self.root_path, path))
    #     self.send_diagnostics(path, diagnostics)
    #     return None
    #
    # def serve_did_change_watched_files(self, request):
    #     """Serve for the workspace/didChangeWatchedFiles request."""
    #     changes = request['changes']
    #     for fileEvent in changes:
    #         uri = fileEvent['uri']
    #         path = path_from_uri(uri)
    #         diagnostics = output_to_diagnostics(
    #             run_coala_with_specific_file(self.root_path, path))
    #         self.send_diagnostics(path, diagnostics)

    def send_diagnostics(self, path, diagnostics):
        _diagnostics = []
        if diagnostics is not None:
            _diagnostics = diagnostics
        params = {
            'uri': 'file://{0}'.format(path),
            'diagnostics': _diagnostics,
        }
        self._endpoint.notify('textDocument/publishDiagnostics', params=params)
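A hypothetical launcher for the LangServer above, assuming it is meant to speak LSP over standard streams like the other servers in this collection; the binary stdin/stdout buffers provide the rx/tx pair expected by JsonRpcStreamReader/JsonRpcStreamWriter.

import sys

if __name__ == "__main__":
    server = LangServer(sys.stdin.buffer, sys.stdout.buffer)
    server.start()  # blocks, reading JSON-RPC messages until the stream closes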
class LspSession(MethodDispatcher):
    """Send and Receive messages over LSP as a test LS Client."""

    def __init__(self, cwd=None):
        self.cwd = cwd if cwd else os.getcwd()
        # pylint: disable=consider-using-with
        self._thread_pool = ThreadPoolExecutor()
        self._sub = None
        self._writer = None
        self._reader = None
        self._endpoint = None
        self._notification_callbacks = {}

    def __enter__(self):
        """Context manager entrypoint.

        shell=True needed for pytest-cov to work in subprocess.
        """
        # pylint: disable=consider-using-with
        self._sub = subprocess.Popen(
            [
                sys.executable,
                os.path.join(os.path.dirname(__file__), "lsp_run.py"),
            ],
            stdout=subprocess.PIPE,
            stdin=subprocess.PIPE,
            bufsize=0,
            cwd=self.cwd,
            env=os.environ,
            shell="WITH_COVERAGE" in os.environ,
        )

        self._writer = JsonRpcStreamWriter(os.fdopen(self._sub.stdin.fileno(), "wb"))
        self._reader = JsonRpcStreamReader(os.fdopen(self._sub.stdout.fileno(), "rb"))

        dispatcher = {
            PUBLISH_DIAGNOSTICS: self._publish_diagnostics,
            WINDOW_SHOW_MESSAGE: self._window_show_message,
            WINDOW_LOG_MESSAGE: self._window_log_message,
        }
        self._endpoint = Endpoint(dispatcher, self._writer.write)
        self._thread_pool.submit(self._reader.listen, self._endpoint.consume)
        return self

    def __exit__(self, typ, value, _tb):
        self.shutdown(True)
        try:
            self._sub.terminate()
        except Exception:  # pylint:disable=broad-except
            pass
        self._endpoint.shutdown()
        self._thread_pool.shutdown()

    def initialize(self, initialize_params=None, process_server_capabilities=None):
        """Sends the initialize request to LSP server."""
        server_initialized = Event()

        def _after_initialize(fut):
            if process_server_capabilities:
                process_server_capabilities(fut.result())
            self.initialized()
            server_initialized.set()

        self._send_request(
            "initialize",
            params=(initialize_params if initialize_params is not None
                    else defaults.VSCODE_DEFAULT_INITIALIZE),
            handle_response=_after_initialize,
        )

        server_initialized.wait()

    def initialized(self, initialized_params=None):
        """Sends the initialized notification to LSP server."""
        self._endpoint.notify("initialized", initialized_params)

    def shutdown(self, should_exit, exit_timeout=LSP_EXIT_TIMEOUT):
        """Sends the shutdown request to LSP server."""

        def _after_shutdown(_):
            if should_exit:
                self.exit_lsp(exit_timeout)

        self._send_request("shutdown", handle_response=_after_shutdown)

    def exit_lsp(self, exit_timeout=LSP_EXIT_TIMEOUT):
        """Handles LSP server process exit."""
        self._endpoint.notify("exit")
        assert self._sub.wait(exit_timeout) == 0

    def text_document_completion(self, completion_params):
        """Sends text document completion request to LSP server."""
        fut = self._send_request("textDocument/completion", params=completion_params)
        return fut.result()

    def text_document_rename(self, rename_params):
        """Sends text document rename request to LSP server."""
        fut = self._send_request("textDocument/rename", params=rename_params)
        return fut.result()

    def text_document_code_action(self, code_action_params):
        """Sends text document code action request to LSP server."""
        fut = self._send_request("textDocument/codeAction", params=code_action_params)
        return fut.result()

    def text_document_hover(self, hover_params):
        """Sends text document hover request to LSP server."""
        fut = self._send_request("textDocument/hover", params=hover_params)
        return fut.result()

    def text_document_signature_help(self, signature_help_params):
        """Sends text document signature help request to LSP server."""
        fut = self._send_request("textDocument/signatureHelp", params=signature_help_params)
        return fut.result()

    def text_document_definition(self, definition_params):
        """Sends text document definition request to LSP server."""
        fut = self._send_request("textDocument/definition", params=definition_params)
        return fut.result()

    def text_document_symbol(self, document_symbol_params):
        """Sends text document symbol request to LSP server."""
        fut = self._send_request("textDocument/documentSymbol", params=document_symbol_params)
        return fut.result()

    def text_document_highlight(self, document_highlight_params):
        """Sends text document highlight request to LSP server."""
        fut = self._send_request("textDocument/documentHighlight", params=document_highlight_params)
        return fut.result()

    def text_document_references(self, references_params):
        """Sends text document references request to LSP server."""
        fut = self._send_request("textDocument/references", params=references_params)
        return fut.result()

    def workspace_symbol(self, workspace_symbol_params):
        """Sends workspace symbol request to LSP server."""
        fut = self._send_request("workspace/symbol", params=workspace_symbol_params)
        return fut.result()

    def completion_item_resolve(self, resolve_params):
        """Sends completion item resolve request to LSP server."""
        fut = self._send_request("completionItem/resolve", params=resolve_params)
        return fut.result()

    def notify_did_change(self, did_change_params):
        """Sends did change notification to LSP Server."""
        self._send_notification("textDocument/didChange", params=did_change_params)

    def notify_did_save(self, did_save_params):
        """Sends did save notification to LSP Server."""
        self._send_notification("textDocument/didSave", params=did_save_params)

    def notify_did_open(self, did_open_params):
        """Sends did open notification to LSP Server."""
        self._send_notification("textDocument/didOpen", params=did_open_params)

    def set_notification_callback(self, notification_name, callback):
        """Set custom LS notification handler."""
        self._notification_callbacks[notification_name] = callback

    def get_notification_callback(self, notification_name):
        """Gets callback if set or default callback for a given LS notification."""
        try:
            return self._notification_callbacks[notification_name]
        except KeyError:

            def _default_handler(_params):
                """Default notification handler."""

            return _default_handler

    def _publish_diagnostics(self, publish_diagnostics_params):
        """Internal handler for text document publish diagnostics."""
        return self._handle_notification(PUBLISH_DIAGNOSTICS, publish_diagnostics_params)

    def _window_log_message(self, window_log_message_params):
        """Internal handler for window log message."""
        return self._handle_notification(WINDOW_LOG_MESSAGE, window_log_message_params)

    def _window_show_message(self, window_show_message_params):
        """Internal handler for window show message."""
        return self._handle_notification(WINDOW_SHOW_MESSAGE, window_show_message_params)

    def _handle_notification(self, notification_name, params):
        """Internal handler for notifications."""
        fut = Future()

        def _handler():
            callback = self.get_notification_callback(notification_name)
            callback(params)
            fut.set_result(None)

        self._thread_pool.submit(_handler)
        return fut

    def _send_request(self, name, params=None, handle_response=lambda f: f.done()):
        """Sends {name} request to the LSP server."""
        fut = self._endpoint.request(name, params)
        fut.add_done_callback(handle_response)
        return fut

    def _send_notification(self, name, params=None):
        """Sends {name} notification to the LSP server."""
        self._endpoint.notify(name, params)
class RstLanguageServer(MethodDispatcher):
    """ Implementation of the Microsoft VSCode Language Server Protocol
    https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md
    """

    def capabilities(self) -> dict:
        server_capabilities = {
            # Defines how text documents are synced
            "textDocumentSync": {
                "change": constants.TextDocumentSyncKind.INCREMENTAL,
                "save": {
                    "includeText": True
                },
                "openClose": True,
            },
            "workspace": {
                "workspaceFolders": {
                    "supported": True,
                    "changeNotifications": True
                }
            },
            # features provided
            # "codeActionProvider": True,
            "codeLensProvider": {
                # Code lens has a resolve provider as well
                "resolveProvider": False
            },
            "completionProvider": {
                "resolveProvider": False,
                "triggerCharacters": [],  # [":"],
            },
            # "documentFormattingProvider": True,
            # "documentHighlightProvider": True,
            # "documentRangeFormattingProvider": True,
            "documentSymbolProvider": True,
            "definitionProvider": True,
            "executeCommandProvider": {
                "commands": utils.flatten(self.call_plugins(PluginTypes.rst_commands.value))
            },
            "hoverProvider": True,
            "referencesProvider": True,
            # "renameProvider": True,
            "foldingRangeProvider": True,
            # "signatureHelpProvider": {"triggerCharacters": []},
            # "experimental": any,
        }
        logger.info("Server capabilities: %s", server_capabilities)
        return server_capabilities

    def __init__(self, rx, tx, check_parent_process=False):
        """Initialise the server."""
        self.root_uri = None
        self.config = None  # type: Optional[Config]
        self.workspaces = {}  # type: Dict[str, Workspace]
        self.watching_thread = None
        self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
        self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
        self._check_parent_process = check_parent_process
        self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
        self._dispatchers = []
        self._shutdown = False

    def start(self):
        """Entry point for the server."""
        self._jsonrpc_stream_reader.listen(self._endpoint.consume)

    def show_message(self, message: str, msg_type: int = constants.MessageType.Info):
        """Request the client show a pop-up message."""
        self._endpoint.notify("window/showMessage",
                              params={"type": msg_type, "message": message})

    def log_message(self, message: str, msg_type: int = constants.MessageType.Info):
        """Request the client log a message (in the server's output space)."""
        self._endpoint.notify("window/logMessage",
                              params={"type": msg_type, "message": str(message)})

    def show_message_request(self, message: str, actions: List[dict] = (),
                             msg_type: int = constants.MessageType.Info) -> Future:
        """Request the client show a pop-up message, with action buttons.

        Parameters
        ----------
        actions: list[dict]
            e.g. [{"title": "A"}, {"title": "B"}]
        """
        # for use see: https://github.com/Microsoft/language-server-protocol/issues/230
        return self._endpoint.request(
            "window/showMessageRequest",
            params={"type": msg_type, "message": message, "actions": list(actions)},
        )

    def request_config(self, items: List[dict]) -> Future:
        """Request configuration settings from the client.

        Parameters
        ----------
        items : list[dict]
            e.g. [{"section": "rst_lsp"}]
        """
        return self._endpoint.request("workspace/configuration", params={"items": items})

    def publish_diagnostics(self, doc_uri: str, diagnostics: List[dict]):
        """Publish diagnostics for a document to the client."""
        self._endpoint.notify(
            "textDocument/publishDiagnostics",
            params={"uri": doc_uri, "diagnostics": diagnostics},
        )

    def apply_workspace_edit(self, edit: WorkspaceEdit):
        """Request to modify resource on the client side."""
        return self._endpoint.request("workspace/applyEdit", params={"edit": edit})

    def __getitem__(self, item):
        """Override getitem to fallback through multiple dispatchers."""
        if self._shutdown and item != "exit":
            # exit is the only allowed method during shutdown
            logger.debug("Ignoring non-exit method during shutdown: %s", item)
            raise KeyError

        try:
            return super(RstLanguageServer, self).__getitem__(item)
        except KeyError:
            # Fallback through extra dispatchers
            for dispatcher in self._dispatchers:
                try:
                    return dispatcher[item]
                except KeyError:
                    continue

        raise KeyError()

    def m_shutdown(self, **_kwargs):
        self._shutdown = True
        return None

    def m_exit(self, **_kwargs):
        # Note: LSP protocol indicates that the server process should remain alive after
        # the client's Shutdown request, and wait for the client's Exit notification.
        for workspace in self.workspaces.values():
            workspace.close()  # TODO remove root cache?
        self._endpoint.shutdown()
        self._jsonrpc_stream_reader.close()
        self._jsonrpc_stream_writer.close()

    def match_uri_to_workspace(self, uri: str) -> Workspace:
        return uri2workspace(uri, self.workspaces, self.workspace)

    def match_uri_to_document(self, uri: str) -> Document:
        workspace = uri2workspace(uri, self.workspaces, self.workspace)
        return workspace.get_document(uri)

    def call_plugins(self, hook_name, doc_uri: Optional[str] = None, **kwargs):
        """Calls hook_name and returns a list of results from all registered handlers"""
        logger.debug("calling plugins")
        workspace = self.match_uri_to_workspace(doc_uri)
        doc = workspace.get_document(doc_uri) if doc_uri else None
        hook_handlers = self.config.plugin_manager.subset_hook_caller(hook_name, self.config.disabled_plugins)
        return hook_handlers(config=self.config, workspace=workspace, document=doc, **kwargs)

    @debounce(LINT_DEBOUNCE, keyed_by="doc_uri")
    def lint(self, doc_uri, is_saved):
        workspace = self.match_uri_to_workspace(doc_uri)
        if doc_uri in workspace.documents:
            self.publish_diagnostics(
                doc_uri,
                utils.flatten(self.call_plugins("rst_lint", doc_uri, is_saved=is_saved)),
            )

    def m_initialize(self, processId: Optional[int] = None, rootUri: Optional[str] = None,
                     rootPath: Optional[str] = None, initializationOptions: Optional[Any] = None,
                     **_kwargs):
        logger.debug(
            "Language server initialized with %s %s %s %s",
            processId, rootUri, rootPath, initializationOptions,
        )
        if rootUri is None:
            rootUri = uris.from_fs_path(rootPath) if rootPath is not None else ""

        self.workspaces.pop(self.root_uri, None)
        self.root_uri = rootUri
        self.config = Config(
            rootUri,
            initializationOptions or {},
            processId,
            _kwargs.get("capabilities", {}),
        )
        self.workspace = Workspace(rootUri, server=self, config=self.config)
        self.workspaces[rootUri] = self.workspace

        if (self._check_parent_process and processId is not None
                and self.watching_thread is None):

            def watch_parent_process(pid):
                # exit when the given pid is not alive
                if not utils.is_process_alive(pid):
                    logger.info("parent process %s is not alive, exiting!", pid)
                    self.m_exit()
                else:
                    threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start()

            self.watching_thread = threading.Thread(target=watch_parent_process, args=(processId,))
            self.watching_thread.daemon = True
            self.watching_thread.start()

        return {"capabilities": self.capabilities()}

    def m_initialized(self, **_kwargs):
        pass

    def m_workspace__did_change_configuration(self, settings=None):
        self.config.update((settings or {}).get(CONFIG_NAMESPACE, {}))
        for workspace_uri in self.workspaces:
            workspace = self.workspaces[workspace_uri]
            # TODO debounce update_config (since requires read of all files)
            workspace.update_config(self.config)
            for doc_uri in workspace.documents:
                self.lint(doc_uri, is_saved=False)

    def m_workspace__did_change_workspace_folders(self, added=None, removed=None, **_kwargs):
        for removed_info in removed:
            removed_uri = removed_info["uri"]
            self.workspaces.pop(removed_uri)

        for added_info in added:
            added_uri = added_info["uri"]
            self.workspaces[added_uri] = Workspace(added_uri, server=self, config=self.config)

        # Migrate documents that are on the root workspace and have a better match now
        doc_uris = list(self.workspace.documents.keys())
        for uri in doc_uris:
            doc = self.workspace._open_docs.pop(uri)
            new_workspace = self.match_uri_to_workspace(uri)
            new_workspace._docs[uri] = doc

    def m_workspace__did_change_watched_files(self, changes: List[FileEvent], **_kwargs):
        self.log_message(f"didChangeWatchedFile {changes}")
        # TODO use to remove deleted files from the database?
        # not working at moment, need to watch RST on client?

    def m_text_document__did_open(self, textDocument: TextDocument, **_kwargs):
        workspace = self.match_uri_to_workspace(textDocument["uri"])
        workspace.put_document(textDocument)
        self.lint(textDocument["uri"], is_saved=False)

    def m_text_document__did_close(self, textDocument: TextDocument, **_kwargs):
        workspace = self.match_uri_to_workspace(textDocument["uri"])
        workspace.rm_document(textDocument["uri"])

    def m_text_document__did_save(self, textDocument: TextDocument, **_kwargs):
        self.lint(textDocument["uri"], is_saved=False)

    def m_text_document__did_change(self, contentChanges: List[TextEdit], textDocument: TextDocument, **_kwargs):
        workspace = self.match_uri_to_workspace(textDocument["uri"])
        for change in contentChanges:
            workspace.update_document(textDocument["uri"], change, version=textDocument.get("version"))
        self.lint(textDocument["uri"], is_saved=False)

    # FEATURES
    # --------

    def m_text_document__code_lens(self, textDocument: TextDocument, **_kwargs):
        return utils.flatten(self.call_plugins(PluginTypes.rst_code_lens.value, textDocument["uri"]))

    def m_text_document__completion(self, textDocument: TextDocument, position: Position,
                                    **_kwargs) -> CompletionList:
        completions = self.call_plugins(PluginTypes.rst_completions.value, textDocument["uri"], position=position)
        return {"isIncomplete": False, "items": utils.flatten(completions)}

    def m_text_document__definition(self, textDocument: TextDocument, position: Position,
                                    **_kwargs) -> List[Location]:
        # TODO can also return LinkLocation
        return utils.flatten(
            self.call_plugins(
                PluginTypes.rst_definitions.value,
                textDocument["uri"],
                position=position,
            ))

    def m_text_document__document_symbol(self, textDocument: TextDocument, **_kwargs) -> List[DocumentSymbol]:
        return utils.flatten(self.call_plugins(PluginTypes.rst_document_symbols.value, textDocument["uri"]))

    def m_text_document__folding_range(self, textDocument: TextDocument, **_kwargs):
        return self.call_plugins(PluginTypes.rst_folding_range.value, textDocument["uri"])

    def m_text_document__hover(self, textDocument: TextDocument, position: Position, **_kwargs):
        return self.call_plugins(PluginTypes.rst_hover.value, textDocument["uri"], position=position) or {
            "contents": ""
        }

    def m_text_document__references(self, textDocument: TextDocument, position: Position,
                                    context=None, **_kwargs) -> List[Location]:
        return utils.flatten(
            self.call_plugins(
                PluginTypes.rst_references.value,
                textDocument["uri"],
                position=position,
                # Include the declaration of the current symbol
                exclude_declaration=not context["includeDeclaration"],
            ))

    def m_workspace__execute_command(self, command: str, arguments: Optional[List[Any]] = None):
        """The workspace/executeCommand request is sent from the client to the server,
        to trigger command execution on the server.

        In most cases the server creates a WorkspaceEdit structure and applies the changes
        to the workspace using the request workspace/applyEdit, which is sent from the
        server to the client.
        """
        edit = self.call_plugins(PluginTypes.rst_execute_command.value, command=command, arguments=arguments)
        self.apply_workspace_edit(edit)
class TeaspnServer(MethodDispatcher):
    """
    This class handles JSON-RPC requests to/from a TEASPN client,
    working as a middle layer that passes requests to a TeaspnHandler.
    Also does serialization and deserialization of TEASPN objects.
    """

    def __init__(self, rx, tx, handler: TeaspnHandler, check_parent_process=False):
        self.workspace = None
        self.config = None
        self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
        self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
        self._handler = handler
        self._check_parent_process = check_parent_process
        self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
        self._dispatchers = []
        self._shutdown = False

    def start(self):
        """Entry point for the server."""
        self._jsonrpc_stream_reader.listen(self._endpoint.consume)

    def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs):
        return {
            "capabilities": {
                "textDocumentSync": {
                    "openClose": True,
                    "change": TextDocumentSyncKind.Incremental
                },
                "completionProvider": {
                    "resolveProvider": True,
                    "triggerCharacters": [' '] + list(__import__('string').ascii_lowercase)
                },
                "codeActionProvider": True,
                "executeCommandProvider": {
                    "commands": ['refactor.rewrite']
                },
                "definitionProvider": True,
                "hoverProvider": True
            }
        }

    def m_shutdown(self, **_kwargs):
        return None

    def m_text_document__did_open(self, textDocument=None, **_kwargs):
        self._handler.initialize_document(textDocument['uri'], textDocument['text'])
        diagnostics = self._handler.get_diagnostics()
        self._endpoint.notify(
            'textDocument/publishDiagnostics',
            {
                'uri': textDocument['uri'],
                'diagnostics': [diagnostic.to_dict() for diagnostic in diagnostics]
            })

    def m_text_document__did_change(self, textDocument=None, contentChanges=None, **_kwargs):
        for change in contentChanges:
            rng = Range.from_dict(change['range'])
            self._handler.update_document(range=rng, text=change['text'])

        diagnostics = self._handler.get_diagnostics()
        self._endpoint.notify(
            'textDocument/publishDiagnostics',
            {
                'uri': textDocument['uri'],
                'diagnostics': [diagnostic.to_dict() for diagnostic in diagnostics]
            })

    def m_text_document__syntax_highlight(self, textDocument=None, **_kwargs):
        highlights = self._handler.highlight_syntax()
        return [highlight.to_dict() for highlight in highlights]

    def m_text_document__completion(self, textDocument=None, position=None, **_kwargs):
        position = Position.from_dict(position)
        completion_list = self._handler.get_completion_list(position=position)
        return completion_list.to_dict()

    def m_workspace__search_example(self, query=None, **_kwargs):
        examples = self._handler.search_example(query)
        return [example.to_dict() for example in examples]

    def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs):
        rng = Range.from_dict(range)
        actions = []
        if context is not None and context.get('diagnostics', []):
            # code action for resolving diagnostics -> invoke quick fix
            diagnostics = [Diagnostic.from_dict(diag) for diag in context['diagnostics']]
            actions = self._handler.run_quick_fix(rng, diagnostics)

        # obtain paraphrases
        commands = self._handler.run_code_action(rng)
        return [action_or_command.to_dict() for action_or_command in actions + commands]

    def m_workspace__execute_command(self, command=None, arguments=None, **_kwargs):
        if command == 'refactor.rewrite':
            self._endpoint.request('workspace/applyEdit', {'edit': arguments[0]})

    def m_text_document__definition(self, textDocument=None, position=None, **_kwargs):
        position = Position.from_dict(position)
        locations = self._handler.search_definition(position, uri=textDocument['uri'])
        return [location.to_dict() for location in locations]

    def m_text_document__hover(self, textDocument=None, position=None, **_kwargs):
        position = Position.from_dict(position)
        hover = self._handler.hover(position)
        if hover:
            return hover.to_dict()
        else:
            return None
import logging
from queue import Queue

from pyls_jsonrpc.dispatchers import MethodDispatcher
from pyls_jsonrpc.endpoint import Endpoint

logging.basicConfig(level=logging.DEBUG)


class Dispatcher(MethodDispatcher):
    def m_add(self, *, x: int, y: int) -> int:
        print("add!", x, y, x + y)
        return x + y


q = Queue()


def consume(params: dict) -> None:
    global q
    q.put_nowait(params)


endpoint = Endpoint(Dispatcher(), consume)

# notify() simply hands the message to the consumer
endpoint.notify("add", params={"x": 10, "y": 20})

# request() apparently just stores a future in the endpoint's mapping of pending requests?
fut = endpoint.request("add", params={"x": 10, "y": 20})

while not q.empty():
    message = q.get()
    if "id" in message:
        result = endpoint._dispatcher[message["method"]](message["params"])
        endpoint._handle_response(message["id"], result=result)

endpoint.shutdown()
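Because the loop above feeds each outgoing request back into endpoint._handle_response, the future returned by endpoint.request should already be resolved once the queue is drained. A small follow-up check, not part of the original snippet:

# The loopback above resolves the pending request, so this should not block;
# with m_add(x=10, y=20) the expected value is 30.
print(fut.result(timeout=1))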
class PythonLanguageServer(MethodDispatcher):
    """ Implementation of the Microsoft VSCode Language Server Protocol
    https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md
    """

    # pylint: disable=too-many-public-methods,redefined-builtin

    def __init__(self, rx, tx, check_parent_process=False):
        self.workspace = None
        self.config = None
        self.root_uri = None
        self.watching_thread = None
        self.workspaces = {}
        self.uri_workspace_mapper = {}
        self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
        self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
        self._check_parent_process = check_parent_process
        self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
        self._dispatchers = []
        self._shutdown = False
        validator = ConfigValidator(None, True, False)
        self.config_spec = validator.get_config_spec()

    def start(self):
        """Entry point for the server."""
        self._jsonrpc_stream_reader.listen(self._endpoint.consume)

    def __getitem__(self, item):
        """Override getitem to fallback through multiple dispatchers."""
        if self._shutdown and item != 'exit':
            # exit is the only allowed method during shutdown
            log.debug("Ignoring non-exit method during shutdown: %s", item)
            raise KeyError

        try:
            return super(PythonLanguageServer, self).__getitem__(item)
        except KeyError:
            # Fallback through extra dispatchers
            for dispatcher in self._dispatchers:
                try:
                    return dispatcher[item]
                except KeyError:
                    continue

        raise KeyError()

    def m_shutdown(self, **_kwargs):
        self._shutdown = True
        return None

    def m_exit(self, **_kwargs):
        self._endpoint.shutdown()
        self._jsonrpc_stream_reader.close()
        self._jsonrpc_stream_writer.close()

    def _match_uri_to_workspace(self, uri):
        workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces)
        return self.workspaces.get(workspace_uri, self.workspace)

    def capabilities(self):
        server_capabilities = {
            'codeActionProvider': True,
            'codeLensProvider': {
                'resolveProvider': False,  # We may need to make this configurable
            },
            'completionProvider': {
                'resolveProvider': False,  # We know everything ahead of time
                'triggerCharacters': ['.']
            },
            'documentFormattingProvider': True,
            'documentHighlightProvider': True,
            'documentRangeFormattingProvider': True,
            'documentSymbolProvider': True,
            'definitionProvider': True,
            'executeCommandProvider': {
                'commands': []
            },
            'hoverProvider': True,
            'referencesProvider': True,
            'renameProvider': True,
            'signatureHelpProvider': {
                'triggerCharacters': ['(', ',', '=']
            },
            'textDocumentSync': {
                'change': lsp.TextDocumentSyncKind.INCREMENTAL,
                'save': {
                    'includeText': True,
                },
                'openClose': True,
            },
            'workspace': {
                'workspaceFolders': {
                    'supported': True,
                    'changeNotifications': True
                }
            },
            'experimental': []
        }
        log.info('Server capabilities: %s', server_capabilities)
        return server_capabilities

    def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs):
        log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions)
        if rootUri is None:
            rootUri = uris.from_fs_path(rootPath) if rootPath is not None else ''

        self.workspaces.pop(self.root_uri, None)
        self.root_uri = rootUri
        self.workspace = Workspace(rootUri, self._endpoint)
        self.workspaces[rootUri] = self.workspace
        self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {}))

        if self._check_parent_process and processId is not None and self.watching_thread is None:
            def watch_parent_process(pid):
                # exit when the given pid is not alive
                if not _utils.is_process_alive(pid):
                    log.info("parent process %s is not alive", pid)
                    self.m_exit()
                else:
                    threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start()

            self.watching_thread = threading.Thread(target=watch_parent_process, args=(processId,))
            self.watching_thread.daemon = True
            self.watching_thread.start()

        # Get our capabilities
        return {'capabilities': self.capabilities()}

    def m_initialized(self, **_kwargs):
        pass

    def code_actions(self, doc_uri, range, context):
        return []

    def code_lens(self, doc_uri):
        return []

    def _get_position_path(self, config, position):
        line = position['line']
        character = position["character"]
        candidate_key = None
        if hasattr(config, "lc"):
            for key, lc in config.lc.data.items():
                if len(lc) >= 4 and ((lc[0] <= line and lc[3] <= character) or (lc[1] < character and lc[2] < line)):
                    candidate_key = key

        if candidate_key is not None:
            return [candidate_key] + self._get_position_path(config[candidate_key], position)
        else:
            return []

    def _get_settings_suggestion(self, settings_name):
        suggestions = []
        for key, value in self.config_spec.get(settings_name, {}).items():
            if key.startswith("__"):
                continue
            if value[1].startswith("subconfig") or value[0] in ("list", "dict"):
                insert_text = key + ":\n "
            else:
                insert_text = key + ": "

            suggestions.append((key, insert_text, ""))
        return suggestions

    def _get_settings_value_suggestions(self, config, settings):
        if settings[1].startswith("enum"):
            values = settings[1][5:-1].split(",")
            suggestions = [(value, value + "\n", "") for value in values]
        elif settings[1].startswith("machine"):
            device = settings[1][8:-1]
            devices = self.workspace.get_complete_config().get(device, {})
            suggestions = [(device, device + "\n", "") for device in devices]
        elif settings[1].startswith("subconfig"):
            settings_name = settings[1][10:-1]
            suggestions = self._get_settings_suggestion(settings_name)
        elif settings[1] == "bool":
            suggestions = [("True", "True\n", "(Default)" if "True" == settings[2] else ""),
                           ("False", "False\n", "(Default)" if "False" == settings[2] else "")]
        else:
            suggestions = []

        return suggestions

    def completions(self, doc_uri, position):
        completions = []
        if position["line"] == 0 and position["character"] == 0:
            return {
                'isIncomplete': False,
                'items': [{
                    'label': "#config_version=5",
                    'kind': lsp.CompletionItemKind.Text,
                    'detail': "",
                    'documentation': "",
                    'sortText': "#config_version=5",
                    'insertText': "#config_version=5\n"
                }, {
                    'label': "#show_version=5",
                    'kind': lsp.CompletionItemKind.Text,
                    'detail': "",
                    'documentation': "",
                    'sortText': "#show_version=5",
                    'insertText': "#show_version=5\n"
                }]
            }

        document = self.workspace.get_document(doc_uri)
        path = self._get_position_path(document.config_roundtrip, position)

        if len(path) == 0:
            # global level -> all devices are valid
            # TODO: check if this is a mode or machine file
            suggestions = [(key, key + ":\n ", "") for key, value in self.config_spec.items()
                           if "machine" in value.get("__valid_in__", [])]
        elif len(path) == 1:
            # device name level -> no suggestions
            suggestions = []
        elif len(path) == 2:
            # device level -> suggest config options
            suggestions = self._get_settings_suggestion(path[0])
        elif len(path) == 3:
            # settings level
            device_settings = self.config_spec.get(path[0], {})
            attribute_settings = device_settings.get(path[2], ["", "", ""])
            suggestions = self._get_settings_value_suggestions(config, attribute_settings)
        elif len(path) >= 3:
            device_settings = self.config_spec.get(path[0], {})
            for i in range(2, len(path) - 1):
                attribute_settings = device_settings.get(path[i], ["", "", ""])
                if attribute_settings[1].startswith("subconfig"):
                    settings_name = attribute_settings[1][10:-1]
                    device_settings = self.config_spec.get(settings_name, {})
                else:
                    return []
            attribute_settings = device_settings.get(path[len(path) - 1], ["", "", ""])
            suggestions = self._get_settings_value_suggestions(config, attribute_settings)
        else:
            suggestions = []

        for key, insertText, value in suggestions:
            completions.append({
                'label': key,
                'kind': lsp.CompletionItemKind.Property,
                'detail': "{}".format(value),
                'documentation': "{} {}".format(key, value),
                'sortText': key,
                'insertText': insertText
            })

        return {'isIncomplete': False, 'items': completions}

    def definitions(self, doc_uri, position):
        return []

    def document_symbols(self, doc_uri):
        return []

    def execute_command(self, command, arguments):
        return None

    def format_document(self, doc_uri):
        return None

    def format_range(self, doc_uri, range):
        return None

    def highlight(self, doc_uri, position):
        return None

    def hover(self, doc_uri, position):
        return {'contents': ''}

    @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri')
    def lint(self, doc_uri, is_saved):
        # Since we're debounced, the document may no longer be open
        workspace = self._match_uri_to_workspace(doc_uri)
        if doc_uri in workspace.documents:
            workspace.publish_diagnostics(doc_uri, [])

    def references(self, doc_uri, position, exclude_declaration):
        return []

    def rename(self, doc_uri, position, new_name):
        return None

    def signature_help(self, doc_uri, position):
        return None

    def m_text_document__did_close(self, textDocument=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        workspace.rm_document(textDocument['uri'])

    def m_text_document__did_open(self, textDocument=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version'))
        self.lint(textDocument['uri'], is_saved=True)

    def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs):
        workspace = self._match_uri_to_workspace(textDocument['uri'])
        for change in contentChanges:
            workspace.update_document(textDocument['uri'], change, version=textDocument.get('version'))
        self.lint(textDocument['uri'], is_saved=False)

    def m_text_document__did_save(self, textDocument=None, **_kwargs):
        self.lint(textDocument['uri'], is_saved=True)

    def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs):
        return self.code_actions(textDocument['uri'], range, context)

    def m_text_document__code_lens(self, textDocument=None, **_kwargs):
        return self.code_lens(textDocument['uri'])

    def m_text_document__completion(self, textDocument=None, position=None, **_kwargs):
        return self.completions(textDocument['uri'], position)

    def m_text_document__definition(self, textDocument=None, position=None, **_kwargs):
        return self.definitions(textDocument['uri'], position)

    def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs):
        return self.highlight(textDocument['uri'], position)

    def m_text_document__hover(self, textDocument=None, position=None, **_kwargs):
        return self.hover(textDocument['uri'], position)

    def m_text_document__document_symbol(self, textDocument=None, **_kwargs):
        return self.document_symbols(textDocument['uri'])

    def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs):
        # For now we're ignoring formatting options.
        return self.format_document(textDocument['uri'])

    def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs):
        return self.rename(textDocument['uri'], position, newName)

    def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs):
        # Again, we'll ignore formatting options for now.
        return self.format_range(textDocument['uri'], range)

    def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs):
        exclude_declaration = not context['includeDeclaration']
        return self.references(textDocument['uri'], position, exclude_declaration)

    def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs):
        return self.signature_help(textDocument['uri'], position)

    def m_workspace__did_change_configuration(self, settings=None):
        self.config.update((settings or {}).get('pyls', {}))
        for workspace_uri in self.workspaces:
            workspace = self.workspaces[workspace_uri]
            for doc_uri in workspace.documents:
                self.lint(doc_uri, is_saved=False)

    def m_workspace__did_change_workspace_folders(self, added=None, removed=None, **_kwargs):
        for removed_info in removed:
            removed_uri = removed_info['uri']
            self.workspaces.pop(removed_uri)

        for added_info in added:
            added_uri = added_info['uri']
            self.workspaces[added_uri] = Workspace(added_uri, self._endpoint)

        # Migrate documents that are on the root workspace and have a better
        # match now
        doc_uris = list(self.workspace._docs.keys())
        for uri in doc_uris:
            doc = self.workspace._docs.pop(uri)
            new_workspace = self._match_uri_to_workspace(uri)
            new_workspace._docs[uri] = doc

    def m_workspace__did_change_watched_files(self, changes=None, **_kwargs):
        changed_py_files = set()
        config_changed = False
        for d in (changes or []):
            if d['uri'].endswith(MPF_FILE_EXTENSIONS):
                changed_py_files.add(d['uri'])
            elif d['uri'].endswith(CONFIG_FILEs):
                config_changed = True

        if config_changed:
            self.config.settings.cache_clear()
        elif not changed_py_files:
            # Only externally changed python files and lint configs may result in changed diagnostics.
            return

        for workspace_uri in self.workspaces:
            workspace = self.workspaces[workspace_uri]
            for doc_uri in workspace.documents:
                # Changes in doc_uri are already handled by m_text_document__did_save
                if doc_uri not in changed_py_files:
                    self.lint(doc_uri, is_saved=False)

    def m_workspace__execute_command(self, command=None, arguments=None):
        return self.execute_command(command, arguments)
class PythonLanguageServer(MethodDispatcher):
    """ Implementation of the Microsoft VSCode Language Server Protocol
    https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md
    """

    # pylint: disable=too-many-public-methods,redefined-builtin

    def __init__(self, rx, tx, check_parent_process=False):
        self.workspace = None
        self.config = None
        self._jsonrpc_stream_reader = JsonRpcStreamReader(rx)
        self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx)
        self._check_parent_process = check_parent_process
        self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS)
        self._shutdown = False

    def start(self):
        """Entry point for the server."""
        self._jsonrpc_stream_reader.listen(self._endpoint.consume)

    def __getitem__(self, item):
        """Override getitem to fallback through multiple dispatchers."""
        if self._shutdown and item != 'exit':
            # exit is the only allowed method during shutdown
            log.debug("Ignoring non-exit method during shutdown: %s", item)
            raise KeyError
        return super(PythonLanguageServer, self).__getitem__(item)

    def m_shutdown(self, **_kwargs):
        self._shutdown = True
        return None

    def m_exit(self, **_kwargs):
        self._endpoint.shutdown()
        self._jsonrpc_stream_reader.close()
        self._jsonrpc_stream_writer.close()

    def capabilities(self):
        from . import mypy_server
        is_patched_mypy = mypy_server.is_patched_mypy()
        if not is_patched_mypy:
            log.info('Using non-patched mypy, rich language features not available.')
        python_38 = sys.version_info >= (3, 8)
        if not python_38:
            log.info('Using Python before 3.8, rich language features not available.')

        rich_analysis_available = is_patched_mypy and python_38

        server_capabilities = {
            'definitionProvider': rich_analysis_available,
            'hoverProvider': rich_analysis_available,
            'textDocumentSync': lsp.TextDocumentSyncKind.INCREMENTAL
        }
        log.info('Server capabilities: %s', server_capabilities)
        return server_capabilities

    def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs):
        log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions)
        if rootUri is None:
            rootUri = uris.from_fs_path(rootPath) if rootPath is not None else ''

        self.workspace = Workspace(rootUri, self._endpoint)
        self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {}))

        try:
            import mypy
        except ImportError:
            self.workspace.show_message(
                'Mypy is not installed. Follow mypy-vscode installation instructions.',
                lsp.MessageType.Warning)
            log.error(f'mypy is not installed. sys.path:\n{sys.path}')
            return {'capabilities': None}

        if self._check_parent_process and processId is not None:
            def watch_parent_process(pid):
                # exit when the given pid is not alive
                if not _utils.is_process_alive(pid):
                    log.info("parent process %s is not alive", pid)
                    self.m_exit()
                log.debug("parent process %s is still alive", pid)
                threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start()

            watching_thread = threading.Thread(target=watch_parent_process, args=(processId,))
            watching_thread.daemon = True
            watching_thread.start()

        # Get our capabilities
        return {'capabilities': self.capabilities()}

    def m_initialized(self, **_kwargs):
        pass

    def get_document(self, doc_uri):
        return self.workspace.get_document(doc_uri) if doc_uri else None

    def m_text_document__did_close(self, textDocument=None, **_kwargs):
        self.workspace.rm_document(textDocument['uri'])

    def m_text_document__did_open(self, textDocument=None, **_kwargs):
        self.workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version'))

    def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs):
        for change in contentChanges:
            self.workspace.update_document(textDocument['uri'], change, version=textDocument.get('version'))

    def m_text_document__did_save(self, textDocument=None, **_kwargs):
        from . import mypy_server
        mypy_server.mypy_check(self.workspace, self.config)

    def m_text_document__definition(self, textDocument=None, position=None, **_kwargs):
        from . import mypy_definition
        return mypy_definition.get_definitions(
            self.config, self.workspace, self.get_document(textDocument['uri']), position)

    def m_text_document__hover(self, textDocument=None, position=None, **_kwargs):
        from . import mypy_hover
        return mypy_hover.hover(self.workspace, self.get_document(textDocument['uri']), position)

    def m_workspace__did_change_configuration(self, settings=None):
        from . import mypy_server
        self.config.update((settings or {}).get('mypy', {}))
        mypy_server.configuration_changed(self.config, self.workspace)
import logging from collections import ChainMap from pyls_jsonrpc.dispatchers import MethodDispatcher from pyls_jsonrpc.endpoint import Endpoint logging.basicConfig(level=logging.DEBUG) class Dispatcher(MethodDispatcher): def m_add(self, *, x: int, y: int) -> int: return x + y def consume(*args, **kwargs): print("!", args, kwargs) endpoint = Endpoint(Dispatcher(), consume) message = { "jsonrpc": "2.0", "method": "add", "params": { "x": 10, "y": 20 }, } print("-") endpoint.consume(message) print("-") endpoint.consume(ChainMap({"id": 1}, message)) print("-")
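# Hedged follow-up to the demo above: MethodDispatcher also maps LSP-style
# method names ("textDocument/didOpen") onto handlers by prefixing "m_",
# turning "/" into "__" and camelCase into snake_case, which is why the server
# classes in this collection define methods such as m_text_document__did_open.
# The LspLikeDispatcher name and the example URI are made up for illustration.
import logging
from pyls_jsonrpc.dispatchers import MethodDispatcher
from pyls_jsonrpc.endpoint import Endpoint

logging.basicConfig(level=logging.DEBUG)

class LspLikeDispatcher(MethodDispatcher):
    def m_text_document__did_open(self, *, textDocument=None, **_kwargs):
        print("opened", textDocument["uri"])

endpoint = Endpoint(LspLikeDispatcher(), lambda msg: print("->", msg))
endpoint.consume({
    "jsonrpc": "2.0",
    "method": "textDocument/didOpen",
    "params": {"textDocument": {"uri": "file:///tmp/example.py", "text": ""}},
})
endpoint.shutdown()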
def cont() -> dict: time.sleep(1) print(f"*** {name}: bye ***") return {"time": time.time() - t} return cont def consume(*args, **kwargs): print("!!!", args, kwargs) logging.basicConfig( level=logging.DEBUG, format="\t" + logging.BASIC_FORMAT, ) endpoint = Endpoint(Dispatcher(), consume) message = { "jsonrpc": "2.0", "method": "hello", "params": { "name": "foo", }, } endpoint.consume(ChainMap({"id": 1}, message)) endpoint.consume(ChainMap({"id": 2, "params": {"name": "bar"}}, message)) endpoint.shutdown()
class PythonLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ # pylint: disable=too-many-public-methods,redefined-builtin def __init__(self, rx, tx, check_parent_process=False): self.workspace = None self.config = None self.root_uri = None self.watching_thread = None self.workspaces = {} self.uri_workspace_mapper = {} self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._dispatchers = [] self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" if self._shutdown and item != 'exit': # exit is the only allowed method during shutdown log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError try: return super(PythonLanguageServer, self).__getitem__(item) except KeyError: # Fallback through extra dispatchers for dispatcher in self._dispatchers: try: return dispatcher[item] except KeyError: continue raise KeyError() def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def _match_uri_to_workspace(self, uri): workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces) return self.workspaces.get(workspace_uri, self.workspace) def _hook(self, hook_name, doc_uri=None, **kwargs): """Calls hook_name and returns a list of results from all registered handlers""" workspace = self._match_uri_to_workspace(doc_uri) doc = workspace.get_document(doc_uri) if doc_uri else None hook_handlers = self.config.plugin_manager.subset_hook_caller( hook_name, self.config.disabled_plugins) return hook_handlers(config=self.config, workspace=workspace, document=doc, **kwargs) def capabilities(self): server_capabilities = { 'codeActionProvider': True, 'codeLensProvider': { 'resolveProvider': False, # We may need to make this configurable }, 'completionProvider': { 'resolveProvider': False, # We know everything ahead of time 'triggerCharacters': ['.'] }, 'documentFormattingProvider': True, 'documentHighlightProvider': True, 'documentRangeFormattingProvider': True, 'documentSymbolProvider': True, 'definitionProvider': True, 'executeCommandProvider': { 'commands': flatten(self._hook('pyls_commands')) }, 'hoverProvider': True, 'referencesProvider': True, 'renameProvider': True, 'foldingRangeProvider': True, 'signatureHelpProvider': { 'triggerCharacters': ['(', ',', '='] }, 'textDocumentSync': { 'change': lsp.TextDocumentSyncKind.INCREMENTAL, 'save': { 'includeText': True, }, 'openClose': True, }, 'workspace': { 'workspaceFolders': { 'supported': True, 'changeNotifications': True } }, 'experimental': merge(self._hook('pyls_experimental_capabilities')) } log.info('Server capabilities: %s', server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else '' 
self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri self.config = config.Config(rootUri, initializationOptions or {}, processId, _kwargs.get('capabilities', {})) self.workspace = Workspace(rootUri, self._endpoint, self.config) self.workspaces[rootUri] = self.workspace self._dispatchers = self._hook('pyls_dispatchers') self._hook('pyls_initialize') if self._check_parent_process and processId is not None and self.watching_thread is None: def watch_parent_process(pid): # exit when the given pid is not alive if not _utils.is_process_alive(pid): log.info("parent process %s is not alive, exiting!", pid) self.m_exit() else: threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() self.watching_thread = threading.Thread( target=watch_parent_process, args=(processId, )) self.watching_thread.daemon = True self.watching_thread.start() # Get our capabilities return {'capabilities': self.capabilities()} def m_initialized(self, **_kwargs): self._hook('pyls_initialized') def code_actions(self, doc_uri, range, context): return flatten( self._hook('pyls_code_actions', doc_uri, range=range, context=context)) def code_lens(self, doc_uri): return flatten(self._hook('pyls_code_lens', doc_uri)) def completions(self, doc_uri, position): completions = self._hook('pyls_completions', doc_uri, position=position) return {'isIncomplete': False, 'items': flatten(completions)} def definitions(self, doc_uri, position): return flatten( self._hook('pyls_definitions', doc_uri, position=position)) def document_symbols(self, doc_uri): return flatten(self._hook('pyls_document_symbols', doc_uri)) def execute_command(self, command, arguments): return self._hook('pyls_execute_command', command=command, arguments=arguments) def format_document(self, doc_uri): return self._hook('pyls_format_document', doc_uri) def format_range(self, doc_uri, range): return self._hook('pyls_format_range', doc_uri, range=range) def highlight(self, doc_uri, position): return flatten( self._hook('pyls_document_highlight', doc_uri, position=position)) or None def hover(self, doc_uri, position): return self._hook('pyls_hover', doc_uri, position=position) or { 'contents': '' } @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri') def lint(self, doc_uri, is_saved): # Since we're debounced, the document may no longer be open workspace = self._match_uri_to_workspace(doc_uri) if doc_uri in workspace.documents: workspace.publish_diagnostics( doc_uri, flatten(self._hook('pyls_lint', doc_uri, is_saved=is_saved))) def references(self, doc_uri, position, exclude_declaration): return flatten( self._hook('pyls_references', doc_uri, position=position, exclude_declaration=exclude_declaration)) def rename(self, doc_uri, position, new_name): return self._hook('pyls_rename', doc_uri, position=position, new_name=new_name) def signature_help(self, doc_uri, position): return self._hook('pyls_signature_help', doc_uri, position=position) def folding(self, doc_uri): return self._hook('pyls_folding_range', doc_uri) def m_text_document__did_close(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) workspace.rm_document(textDocument['uri']) def m_text_document__did_open(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) self._hook('pyls_document_did_open', textDocument['uri']) self.lint(textDocument['uri'], is_saved=True) def 
m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument['uri']) for change in contentChanges: workspace.update_document(textDocument['uri'], change, version=textDocument.get('version')) self.lint(textDocument['uri'], is_saved=False) def m_text_document__did_save(self, textDocument=None, **_kwargs): self.lint(textDocument['uri'], is_saved=True) def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs): return self.code_actions(textDocument['uri'], range, context) def m_text_document__code_lens(self, textDocument=None, **_kwargs): return self.code_lens(textDocument['uri']) def m_text_document__completion(self, textDocument=None, position=None, **_kwargs): return self.completions(textDocument['uri'], position) def m_text_document__definition(self, textDocument=None, position=None, **_kwargs): return self.definitions(textDocument['uri'], position) def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs): return self.highlight(textDocument['uri'], position) def m_text_document__hover(self, textDocument=None, position=None, **_kwargs): return self.hover(textDocument['uri'], position) def m_text_document__document_symbol(self, textDocument=None, **_kwargs): return self.document_symbols(textDocument['uri']) def m_text_document__formatting(self, textDocument=None, _options=None, **_kwargs): # For now we're ignoring formatting options. return self.format_document(textDocument['uri']) def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs): return self.rename(textDocument['uri'], position, newName) def m_text_document__folding_range(self, textDocument=None, **_kwargs): return self.folding(textDocument['uri']) def m_text_document__range_formatting(self, textDocument=None, range=None, _options=None, **_kwargs): # Again, we'll ignore formatting options for now. 
return self.format_range(textDocument['uri'], range) def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs): exclude_declaration = not context['includeDeclaration'] return self.references(textDocument['uri'], position, exclude_declaration) def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs): return self.signature_help(textDocument['uri'], position) def m_workspace__did_change_configuration(self, settings=None): self.config.update((settings or {}).get('pyls', {})) for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] workspace.update_config(settings) for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs): # pylint: disable=too-many-locals if event is None: return added = event.get('added', []) removed = event.get('removed', []) for removed_info in removed: if 'uri' in removed_info: removed_uri = removed_info['uri'] self.workspaces.pop(removed_uri, None) for added_info in added: if 'uri' in added_info: added_uri = added_info['uri'] workspace_config = config.Config(added_uri, self.config._init_opts, self.config._process_id, self.config._capabilities) self.workspaces[added_uri] = Workspace(added_uri, self._endpoint, workspace_config) root_workspace_removed = any(removed_info['uri'] == self.root_uri for removed_info in removed) workspace_added = len(added) > 0 and 'uri' in added[0] if root_workspace_removed and workspace_added: added_uri = added[0]['uri'] self.root_uri = added_uri new_root_workspace = self.workspaces[added_uri] self.config = new_root_workspace._config self.workspace = new_root_workspace elif root_workspace_removed: # NOTE: Removing the root workspace can only happen when the server # is closed, thus the else condition of this if can never happen. if self.workspaces: log.debug('Root workspace deleted!') available_workspaces = sorted(self.workspaces) first_workspace = available_workspaces[0] new_root_workspace = self.workspaces[first_workspace] self.root_uri = first_workspace self.config = new_root_workspace._config self.workspace = new_root_workspace # Migrate documents that are on the root workspace and have a better # match now doc_uris = list(self.workspace._docs.keys()) for uri in doc_uris: doc = self.workspace._docs.pop(uri) new_workspace = self._match_uri_to_workspace(uri) new_workspace._docs[uri] = doc def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set() config_changed = False for d in (changes or []): if d['uri'].endswith(PYTHON_FILE_EXTENSIONS): changed_py_files.add(d['uri']) elif d['uri'].endswith(CONFIG_FILEs): config_changed = True if config_changed: self.config.settings.cache_clear() elif not changed_py_files: # Only externally changed python files and lint configs may result in changed diagnostics. return for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] for doc_uri in workspace.documents: # Changes in doc_uri are already handled by m_text_document__did_save if doc_uri not in changed_py_files: self.lint(doc_uri, is_saved=False) def m_workspace__execute_command(self, command=None, arguments=None): return self.execute_command(command, arguments)
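# A minimal pluggy sketch of the plugin machinery that _hook() above relies on
# (the class uses config.plugin_manager.subset_hook_caller, which pyls builds
# on pluggy). It shows how every registered "pyls_lint" implementation
# contributes a list of diagnostics that then gets flattened into one result.
# The "example" project name, the plugin classes and the diagnostic dicts are
# made up for illustration.
import pluggy

hookspec = pluggy.HookspecMarker("example")
hookimpl = pluggy.HookimplMarker("example")

class Spec:
    @hookspec
    def pyls_lint(self, config, workspace, document, is_saved):
        """Return a list of diagnostics for the document."""

class FlakePlugin:
    @hookimpl
    def pyls_lint(self, config, workspace, document, is_saved):
        return [{"source": "flake", "message": "unused import"}]

class StylePlugin:
    @hookimpl
    def pyls_lint(self, config, workspace, document, is_saved):
        return [{"source": "style", "message": "line too long"}]

pm = pluggy.PluginManager("example")
pm.add_hookspecs(Spec)
pm.register(FlakePlugin(), name="flake")
pm.register(StylePlugin(), name="style")

# subset_hook_caller drops disabled plugins, mirroring config.disabled_plugins.
caller = pm.subset_hook_caller("pyls_lint", remove_plugins=[])
results = caller(config=None, workspace=None, document=None, is_saved=False)
# Each plugin returns one list; flatten() in the server merges them the same way.
diagnostics = [d for result in results for d in result]
print(diagnostics)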
class PythonLanguageServer(MethodDispatcher): """ Implementation of the Microsoft VSCode Language Server Protocol https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md Based on: https://github.com/palantir/python-language-server/blob/develop/pyls/python_ls.py """ def __init__(self, rx, tx): self.workspace = None self.config = None self.root_uri = None self.watching_thread = None self.workspaces = {} self.uri_workspace_mapper = {} self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._shutdown = False def start(self): """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) def m_shutdown(self, **_kwargs): self._shutdown = True return None def m_exit(self, **_kwargs): self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def _match_uri_to_workspace(self, uri): workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces) return self.workspaces.get(workspace_uri, self.workspace) def capabilities(self): server_capabilities = { "codeActionProvider": False, # "codeLensProvider": { # "resolveProvider": False, # We may need to make this configurable # }, # "completionProvider": { # "resolveProvider": False, # We know everything ahead of time # "triggerCharacters": ["."], # }, "documentFormattingProvider": False, "documentHighlightProvider": False, "documentRangeFormattingProvider": False, "documentSymbolProvider": False, "definitionProvider": False, "executeCommandProvider": { "commands": [] }, "hoverProvider": False, "referencesProvider": False, "renameProvider": False, "foldingRangeProvider": False, # "signatureHelpProvider": { # 'triggerCharacters': ['(', ',', '='] # }, "textDocumentSync": { "change": lsp.TextDocumentSyncKind.INCREMENTAL, "save": { "includeText": True, }, "openClose": True, }, "workspace": { "workspaceFolders": { "supported": True, "changeNotifications": True } }, } log.info("Server capabilities: %s", server_capabilities) return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.debug( "Language server initialized with:\n processId: %s\n rootUri: %s\n rootPath: %s\n initializationOptions: %s", processId, rootUri, rootPath, initializationOptions, ) if rootUri is None: rootUri = uris.from_fs_path( rootPath) if rootPath is not None else "" self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri self.config = config.Config( rootUri, initializationOptions or {}, processId, _kwargs.get("capabilities", {}), ) self.workspace = Workspace(rootUri, self._endpoint, self.config) self.workspaces[rootUri] = self.workspace if processId not in (None, -1, 0) and self.watching_thread is None: def watch_parent_process(pid): # exit when the given pid is not alive while True: if not _utils.is_process_alive(pid): # Note: just exit since the parent process already # exited. 
log.info( "Force-quit process: %s", os.getpid(), ) os._exit(0) time.sleep(PARENT_PROCESS_WATCH_INTERVAL) self.watching_thread = threading.Thread( target=watch_parent_process, args=(processId, )) self.watching_thread.daemon = True self.watching_thread.start() # Get our capabilities return {"capabilities": self.capabilities()} def m_initialized(self, **_kwargs): pass def lint(self, doc_uri, is_saved): raise NotImplementedError("Subclasses must override.") def m_text_document__did_close(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument["uri"]) workspace.rm_document(textDocument["uri"]) def m_text_document__did_open(self, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument["uri"]) if workspace is None: log.critical("Unable to find workspace for: %s", (textDocument, )) return workspace.put_document( textDocument["uri"], textDocument["text"], version=textDocument.get("version"), ) self.lint(textDocument["uri"], is_saved=True) def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): workspace = self._match_uri_to_workspace(textDocument["uri"]) if workspace is None: log.critical("Unable to find workspace for: %s", (textDocument, )) return for change in contentChanges: workspace.update_document(textDocument["uri"], change, version=textDocument.get("version")) self.lint(textDocument["uri"], is_saved=False) def m_text_document__did_save(self, textDocument=None, **_kwargs): self.lint(textDocument["uri"], is_saved=True) def m_workspace__did_change_configuration(self, settings=None): self.config.update((settings or {}).get("robot", {})) for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] workspace.update_config(self.config) for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) def m_workspace__did_change_workspace_folders(self, added=None, removed=None, **_kwargs): for removed_info in removed: removed_uri = removed_info["uri"] self.workspaces.pop(removed_uri) for added_info in added: added_uri = added_info["uri"] self.workspaces[added_uri] = Workspace(added_uri, self._endpoint, self.config) # Migrate documents that are on the root workspace and have a better # match now doc_uris = list(self.workspace._docs.keys()) for uri in doc_uris: doc = self.workspace._docs.pop(uri) new_workspace = self._match_uri_to_workspace(uri) new_workspace._docs[uri] = doc def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set() config_changed = False for d in changes or []: if d["uri"].endswith(PYTHON_FILE_EXTENSIONS): changed_py_files.add(d["uri"]) elif d["uri"].endswith(CONFIG_FILEs): config_changed = True if config_changed: self.config.settings.cache_clear() elif not changed_py_files: # Only externally changed python files and lint configs may result in changed diagnostics. return for workspace_uri in self.workspaces: workspace = self.workspaces[workspace_uri] for doc_uri in workspace.documents: # Changes in doc_uri are already handled by m_text_document__did_save if doc_uri not in changed_py_files: self.lint(doc_uri, is_saved=False)
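# Hedged sketch of a concrete subclass filling in the lint() hook that the base
# class above deliberately leaves as NotImplementedError. The DummyLanguageServer
# name and the TODO heuristic are made up, and it assumes the Workspace/Document
# API matches pyls (get_document(), Document.source, publish_diagnostics()).
class DummyLanguageServer(PythonLanguageServer):
    def lint(self, doc_uri, is_saved):
        workspace = self._match_uri_to_workspace(doc_uri)
        if workspace is None or doc_uri not in workspace.documents:
            # The document may already have been closed.
            return
        document = workspace.get_document(doc_uri)
        diagnostics = []
        for lineno, line in enumerate(document.source.splitlines()):
            if "TODO" in line:
                diagnostics.append({
                    "range": {
                        "start": {"line": lineno, "character": 0},
                        "end": {"line": lineno, "character": len(line)},
                    },
                    "message": "TODO left in source",
                    "severity": 4,  # 4 == Hint in the LSP DiagnosticSeverity enum
                })
        workspace.publish_diagnostics(doc_uri, diagnostics)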
def endpoint(dispatcher, consumer): return Endpoint(dispatcher, consumer, id_generator=lambda: MSG_ID)
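# Hedged illustration of why the helper above pins id_generator: with a known
# MSG_ID the test can hand-craft the matching response and resolve the Future
# returned by Endpoint.request(). The dispatcher and consumer here are test
# doubles, and this MSG_ID value is made up for the example.
from pyls_jsonrpc.endpoint import Endpoint

MSG_ID = "fixed-id"
sent = []

client = Endpoint({}, sent.append, id_generator=lambda: MSG_ID)

future = client.request("workspace/symbol", {"query": "foo"})
assert sent[-1]["id"] == MSG_ID  # the outgoing request carries the fixed id

# Simulate the server answering; consuming the response resolves the future.
client.consume({"jsonrpc": "2.0", "id": MSG_ID, "result": []})
assert future.result(timeout=1) == []
client.shutdown()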
class PythonDaemon(MethodDispatcher): """Base Python Daemon with simple methods to check if a module exists, get version info and the like. To add additional methods, please create a separate class based off this and pass in the arg `--daemon-module` to `vscode_datascience_helpers.daemon`. """ def __init__(self, rx, tx): self.log = logging.getLogger("{0}.{1}".format( self.__class__.__module__, self.__class__.__name__)) self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) self._shutdown = False def __getitem__(self, item): """Override getitem to log dispatched methods and ignore non-exit methods during shutdown.""" if self._shutdown and item != "exit": # exit is the only allowed method during shutdown self.log.debug("Ignoring non-exit method during shutdown: %s", item) raise KeyError self.log.info("Execute rpc method %s from %s", item, sys.executable) return super().__getitem__(item) def start(self): """Entry point for the server.""" self._shutdown = False self._jsonrpc_stream_reader.listen(self._endpoint.consume) def m_ping(self, data): """ping & pong (check if daemon is alive).""" self.log.info("pinged with %s", data) return {"pong": data} def _execute_and_capture_output(self, func): fout = io.StringIO() ferr = io.StringIO() with redirect_stdout(fout): with redirect_stderr(ferr): func() output = {} if fout.tell(): output["stdout"] = fout.getvalue() if ferr.tell(): output["stderr"] = ferr.getvalue() return output def close(self): self.log.info("Closing rpc channel") self._shutdown = True self._endpoint.shutdown() self._jsonrpc_stream_reader.close() self._jsonrpc_stream_writer.close() def m_exit(self, **_kwargs): self.close() @error_decorator def m_exec_file(self, file_name, args=None, cwd=None, env=None): args = [] if args is None else args self.log.info("Exec file %s with args %s", file_name, args) def exec_file(): self.log.info("execute file %s", file_name) runpy.run_path(file_name, globals()) with change_exec_context(args, cwd, env): return self._execute_and_capture_output(exec_file) @error_decorator def m_exec_file_observable(self, file_name, args=None, cwd=None, env=None): args = [] if args is None else args old_argv, sys.argv = sys.argv, [""] + args self.log.info("Exec file (observable) %s with args %s", file_name, args) with change_exec_context(args, cwd, env): runpy.run_path(file_name, globals()) @error_decorator def m_exec_module(self, module_name, args=None, cwd=None, env=None): args = [] if args is None else args self.log.info("Exec module %s with args %s", module_name, args) if args and args[-1] == "--version": return self._get_module_version(module_name, args) def exec_module(): self.log.info("execute module %s", module_name) runpy.run_module(module_name, globals(), run_name="__main__") with change_exec_context(args, cwd, env): return self._execute_and_capture_output(exec_module) @error_decorator def m_exec_module_observable(self, module_name, args=None, cwd=None, env=None): args = [] if args is None else args self.log.info("Exec module (observable) %s with args %s", module_name, args) with change_exec_context(args, cwd, env): runpy.run_module(module_name, globals(), run_name="__main__") def _get_module_version(self, module_name, args): """We handle `-m pip --version` as a special case, as this causes the current process to die. These CLI commands are meant for the CLI (i.e. the process is killed once done). 
""" args = [] if args is None else args if module_name == "jupyter" and args[0] != "--version": # This means we're trying to get a version of a sub command. # E.g. python -m jupyter notebook --version. # In such cases, use the subcommand. We can ignore jupyter. module_name = args[0] try: self.log.info("getting module_version %s", module_name) m = importlib.import_module(module_name) return {"stdout": m.__version__} except Exception: return {"error": traceback.format_exc()} def m_get_executable(self): return {"path": sys.executable} def m_get_interpreter_information(self): return { "versionInfo": tuple(sys.version_info), "sysPrefix": sys.prefix, "version": sys.version, } def m_is_module_installed(self, module_name=None): return {"exists": self._is_module_installed(module_name)} def _is_module_installed(self, module_name=None): try: importlib.import_module(module_name) return True except Exception: return False @classmethod def start_daemon(cls, logging_queue_handler=None): """ Starts the daemon. """ if not issubclass(cls, PythonDaemon): raise ValueError( "Handler class must be an instance of PythonDaemon") log.info("Starting %s Daemon", cls.__name__) def on_write_stdout(output): server._endpoint.notify("output", { "source": "stdout", "out": output }) def on_write_stderr(output): server._endpoint.notify("output", { "source": "stderr", "out": output }) stdin, stdout = get_io_buffers() server = cls(stdin, stdout) redirect_output(on_write_stdout, on_write_stderr) # Set up the queue handler that'll send log messages over to the client. if logging_queue_handler is not None: logging_queue_handler.set_server(server) server.start()
class PythonLanguageServer(MethodDispatcher): def __init__(self, reader, writer): self._jsonrpcstreamReader = JsonRpcStreamReader(reader) self._jsonrpcstreamWriter = JsonRpcStreamWriter(writer) self._endpoint = Endpoint(self, self._jsonrpcstreamWriter.write) self.isrunning = True self.workspace = None self.config = None def run(self): self._jsonrpcstreamReader.listen(self._endpoint.consume) def m_shutdown(self): self.isrunning = False def m_exit(self): self._jsonrpcstreamReader.close() self._jsonrpcstreamWriter.close() self._endpoint.shutdown() self.workspace = None def __getitem__(self, item): log.info(item) return super(PythonLanguageServer, self).__getitem__(item) def capabilities(self): import mypy_server is_patched_mypy = mypy_server.is_patched_mypy() if not is_patched_mypy: log.info( 'Using non-patched mypy, rich language features not available.' ) python_38 = sys.version_info >= (3, 8) if not python_38: log.info( 'Using Python before 3.8, rich language features not available.' ) rich_analysis_available = is_patched_mypy and python_38 # Not sure what these three capabilities are used for server_capabilities = { "textDocumentSync": lsp.TextDocumentSyncKind.FULL, # full document text 'definitionProvider': rich_analysis_available, 'hoverProvider': rich_analysis_available } return server_capabilities def m_initialize(self, processId=None, rootUri=None, rootPath=None, initializationOptions=None, **_kwargs): log.info('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) self.workspace = WorkSpace(rootUri, self._endpoint) try: import mypy except ImportError: log.error('mypy module is not installed!') self.workspace.show_message('Mypy is not installed.', lsp.MessageType.Warning) return {'capabilities': None} self.mypyserver = mypy_server.Server(mypy_server.options, mypy_server.DEFAULT_STATUS_FILE) return {"capabilities": self.capabilities()} def m_initialized(self, **_kwargs): pass def m_text_document__did_open(self, textDocument=None, **_kwargs): self.workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): log.info(contentChanges) for change in contentChanges: self.workspace.update_document(textDocument['uri'], change, version=textDocument.get('version')) log.info(self.workspace._docs.items()) to_check = [] for uri, doc in self.workspace._docs.items(): fspath = uris.to_fs_path(uri) to_check.append(fspath) if mypy_server.mypy_version > "0.720": result = self.mypyserver.cmd_check(to_check, False, 80) else: result = self.mypyserver.cmd_check(to_check) diags = mypy_server.parse_mypy_out(result['out']) diagsparams = PublishDiagnosticParams(uri, diags).getDict() log.info(diagsparams) self.workspace.publish_diagnostics(diagsparams['uri'], diagsparams['diagnostics']) def m_text_document__did_save(self, textDocument=None, **_kwargs): import mypy_server mypy_server.mypy_check(self.workspace, self.config)