def __init__(self, root_uri, init_opts):
    """Initialize workspace configuration.

    :param root_uri: URI of the workspace root
    :param init_opts: initialization options passed by the client
    """
    self._root_uri = root_uri
    self._root_path = uris.to_fs_path(root_uri)
    self._init_opts = init_opts

    self._disabled_plugins = []
    self._settings = {}
    self._plugin_settings = {}

    # Per-tool configuration-file readers, rooted at the workspace
    self._config_sources = {
        'flake8': Flake8Config(self._root_path),
        'pycodestyle': PyCodeStyleConfig(self._root_path),
    }

    # Plugin manager setup; tracing is routed through our debug logger
    self._pm = pluggy.PluginManager(PYLS)
    self._pm.trace.root.setwriter(log.debug)
    self._pm.enable_tracing()
    self._pm.add_hookspecs(hookspecs)
    self._pm.load_setuptools_entrypoints(PYLS)

    for plugin_name, plugin_obj in self._pm.list_name_plugin():
        log.info("Loaded pyls plugin %s from %s", plugin_name, plugin_obj)

    # Merge settings contributed by each plugin's pyls_settings hook
    for plugin_conf in self._pm.hook.pyls_settings(config=self):
        self._plugin_settings = _utils.merge_dicts(
            self._plugin_settings, plugin_conf)
def execute(ctags_exe, tag_file, directory, append=False):
    """Run ctags against the given directory.

    :param ctags_exe: path to the exuberant ctags executable
    :param tag_file: tags file to write
    :param directory: directory to index recursively
    :param append: if True, pass ``--append`` so results are added to an
        existing tags file instead of overwriting it
    """
    # Ensure the output directory exists. The truthiness guard handles tag
    # files given as bare filenames (os.path.dirname returns '' for those,
    # which os.makedirs would reject), and exist_ok avoids the TOCTOU race
    # of the previous exists()-then-makedirs() pattern.
    tag_file_dir = os.path.dirname(tag_file)
    if tag_file_dir:
        os.makedirs(tag_file_dir, exist_ok=True)

    # Round-tripping through a URI normalizes the path for the platform
    cmd = [
        ctags_exe,
        '-f', uris.to_fs_path(uris.from_fs_path(tag_file)),
        '--languages=Python',
        '-R',
    ] + CTAG_OPTIONS
    if append:
        cmd.append('--append')
    cmd.append(uris.to_fs_path(uris.from_fs_path(directory)))

    log.info("Executing exuberant ctags: %s", cmd)
    log.info("ctags: %s", subprocess.check_output(cmd))
def _onConfigUpdate(self, options):
    # type: (...) -> Any
    """
    Updates the checker server from options if the 'project_file' key is
    present. Please note that this is run from both initialize and
    workspace/did_change_configuration, and when run from initialize the
    LSP client might not be ready to take messages. To circumvent this,
    make sure m_initialize returns before calling this to actually
    configure the server.
    """
    if not self.workspace or not self.workspace.root_uri:
        return

    root_dir = to_fs_path(self.workspace.root_uri)
    self._checker = Server(self.workspace, root_dir=Path(root_dir))

    _logger.debug("Updating from %s", options)

    # Clear previous diagnostics
    self._global_diags = set()

    path = self._getProjectFilePath(options)

    try:
        self.checker.setConfig(path, origin=ConfigFileOrigin.user)
        return
    except UnknownParameterError as exc:
        _logger.info("Failed to read config from %s: %s", path, exc)
        return
    except FileNotFoundError:
        # If the file couldn't be found, proceed to searching the root
        # URI (if it has been set)
        pass

    if not self.workspace or not self.workspace.root_path:
        _logger.debug(
            "No workspace and/or root path not set, can't search files")
        return

    self.showInfo("Searching {} for HDL files...".format(
        self.workspace.root_path))

    # Having no project file but with root URI triggers searching for
    # sources automatically
    config = SimpleFinder([self.workspace.root_path]).generate()

    # Write this to a file and tell the server to use it. Use a context
    # manager so the handle is flushed and closed deterministically (the
    # previous open() call leaked it).
    auto_project_file = getTemporaryFilename(AUTO_PROJECT_FILE_NAME)
    with open(auto_project_file, "w") as fd:
        json.dump(config, fd)

    self.checker.setConfig(auto_project_file,
                           origin=ConfigFileOrigin.generated)
def __init__(self, root_uri, init_opts, process_id, capabilities):
    """Initialize configuration from LSP initialize parameters.

    :param root_uri: URI of the workspace root
    :param init_opts: initialization options passed by the client
    :param process_id: parent process id from the initialize request
    :param capabilities: client capabilities from the initialize request
    """
    self._root_uri = root_uri
    self._root_path = uris.to_fs_path(root_uri)
    self._init_opts = init_opts
    self._process_id = process_id
    self._capabilities = capabilities

    self._settings = {}
    self._plugin_settings = {}

    # Config-file readers are optional; skip any whose import fails
    self._config_sources = {}
    try:
        from .flake8_conf import Flake8Config
        self._config_sources['flake8'] = Flake8Config(self._root_path)
    except ImportError:
        pass
    try:
        from .pycodestyle_conf import PyCodeStyleConfig
        self._config_sources['pycodestyle'] = PyCodeStyleConfig(
            self._root_path)
    except ImportError:
        pass

    self._pm = pluggy.PluginManager(PYLS)
    self._pm.trace.root.setwriter(log.debug)
    self._pm.enable_tracing()
    self._pm.add_hookspecs(hookspecs)

    # Pluggy only skips plugins raising DistributionNotFound, not plain
    # ImportError, and we don't want every plugin to have to catch and
    # re-throw. Probe each entry point up front and block the broken ones
    # so load_setuptools_entrypoints() ignores them.
    for ep in pkg_resources.iter_entry_points(PYLS):
        try:
            ep.load()
        except ImportError as exc:
            log.warning("Failed to load %s entry point '%s': %s",
                        PYLS, ep.name, exc)
            self._pm.set_blocked(ep.name)

    # Load the entry points into pluggy, having blocked any failing ones
    self._pm.load_setuptools_entrypoints(PYLS)

    for plugin_name, plugin_obj in self._pm.list_name_plugin():
        if plugin_obj is not None:
            log.info("Loaded pyls plugin %s from %s",
                     plugin_name, plugin_obj)

    # Merge settings contributed by each plugin's pyls_settings hook
    for plugin_conf in self._pm.hook.pyls_settings(config=self):
        self._plugin_settings = _utils.merge_dicts(
            self._plugin_settings, plugin_conf)

    self._update_disabled_plugins()
def definitions(self, doc_uri, position):
    # type: (...) -> Any
    """Resolve go-to-definition for the dependency under the cursor."""
    doc_path = Path(to_fs_path(doc_uri))
    cursor = Location(line=position["line"],
                      column=position["character"])
    dependency = self._getElementAtPosition(doc_path, cursor)

    # Only dependency specs can be followed to a definition
    if not isinstance(dependency, BaseDependencySpec):
        _logger.debug("Go to definition not supported for item %s",
                      dependency)
        return []

    # Work out where this dependency refers to
    info = self.checker.resolveDependencyToPath(dependency)
    if info is None:
        _logger.debug("Unable to resolve %s to a path", dependency)
        return []

    _logger.info("Dependency %s resolved to %s", dependency, info)

    target_path, _ = info
    target_uri = from_fs_path(str(target_path))

    locations = []  # type: List[Dict[str, Any]]

    # Get the design unit that matched the dependency and report every
    # location where it's defined
    for unit in self.checker.database.getDesignUnitsByPath(target_path):
        if unit.name != dependency.name or not unit.locations:
            continue
        for line, column in unit.locations:
            locations.append({
                "uri": target_uri,
                "range": {
                    "start": {"line": line, "character": column},
                    "end": {"line": line,
                            "character": column + len(unit)},
                },
            })

    return locations
def references(self, doc_uri, position, exclude_declaration):
    # type: (URI, Dict[str, int], bool) -> Any
    """List references to the element at the given position."""
    element = self._getElementAtPosition(
        Path(to_fs_path(doc_uri)),
        Location(line=position["line"], column=position["character"]),
    )

    # Element not identified
    if element is None:
        return None

    def _location_dict(owner, line, column):
        # Zero-width range at the reference's position
        return {
            "uri": from_fs_path(str(owner)),
            "range": {
                "start": {"line": line, "character": column},
                "end": {"line": line, "character": column},
            },
        }

    references = []  # type: List[Dict[str, Any]]

    if not exclude_declaration:
        # Include the element's own declaration locations
        for line, column in element.locations:
            references.append(_location_dict(element.owner, line, column))

    for reference in self.checker.database.getReferencesToDesignUnit(
            element):
        for line, column in reference.locations:
            references.append(
                _location_dict(reference.owner, line, column))

    return references
def hover(self, doc_uri, position):
    # type: (URI, Dict[str, int]) -> Any
    """Return hover contents for the element under the cursor, or None."""
    path = Path(to_fs_path(doc_uri))

    # Check if the element under the cursor matches something we know
    element = self._getElementAtPosition(
        path,
        Location(line=position["line"], column=position["character"]))
    _logger.debug("Getting info from %s", element)

    if isinstance(element, (VerilogDesignUnit, VhdlDesignUnit)):
        return {"contents": self._getBuildSequenceForHover(path)}
    if isinstance(element, BaseDependencySpec):
        return {"contents": self._getDependencyInfoForHover(element)}
    return None
def lint(self, doc_uri, is_saved):
    # type: (URI, bool) -> Any
    """Collect diagnostics for doc_uri and publish them per filename."""
    _logger.info("linting: %s", doc_uri)
    diags = set(self._getDiags(doc_uri, is_saved))

    # Group diagnostics by the file they refer to. doc_uri itself is
    # always included so its diagnostics get cleared when it has none.
    paths = {diag.filename for diag in diags}
    paths.add(Path(to_fs_path(doc_uri)))

    for path in paths:
        path_diags = [checkerDiagToLspDict(diag)
                      for diag in diags
                      if diag.filename == path]
        self.workspace.publish_diagnostics(
            from_fs_path(str(path)), path_diags)
def _getDiags(self, doc_uri, is_saved):
    # type: (URI, bool) -> Iterable[CheckerDiagnostic]
    """
    Gets diagnostics of the URI, whether from the saved file or from its
    contents; returns an iterable containing the diagnostics of the
    doc_uri and other URIs that were compiled as dependencies and
    generated diagnostics with severity higher than error
    """
    if self.checker is None:  # pragma: no cover
        _logger.debug("No checker, won't try to get diagnostics")
        return ()

    path = Path(to_fs_path(doc_uri))
    _logger.info("Linting %s (saved=%s)", repr(path), is_saved)

    if is_saved:
        # Saved file: the checker can read it straight from disk
        return self.checker.getMessagesByPath(path)

    # Unsaved: hand the in-memory buffer contents to the checker, which
    # will involve dumping them into a temporary file
    text = self.workspace.get_document(doc_uri).source
    return self.checker.getMessagesWithText(path, text)
def find_parents(self, path, names):
    """Delegate to ``_utils.find_parents`` with the workspace root as the
    search root."""
    workspace_root = uris.to_fs_path(self._root_uri)
    return _utils.find_parents(workspace_root, path, names)
def test_win_to_fs_path(uri, path):
    """Check that URI-to-filesystem-path conversion yields the expected path."""
    converted = uris.to_fs_path(uri)
    assert converted == path