def test_GetDefinitionMatchingDependency(self):
    source = p.join(TEST_PROJECT, "basic_library", "use_entity_a_and_b.vhd")
    target = p.join(TEST_PROJECT, "basic_library", "two_entities_one_file.vhd")

    definitions = self.server.definitions(
        uris.from_fs_path(source), {"line": 1, "character": 9}
    )

    self.assertIn(
        {
            "uri": uris.from_fs_path(target),
            "range": {
                "start": {"line": 1, "character": 7},
                "end": {"line": 1, "character": 15},
            },
        },
        definitions,
    )

    self.assertIn(
        {
            "uri": uris.from_fs_path(target),
            "range": {
                "start": {"line": 4, "character": 7},
                "end": {"line": 4, "character": 15},
            },
        },
        definitions,
    )
def test_jedi_rename(tmp_workspace, config):  # pylint: disable=redefined-outer-name
    # rename the `Test1` class
    position = {'line': 0, 'character': 6}
    DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME))
    doc = Document(DOC_URI, tmp_workspace)

    result = pyls_rename(config, tmp_workspace, doc, position, 'ShouldBeRenamed')
    assert len(result.keys()) == 1

    changes = result.get('documentChanges')
    assert len(changes) == 2

    assert changes[0]['textDocument']['uri'] == doc.uri
    assert changes[0]['textDocument']['version'] == doc.version
    assert changes[0].get('edits') == [{
        'range': {
            'start': {'line': 0, 'character': 0},
            'end': {'line': 5, 'character': 0},
        },
        'newText': 'class ShouldBeRenamed():\n    pass\n\nclass Test2(ShouldBeRenamed):\n    pass\n',
    }]

    path = os.path.join(tmp_workspace.root_path, DOC_NAME_EXTRA)
    uri_extra = uris.from_fs_path(path)
    assert changes[1]['textDocument']['uri'] == uri_extra
    # This also checks whether documents not yet added via textDocument/didOpen
    # but that do need to be renamed in the project have a `null` version
    # number.
    assert changes[1]['textDocument']['version'] is None

    expected = 'from test1 import ShouldBeRenamed\nx = ShouldBeRenamed()\n'
    if os.name == 'nt':
        # The .write method in the temp_workspace_factory functions writes
        # Windows-style line-endings.
        expected = expected.replace('\n', '\r\n')

    assert changes[1].get('edits') == [{
        'range': {
            'start': {'line': 0, 'character': 0},
            'end': {'line': 2, 'character': 0},
        },
        'newText': expected,
    }]
def _checkLintFileOnMethod(self, source, method):
    with patch.object(self.server.workspace, "publish_diagnostics"):
        _logger.info("Sending %s request", method)
        getattr(self.server, method)(
            textDocument={"uri": unicode(uris.from_fs_path(source)), "text": None}
        )

        mock_call = _waitOnMockCall(self.server.workspace.publish_diagnostics)
        doc_uri, diagnostics = mock_call[1]
        _logger.info("doc_uri: %s", doc_uri)
        _logger.info("diagnostics: %s", diagnostics)

        self.assertEqual(doc_uri, uris.from_fs_path(source))
        return diagnostics
def parse_tag(line, query):
    match = TAG_RE.match(line)
    log.info("Got match %s from line: %s", match, line)
    log.info("Line: %s", line.replace('\t', '\\t').replace(' ', '\\s'))
    if not match:
        return None

    name = match.group('name')

    # Fuzzy match -- only keep the tag if the query is a substring match of
    # 90% or better
    fuzzy_score = fuzz.partial_ratio(query.lower(), name.lower())
    if fuzzy_score < 90:
        return None

    line = int(match.group('line')) - 1

    return {
        'name': name,
        'kind': CTAG_SYMBOL_MAPPING.get(match.group('type'), SymbolKind.Null),
        'location': {
            'uri': uris.from_fs_path(match.group('file')),
            'range': {
                'start': {'line': line, 'character': 0},
                'end': {'line': line, 'character': 0}
            }
        },
        'score': fuzzy_score  # Levenshtein-based partial ratio, used for ranking
    }
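# Hypothetical caller sketch: collect the tags parse_tag accepts and rank them
# by their fuzzy-match score. The tag file path and query string below are
# illustrative assumptions, not values taken from this plugin.
if __name__ == '__main__':
    with open('/tmp/tags') as tag_fh:
        symbols = [tag for tag in (parse_tag(tag_line, 'query') for tag_line in tag_fh) if tag]
    # Best matches first
    symbols.sort(key=lambda sym: sym['score'], reverse=True)
    print(symbols[:10])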
def test_root_project_with_no_setup_py(pyls):
    """Default to workspace root."""
    workspace_root = pyls.workspace.root_path
    test_uri = uris.from_fs_path(os.path.join(workspace_root, 'hello/test.py'))
    pyls.workspace.put_document(test_uri, 'assert True')
    test_doc = pyls.workspace.get_document(test_uri)
    assert workspace_root in test_doc.sys_path()
def test_rope_rename(tmp_workspace, config):  # pylint: disable=redefined-outer-name
    position = {"line": 0, "character": 6}
    DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME))
    doc = Document(DOC_URI, tmp_workspace)

    result = pyls_rename(config, tmp_workspace, doc, position, "ShouldBeRenamed")
    assert len(result.keys()) == 1

    changes = result.get("documentChanges")
    assert len(changes) == 1
    changes = changes[0]

    # Note that this test differs from test_jedi_rename, because rope does not
    # seem to modify files that haven't been opened with textDocument/didOpen.
    assert changes.get("edits") == [{
        "range": {
            "start": {"line": 0, "character": 0},
            "end": {"line": 5, "character": 0},
        },
        "newText": "class ShouldBeRenamed():\n    pass\n\nclass Test2(ShouldBeRenamed):\n    pass\n",
    }]
def test_jedi_rename(tmp_workspace, config):  # pylint: disable=redefined-outer-name
    # rename the `Test1` class
    position = {'line': 0, 'character': 6}
    DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME))
    doc = Document(DOC_URI, tmp_workspace)

    result = pyls_rename(config, tmp_workspace, doc, position, 'ShouldBeRenamed')
    assert len(result.keys()) == 1

    changes = result.get('documentChanges')
    assert len(changes) == 1
    changes = changes[0]

    assert changes.get('edits') == [{
        'range': {
            'start': {'line': 0, 'character': 0},
            'end': {'line': 5, 'character': 0},
        },
        'newText': 'class ShouldBeRenamed():\n    pass\n\nclass Test2(ShouldBeRenamed):\n    pass\n',
    }]
def test_per_file_caching():
    # Ensure that diagnostics are cached per-file.
    with temp_document(DOC) as doc:
        assert pylint_lint.pyls_lint(doc, True)

    assert not pylint_lint.pyls_lint(Document(uris.from_fs_path(__file__)), False)
def test_references(tmp_workspace):  # pylint: disable=redefined-outer-name
    # Over 'Test1' in class Test1():
    position = {'line': 0, 'character': 8}
    DOC1_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC1_NAME))
    doc1 = Document(DOC1_URI)

    refs = pyls_references(doc1, position)
    # Definition, the import and the instantiation
    assert len(refs) == 3

    # Briefly check excluding the definitions (also excludes imports, only
    # counts uses)
    no_def_refs = pyls_references(doc1, position, exclude_declaration=True)
    assert len(no_def_refs) == 1

    # Make sure our definition is correctly located
    doc1_ref = [u for u in refs if u['uri'] == DOC1_URI][0]
    assert doc1_ref['range']['start'] == {'line': 0, 'character': 6}
    assert doc1_ref['range']['end'] == {'line': 0, 'character': 11}

    # Make sure our import is correctly located
    doc2_import_ref = [u for u in refs if u['uri'] != DOC1_URI][0]
    assert doc2_import_ref['range']['start'] == {'line': 0, 'character': 18}
    assert doc2_import_ref['range']['end'] == {'line': 0, 'character': 23}

    doc2_usage_ref = [u for u in refs if u['uri'] != DOC1_URI][1]
    assert doc2_usage_ref['range']['start'] == {'line': 3, 'character': 4}
    assert doc2_usage_ref['range']['end'] == {'line': 3, 'character': 9}
def pyls_rename(config, workspace, document, position, new_name):  # pylint: disable=unused-argument
    log.debug('Executing rename of %s to %s', document.word_at_position(position), new_name)
    kwargs = _utils.position_to_jedi_linecolumn(document, position)
    kwargs['new_name'] = new_name
    try:
        refactoring = document.jedi_script().rename(**kwargs)
    except NotImplementedError:
        raise Exception('No support for renaming in Python 2/3.5 with Jedi. '
                        'Consider using the rope_rename plugin instead')
    log.debug('Finished rename: %s', refactoring.get_diff())

    changes = []
    for file_path, changed_file in refactoring.get_changed_files().items():
        uri = uris.from_fs_path(file_path)
        doc = workspace.get_maybe_document(uri)
        changes.append({
            'textDocument': {
                'uri': uri,
                'version': doc.version if doc else None
            },
            'edits': [
                {
                    'range': {
                        'start': {'line': 0, 'character': 0},
                        'end': {
                            'line': _num_lines(changed_file.get_new_code()),
                            'character': 0,
                        },
                    },
                    'newText': changed_file.get_new_code(),
                }
            ],
        })
    return {'documentChanges': changes}
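# `_num_lines` is used above but is not part of this excerpt. A minimal sketch
# of such a helper, assuming it only needs to count the lines of the
# replacement text (an illustrative assumption, not necessarily the project's
# actual implementation):
def _num_lines(file_contents):
    """Count the number of lines in the given string."""
    return len(file_contents.splitlines())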
def test_HoverOnDesignUnit(self):
    path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd")
    very_common_pkg = p.join(TEST_PROJECT, "basic_library", "very_common_pkg.vhd")
    package_with_constants = p.join(
        TEST_PROJECT, "basic_library", "package_with_constants.vhd"
    )
    clock_divider = p.join(TEST_PROJECT, "basic_library", "clock_divider.vhd")

    expected = [
        "Build sequence for %s is" % str(path_to_foo),
        "",
        tabulate(
            [
                (1, "basic_library", str(very_common_pkg)),
                (2, "basic_library", str(package_with_constants)),
                (3, "basic_library", str(clock_divider)),
                (4, DEFAULT_LIBRARY.name, str(path_to_foo)),
            ],
            headers=("#", "Library", "Path"),
            tablefmt="plain",
        ),
    ]

    self.assertDictEqual(
        self.server.hover(
            uris.from_fs_path(path_to_foo), {"line": 7, "character": 7}
        ),
        {"contents": "\n".join(expected)},
    )
def test_per_file_caching(config, workspace):
    # Ensure that diagnostics are cached per-file.
    with temp_document(DOC, workspace) as doc:
        assert pylint_lint.pyls_lint(config, doc, True)

    assert not pylint_lint.pyls_lint(
        config, Document(uris.from_fs_path(__file__), workspace), False)
def references(self, doc_uri, position, exclude_declaration):
    # type: (URI, Dict[str, int], bool) -> Any
    element = self._getElementAtPosition(
        Path(to_fs_path(doc_uri)),
        Location(line=position["line"], column=position["character"]),
    )

    # Element not identified
    if element is None:
        return None

    references = []  # type: List[Dict[str, Any]]

    if not exclude_declaration:
        for line, column in element.locations:
            references += [
                {
                    "uri": from_fs_path(str(element.owner)),
                    "range": {
                        "start": {"line": line, "character": column},
                        "end": {"line": line, "character": column},
                    },
                }
            ]

    for reference in self.checker.database.getReferencesToDesignUnit(element):
        for line, column in reference.locations:
            references += [
                {
                    "uri": from_fs_path(str(reference.owner)),
                    "range": {
                        "start": {"line": line, "character": column},
                        "end": {"line": line, "character": column},
                    },
                }
            ]

    return references
def _make_document(content):
    tmp = tempfile.NamedTemporaryFile(prefix='pylstest', mode='w', delete=False)
    tmp.write(content)
    tmp.close()
    created_files.append(tmp.name)
    return Document(uris.from_fs_path(tmp.name), content)
def test_basic(workspace, config):
    doc = Document(uris.from_fs_path(str(data / "file.py")), workspace)
    diagnostics = pyls_lint(config, doc)

    assert diagnostics == [
        build_diagnostic("foo", (7, 4), (7, 7), "deprecated at some point"),
        build_diagnostic("imported", (9, 0), (9, 8), "test reason"),
    ]
def test(dump, generate, watched_file):
    _initializeServer(
        it.server, params={"rootUri": uris.from_fs_path(TEST_PROJECT)}
    )

    watched_file.assert_called_once()
    generate.assert_called_once()
    # Will get called twice
    dump.assert_called()
def test():
    _initializeServer(
        it.server,
        params={
            "rootUri": uris.from_fs_path(TEST_PROJECT),
            "initializationOptions": {"project_file": "vimhdl.prj"},
        },
    )
def temp_document(doc_text):
    temp_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
    name = temp_file.name
    temp_file.write(doc_text)
    temp_file.close()
    doc = Document(uris.from_fs_path(name), MockWorkspace())
    return name, doc
def test_GetDefinitionNotKnown(self):
    path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd")

    self.assertEqual(
        self.server.definitions(
            uris.from_fs_path(path_to_foo), {"line": 0, "character": 0}
        ),
        [],
    )
def pyls(tmpdir):
    """ Return an initialized python LS """
    ls = PythonLanguageServer(StringIO, StringIO)

    ls.m_initialize(
        processId=1,
        rootUri=uris.from_fs_path(str(tmpdir)),
        initializationOptions={}
    )

    return ls
def execute(ctags_exe, tag_file, directory, append=False):
    """Run ctags against the given directory."""
    # Ensure the directory holding the tag file exists
    tag_file_dir = os.path.dirname(tag_file)
    if not os.path.exists(tag_file_dir):
        os.makedirs(tag_file_dir)

    cmd = [
        ctags_exe,
        '-f', uris.to_fs_path(uris.from_fs_path(tag_file)),
        '--languages=Python',
        '-R'
    ] + CTAG_OPTIONS
    if append:
        cmd.append('--append')
    cmd.append(uris.to_fs_path(uris.from_fs_path(directory)))

    log.info("Executing exuberant ctags: %s", cmd)
    log.info("ctags: %s", subprocess.check_output(cmd))
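# Hypothetical usage sketch; the binary name, tag file path and source
# directory below are illustrative assumptions, not values taken from this
# module:
if __name__ == '__main__':
    execute(
        ctags_exe='ctags',
        tag_file='/tmp/pyls-ctags/project.tags',
        directory='.',
        append=False,
    )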
@contextmanager  # from contextlib -- needed so the tests can use this in a `with` block
def temp_document(doc_text, workspace):
    try:
        temp_file = tempfile.NamedTemporaryFile(mode='w', delete=False)
        name = temp_file.name
        temp_file.write(doc_text)
        temp_file.close()
        yield Document(uris.from_fs_path(name), workspace)
    finally:
        os.remove(name)
def test_pycodestyle_config(workspace):
    """ Test that we load config files properly.

    Config files are loaded in the following order:
        tox.ini pep8.cfg setup.cfg pycodestyle.cfg

    Each one overrides the values in the previous one. These files are first
    looked for in the current document's directory and then in each parent
    directory until one is found, terminating at the workspace root.

    If any section called 'pycodestyle' exists, it is used exclusively and any
    config in a 'pep8' section is ignored.
    """
    doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, 'test.py'))
    workspace.put_document(doc_uri, DOC)
    doc = workspace.get_document(doc_uri)

    # Make sure we get a warning for 'indentation contains tabs'
    diags = pycodestyle_lint.pyls_lint(workspace, doc)
    assert [d for d in diags if d['code'] == 'W191']

    content = {
        'setup.cfg': ('[pycodestyle]\nignore = W191, E201, E128', True),
        'tox.ini': ('', False)
    }

    for conf_file, (conf_content, working) in list(content.items()):
        # Now we'll add a config file that should ignore these warnings
        with open(os.path.join(workspace.root_path, conf_file), 'w+') as f:
            f.write(conf_content)
        workspace._config.settings.cache_clear()

        # And check that the W191/E201 warnings only disappear when the config
        # file actually takes effect
        diags = pycodestyle_lint.pyls_lint(workspace, doc)
        assert len([d for d in diags if d['code'] == 'W191']) == (0 if working else 1)
        assert len([d for d in diags if d['code'] == 'E201']) == (0 if working else 1)
        assert [d for d in diags if d['code'] == 'W391']

        os.unlink(os.path.join(workspace.root_path, conf_file))

    # Make sure we can ignore via the PYLS config as well
    workspace._config.update(
        {'plugins': {'pycodestyle': {'ignore': ['W191', 'E201']}}}
    )
    # And make sure only the W391 warning remains
    diags = pycodestyle_lint.pyls_lint(workspace, doc)
    assert not [d for d in diags if d['code'] == 'W191']
    assert not [d for d in diags if d['code'] == 'E201']
    assert [d for d in diags if d['code'] == 'W391']
def test_HoverOnDependency(self):
    path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd")
    clock_divider = p.join(TEST_PROJECT, "basic_library", "clock_divider.vhd")

    self.assertDictEqual(
        self.server.hover(
            uris.from_fs_path(path_to_foo), {"line": 32, "character": 32}
        ),
        {"contents": 'Path "%s", library "basic_library"' % clock_divider},
    )
def test_config_file(tmpdir):
    # A config file in the same directory as the source file will be used
    conf = tmpdir.join('.style.yapf')
    conf.write('[style]\ncolumn_limit = 14')
    src = tmpdir.join('test.py')
    doc = Document(uris.from_fs_path(src.strpath), DOC)

    # A was split onto multiple lines because of the column_limit from the
    # config file
    assert pyls_format_document(doc)[0]['newText'] == \
        "A = [\n    'h', 'w',\n    'a'\n]\n\nB = ['h', 'w']\n"
def test():
    it.project_file = "__some_project_file.prj"
    it.assertFalse(p.exists(it.project_file))

    _initializeServer(
        it.server,
        params={
            "rootUri": uris.from_fs_path(TEST_PROJECT),
            "initializationOptions": {"project_file": it.project_file},
        },
    )
def test_references_builtin(tmp_workspace):  # pylint: disable=redefined-outer-name
    # Over 'UnicodeError':
    position = {'line': 4, 'character': 7}
    doc2_uri = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC2_NAME))
    doc2 = Document(doc2_uri)

    refs = pyls_references(doc2, position)

    assert len(refs) == 1
    assert refs[0]['range']['start'] == {'line': 4, 'character': 7}
    assert refs[0]['range']['end'] == {'line': 4, 'character': 19}
def test_non_root_project(pyls):
    repo_root = os.path.join(pyls.workspace.root_path, 'repo-root')
    os.mkdir(repo_root)
    project_root = os.path.join(repo_root, 'project-root')
    os.mkdir(project_root)

    with open(os.path.join(project_root, 'setup.py'), 'w+') as f:
        f.write('# setup.py')

    test_uri = uris.from_fs_path(os.path.join(project_root, 'hello/test.py'))
    pyls.workspace.put_document(test_uri, 'assert True')
    test_doc = pyls.workspace.get_document(test_uri)
    assert project_root in pyls.workspace.syspath_for_path(test_doc.path)
def test_non_root_project(pyls, metafiles):
    repo_root = os.path.join(pyls.workspace.root_path, 'repo-root')
    os.mkdir(repo_root)
    project_root = os.path.join(repo_root, 'project-root')
    os.mkdir(project_root)

    for metafile in metafiles:
        with open(os.path.join(project_root, metafile), 'w+') as f:
            f.write('# ' + metafile)

    test_uri = uris.from_fs_path(os.path.join(project_root, 'hello/test.py'))
    pyls.workspace.put_document(test_uri, 'assert True')
    test_doc = pyls.workspace.get_document(test_uri)
    assert project_root in test_doc.sys_path()
def test_ReferencesOfAnInvalidElement(self):
    path_to_foo = p.join(TEST_PROJECT, "another_library", "foo.vhd")

    # Make sure there's no element at this location
    unit = self.server._getElementAtPosition(Path(path_to_foo), Location(0, 0))
    self.assertIsNone(unit)

    for exclude_declaration in (True, False):
        self.assertIsNone(
            self.server.references(
                doc_uri=uris.from_fs_path(path_to_foo),
                position={"line": 0, "character": 0},
                exclude_declaration=exclude_declaration,
            )
        )