def _threaded_code_format(self, text_document, options, monitor: IMonitor):
    """Compute formatting edits for a document.

    :param text_document: dict with the TextDocumentItem fields (uri/text/...).
    :param options: LSP formatting options dict (``tabSize`` is honored) or None.
    :param monitor: monitor used to cancel long-running work.
    :return: list of TextEdit dicts (empty when there is nothing to change).
    """
    from robotframework_ls.impl.formatting import robot_source_format
    from robotframework_ls.impl.formatting import create_text_edit_from_diff
    from robocorp_ls_core.lsp import TextDocumentItem

    document_item = TextDocumentItem(**text_document)
    contents = document_item.text
    if not contents:
        # The client did not send the text: load it through a completion
        # context (which reads the document source from the workspace/disk).
        ctx = self._create_completion_context(document_item.uri, 0, 0, monitor)
        if ctx is None:
            return []
        contents = ctx.doc.source
    if not contents:
        return []

    opts = options if options is not None else {}
    tab_size = opts.get("tabSize", 4)

    formatted = robot_source_format(contents, space_count=tab_size)
    # None or unchanged output means there is nothing to apply.
    if formatted is None or formatted == contents:
        return []
    return [edit.to_dict() for edit in create_text_edit_from_diff(contents, formatted)]
def m_text_document__did_open(self, textDocument=None, **_kwargs) -> None:
    """Handle LSP ``textDocument/didOpen``: register the document and lint it.

    :param textDocument: dict with the TextDocumentItem fields sent by the client.
    """
    from robocorp_ls_core.lsp import TextDocumentItem

    workspace = self.workspace
    if workspace is not None:
        workspace.put_document(TextDocumentItem(**textDocument))
    # A freshly-opened document is in its on-disk state, hence is_saved=True.
    self.lint(textDocument["uri"], is_saved=True)
def test_typing_not_shown(libspec_manager, workspace, data_regression, workspace_dir):
    """Keyword completions from a libspec library should not expose typing info.

    Writes a libspec into a workspace folder, registers that folder with the
    libspec manager and checks the completions against the regression data.
    """
    from robocorp_ls_core import uris
    import os  # FIX: was `from os.path import os` (accidental attribute import).
    from robotframework_ls_tests.fixtures import LIBSPEC_3
    from robotframework_ls.impl import keyword_completions
    from robotframework_ls.impl.completion_context import CompletionContext
    from robocorp_ls_core.lsp import TextDocumentItem

    workspace_dir_a = os.path.join(workspace_dir, "workspace_dir_a")
    os.makedirs(workspace_dir_a)
    with open(os.path.join(workspace_dir_a, "my.libspec"), "w") as stream:
        stream.write(LIBSPEC_3)
    libspec_manager.add_workspace_folder(uris.from_fs_path(workspace_dir_a))
    # The library described by the libspec must be resolvable without creation.
    assert libspec_manager.get_library_info("case3_library", create=False) is not None

    workspace.set_root(workspace_dir, libspec_manager=libspec_manager)
    doc = workspace.ws.put_document(TextDocumentItem("temp_doc.robot", text=""))
    doc.source = """*** Settings ***
Library    case3_library

*** Test Cases ***
Can use resource keywords
    Case Verify"""

    completions = keyword_completions.complete(
        CompletionContext(doc, workspace=workspace.ws)
    )

    data_regression.check(completions)
def test_document_from_file(workspace, workspace_dir, cases):
    """Exercise workspace document caching vs. reading from the filesystem.

    Verifies that ``get_document(accept_from_file=...)`` re-reads changed files,
    that in-memory documents shadow the filesystem, and that removal prunes
    cached entries.
    """
    import os  # FIX: was `from os.path import os` (accidental attribute import).
    from robocorp_ls_core import uris
    from robocorp_ls_core.lsp import TextDocumentItem

    cases.copy_to("case1", workspace_dir)
    workspace.set_root(workspace_dir)
    ws = workspace.ws
    case1_file = os.path.join(workspace_dir, "case1.robot")
    assert os.path.exists(case1_file)
    case1_doc_uri = uris.from_fs_path(case1_file)

    # Not in memory: only available when reading from file is allowed.
    resource_doc = ws.get_document(case1_doc_uri, accept_from_file=False)
    assert resource_doc is None

    cached_doc = ws.get_document(case1_doc_uri, accept_from_file=True)
    assert cached_doc is not None
    assert "*** Settings ***" in cached_doc.source

    with open(case1_file, "w") as stream:
        stream.write("new contents")

    assert "*** Settings ***" in cached_doc.source  # i.e.: Unchanged

    # When we get it again it verifies the filesystem.
    cached_doc2 = ws.get_document(case1_doc_uri, accept_from_file=True)
    assert cached_doc is not cached_doc2
    # i.e.: Unchanged initial version in memory
    assert "*** Settings ***" in cached_doc.source
    assert cached_doc2.source == "new contents"

    # Still None if we can't accept cached.
    resource_doc = ws.get_document(case1_doc_uri, accept_from_file=False)
    assert resource_doc is None

    # An explicitly put document shadows the filesystem copy.
    ws.put_document(TextDocumentItem(case1_doc_uri, text="rara"))
    resource_doc = ws.get_document(case1_doc_uri, accept_from_file=False)
    assert resource_doc is not None
    assert resource_doc is not cached_doc

    ws.remove_document(case1_doc_uri)
    resource_doc = ws.get_document(case1_doc_uri, accept_from_file=False)
    assert resource_doc is None

    cached_doc3 = ws.get_document(case1_doc_uri, accept_from_file=True)
    assert cached_doc3 is not None
    # i.e.: it should've been pruned when the doc was added.
    assert cached_doc3 is not cached_doc
    assert cached_doc3.source == "new contents"

    os.remove(case1_file)
    cached_doc4 = ws.get_document(case1_doc_uri, accept_from_file=True)
    assert cached_doc4 is None

    # The old one in memory doesn't change after the file is removed
    assert cached_doc3.source == "new contents"
def test_completion_not_duplicated(workspace, cases, libspec_manager, workspace_dir):
    """Auto-import completions must not duplicate already-imported keywords."""
    # FIX: Dict/List were used below without being imported (undefined names).
    from typing import Dict, List
    from robocorp_ls_core.lsp import TextDocumentItem
    import os.path
    from robocorp_ls_core import uris
    from robotframework_ls.robot_config import RobotConfig
    from robotframework_ls.impl.collect_keywords import (
        collect_keyword_name_to_keyword_found,
    )
    from robotframework_ls.impl.protocols import IKeywordFound

    cases.copy_to("case_inner_keywords", workspace_dir)
    workspace.set_root(workspace_dir, libspec_manager=libspec_manager)
    config = RobotConfig()
    config.update({"robot.pythonpath": [workspace_dir]})
    libspec_manager.config = config

    uri = uris.from_fs_path(os.path.join(workspace_dir, "inner", "case_inner.robot"))
    workspace.ws.put_document(
        TextDocumentItem(
            uri,
            text="""*** Settings ***
Resource    case_root.robot

*** Keywords ***
Check with keyword at inner
    [Arguments]         ${arg1}     ${arg2}
    Should Be Equal     ${arg1}     ${arg2}

*** Test Cases ***
Testing Completion Here
    Check with ke""",
        )
    )
    doc = workspace.ws.get_document(uri, accept_from_file=False)

    from robotframework_ls.impl.completion_context import CompletionContext
    from robotframework_ls.impl import auto_import_completions

    workspace.ws.wait_for_check_done(5)
    context = CompletionContext(doc, workspace=workspace.ws, config=config)
    keyword_name_to_keyword_found: Dict[
        str, List[IKeywordFound]
    ] = collect_keyword_name_to_keyword_found(context)

    completions = auto_import_completions.complete(
        context, keyword_name_to_keyword_found
    )

    # I.e.: all the related symbols are already imported and will be shown
    # in the regular completion.
    assert len(completions) == 0
def test_completion_with_auto_import_duplicated(workspace, setup_case2_in_dir_doc):
    """A keyword defined in two docs still yields a single auto-import completion."""
    # FIX: Dict/List were used below without being imported (undefined names).
    from typing import Dict, List
    from robotframework_ls.impl.completion_context import CompletionContext
    from robotframework_ls.impl import auto_import_completions
    from robotframework_ls.impl.protocols import IKeywordFound
    from robotframework_ls.impl.collect_keywords import (
        collect_keyword_name_to_keyword_found,
    )
    from robocorp_ls_core.lsp import TextDocumentItem

    doc = workspace.get_doc("case1.robot")
    doc.source = """
*** Keywords ***
KeywordInCase1
    In Lib 2
"""

    # Just create a dummy entry for KeywordInCase1.
    doc_foobar = workspace.ws.put_document(
        TextDocumentItem(
            "foobar",
            text="""
*** Keywords ***
KeywordInCase1
    In Lib 2
""",
        )
    )

    keyword_name_to_keyword_found: Dict[
        str, List[IKeywordFound]
    ] = collect_keyword_name_to_keyword_found(
        CompletionContext(doc_foobar, workspace=workspace.ws)
    )

    doc2 = setup_case2_in_dir_doc
    doc2.source = """
*** Test Cases ***
User can call library
    KeywordInCa"""

    completions = auto_import_completions.complete(
        CompletionContext(doc2, workspace=workspace.ws),
        keyword_name_to_keyword_found,
    )

    assert len(completions) == 1
    apply_completion(doc2, completions[0])

    assert (
        doc2.source
        == """*** Settings ***
Resource    ../case1.robot

*** Test Cases ***
User can call library
    case1.KeywordInCase1"""
    )
def setup_case2_doc(workspace, cases, libspec_manager, workspace_dir):
    """Fixture helper: set up the case1 workspace with an in-memory case2 doc.

    :return: the in-memory document registered for ``case2.robot``.
    """
    from robocorp_ls_core.lsp import TextDocumentItem
    import os.path
    from robocorp_ls_core import uris

    cases.copy_to("case1", workspace_dir)
    workspace.set_root(workspace_dir, libspec_manager=libspec_manager)

    case2_uri = uris.from_fs_path(os.path.join(workspace_dir, "case2.robot"))
    case2_contents = """
*** Test Cases ***
User can call library
    Verify another m"""
    workspace.ws.put_document(TextDocumentItem(case2_uri, text=case2_contents))
    return workspace.ws.get_document(case2_uri, accept_from_file=False)
def m_text_document__did_change(
    self, contentChanges=None, textDocument=None, **_kwargs
):
    """Handle LSP ``textDocument/didChange``: apply edits, then lint.

    :param contentChanges: list of LSP TextDocumentContentChangeEvent dicts.
    :param textDocument: dict with the VersionedTextDocumentIdentifier fields.
    """
    from robocorp_ls_core.lsp import TextDocumentItem
    from robocorp_ls_core.lsp import TextDocumentContentChangeEvent

    if contentChanges:
        text_document_item = TextDocumentItem(**textDocument)
        for change in contentChanges:
            try:
                # FIX: renamed `range` -> `change_range` (shadowed the builtin).
                change_range = change.get("range", None)
                range_length = change.get("rangeLength", 0)
                text = change.get("text", "")
                self.workspace.update_document(
                    text_document_item,
                    TextDocumentContentChangeEvent(
                        range=change_range, rangeLength=range_length, text=text
                    ),
                )
            # FIX: bare `except:` also swallowed SystemExit/KeyboardInterrupt;
            # a best-effort per-change recovery should only catch Exception.
            except Exception:
                # Lazy %-args so formatting only happens when actually logged.
                log.exception(
                    "Error updating document: %s with changes: %s",
                    textDocument,
                    contentChanges,
                )
    self.lint(textDocument["uri"], is_saved=False)