Example #1
def dbg_wait_for(condition, msg=None, timeout=DEFAULT_TIMEOUT, sleep=1 / 20.0):
    import sys
    from robocorp_ls_core.basic import wait_for_condition

    if "pydevd" in sys.modules:
        timeout = sys.maxsize

    wait_for_condition(condition, msg, timeout, sleep)
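
For reference, the pattern shared by all of these examples is a condition callable that wait_for_condition polls until it returns a truthy value or the timeout elapses; dbg_wait_for above simply raises the timeout when a debugger (pydevd) is attached. A minimal sketch, assuming the wait_for_condition(condition, msg, timeout, sleep) signature implied by the wrapper above (the results list, the background timer, and the concrete values are illustrative, not taken from the library):

import threading

from robocorp_ls_core.basic import wait_for_condition

results = []  # illustrative shared state filled by a background thread

# Simulate background work that makes the condition true after 0.2s.
threading.Timer(0.2, lambda: results.append("done")).start()

# Poll roughly every 50 ms; fail with `msg` if the condition is still false after 5 seconds.
wait_for_condition(
    lambda: len(results) > 0,
    msg="Background work did not produce any result in time.",
    timeout=5,
    sleep=1 / 20.0,
)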
Example #2
def test_libspec_cache_no_lib(libspec_manager, workspace_dir):
    import os
    import time
    from pathlib import Path
    from typing import Optional
    from robotframework_ls.impl.robot_specbuilder import LibraryDoc
    from robocorp_ls_core.basic import wait_for_condition

    os.makedirs(workspace_dir)
    libspec_manager.add_additional_pythonpath_folder(workspace_dir)

    def disallow_cached_create_libspec(*args, **kwargs):
        raise AssertionError("Should not be called")

    library_info: Optional[LibraryDoc] = libspec_manager.get_library_info(
        "check_lib")
    assert library_info is None

    # Make sure that we don't try to create it anymore for the same lib.
    original_cached_create_libspec = libspec_manager._cached_create_libspec
    libspec_manager._cached_create_libspec = disallow_cached_create_libspec
    library_info: Optional[LibraryDoc] = libspec_manager.get_library_info(
        "check_lib")
    assert library_info is None
    libspec_manager._cached_create_libspec = original_cached_create_libspec

    time.sleep(0.1)

    path = Path(workspace_dir) / "check_lib.py"
    path.write_text("""
def method2(a:int):
    pass
""")
    # Check that the cache invalidation is in place!
    wait_for_condition(
        lambda: libspec_manager.get_library_info("check_lib") is not None,
        msg="Did not recreate library in the available timeout.",
    )
Example #3
def test_completion_with_auto_import_resource_import(workspace,
                                                     setup_case2_in_dir_doc):
    from robotframework_ls.impl.completion_context import CompletionContext
    from robotframework_ls.impl import auto_import_completions
    from robocorp_ls_core.basic import wait_for_condition

    doc = workspace.get_doc("case1.robot")

    doc.source = """
*** Keywords ***
KeywordInCase1
    In Lib 2
"""

    doc2 = setup_case2_in_dir_doc
    doc2.source = """
*** Test Cases ***
User can call library
    KeywordInCa"""

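    # i.e.: wait until the workspace has indexed case1.robot so the
    # auto-import completion becomes available.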
    wait_for_condition(lambda: len(
        auto_import_completions.complete(
            CompletionContext(doc2, workspace=workspace.ws), {})) == 1)
    completions = auto_import_completions.complete(
        CompletionContext(doc2, workspace=workspace.ws), {})

    assert len(completions) == 1
    apply_completion(doc2, completions[0])

    assert (doc2.source == """*** Settings ***
Resource    ../case1.robot

*** Test Cases ***
User can call library
    KeywordInCase1""")
Example #4
def test_system_mutex_locked_on_subprocess():
    import sys
    from robocorp_ls_core.subprocess_wrapper import subprocess
    from robocorp_ls_core.basic import kill_process_and_subprocesses
    from robocorp_ls_core.system_mutex import SystemMutex
    from robocorp_ls_core.basic import wait_for_condition

    code = '''
import sys
import time
print('initialized')
from robocorp_ls_core.system_mutex import SystemMutex
mutex = SystemMutex('test_system_mutex_locked_on_subprocess')
assert mutex.get_mutex_aquired()
print('acquired mutex')
sys.stdout.flush()
time.sleep(30)
'''
    p = subprocess.Popen([sys.executable, '-c', code], stdout=subprocess.PIPE, stdin=subprocess.PIPE)
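    # Read the child's stdout line by line ('initialized', then 'acquired mutex')
    # so we only proceed once the subprocess reports that it holds the mutex.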
    wait_for_condition(lambda: p.stdout.readline().strip() == b'acquired mutex')
    mutex = SystemMutex('test_system_mutex_locked_on_subprocess')
    assert not mutex.get_mutex_aquired()
    
    # i.e.: check that we can acquire the mutex if the related process dies.
    kill_process_and_subprocesses(p.pid)
    
    def acquire_mutex():
        mutex = SystemMutex('test_system_mutex_locked_on_subprocess')
        return mutex.get_mutex_aquired()
    wait_for_condition(acquire_mutex, timeout=5)
Example #5
        # Nested helper from a larger test: `virtual_fs` and `extensions` come
        # from the enclosing scope.
        def check_found_py(expected_basenames):
            from robocorp_ls_core.basic import wait_for_condition

            def check():
                found = list(virtual_fs._iter_all_doc_uris(extensions))
                return set([os.path.basename(x)
                            for x in found]) == set(expected_basenames)

            wait_for_condition(check)
Example #6
def test_completion_with_auto_handle_unparseable_error(workspace,
                                                       setup_case2_in_dir_doc,
                                                       workspace_dir):
    from robotframework_ls.impl.completion_context import CompletionContext
    from robotframework_ls.impl import auto_import_completions
    import os.path
    from robocorp_ls_core.basic import wait_for_condition

    doc = workspace.get_doc("case1.robot")
    doc.source = """/invalid/file/here ustehous usneothu snteuha usoentuho"""

    os.makedirs(os.path.join(workspace_dir, "in_dir"), exist_ok=True)
    with open(os.path.join(workspace_dir, "in_dir", "case3.robot"),
              "w") as stream:
        stream.write("""
*** Keywords ***
KeywordInCase1
    In Lib 2""")

    doc2 = setup_case2_in_dir_doc
    doc2.source = """
*** Test Cases ***
User can call library
    KeywordInCa"""

    # i.e.: make sure that our in-memory folders are in sync.
    wait_for_condition(lambda: len(
        auto_import_completions.complete(
            CompletionContext(doc2, workspace=workspace.ws), {})) == 1)

    completions = auto_import_completions.complete(
        CompletionContext(doc2, workspace=workspace.ws), {})

    assert len(completions) == 1

    apply_completion(doc2, completions[0])

    assert (doc2.source == """*** Settings ***
Resource    case3.robot

*** Test Cases ***
User can call library
    KeywordInCase1""")
Example #7
def test_server_stdin(setup: _Setup):
    import os
    import threading
    from robocorp_ls_core import uris
    from robocorp_ls_core.basic import wait_for_condition

    received_messages = setup.received_messages
    rf_interpreter_server_manager = setup.rf_interpreter_server_manager

    result = rf_interpreter_server_manager.interpreter_start(setup.uri)
    assert result["success"], f"Found: {result}"
    uri = setup.uri
    robot_file = uris.to_fs_path(uri)
    lib_file = os.path.join(os.path.dirname(robot_file), "my_lib.py")
    with open(lib_file, "w", encoding="utf-8") as stream:
        stream.write(r"""
def check_input():
    import sys
    sys.__stdout__.write('Enter something\n')
    return input()
""")
    rf_interpreter_server_manager.interpreter_evaluate(
        "*** Settings ***\nLibrary    ./my_lib.py")

    def check_input_in_thread():
        rf_interpreter_server_manager.interpreter_evaluate("Check Input")

    threading.Thread(target=check_input_in_thread).start()

    def wait_for_enter_something_output():
        for msg in received_messages:
            if (msg["method"] == "interpreter/output"
                    and "Enter something" in msg["params"]["output"]):
                return True
        return False

    wait_for_condition(wait_for_enter_something_output)
    assert rf_interpreter_server_manager._get_api_client().waiting_input
    rf_interpreter_server_manager.interpreter_evaluate("Something\n")
Example #8
def test_timeout():
    import time
    from robocorp_ls_core import timeouts  # assumed import path for TimeoutTracker
    from robocorp_ls_core.basic import wait_for_condition

    called = []

    def on_timeout(arg):
        assert arg == 1
        called.append(time.time())

    timeout_tracker = timeouts.TimeoutTracker()
    curtime = time.time()
    timeout = 0.2
    timeout_tracker.call_on_timeout(timeout, on_timeout, kwargs={"arg": 1})
    wait_for_condition(lambda: len(called) > 0)

    assert called
    assert called[0] >= curtime + timeout

    del called[:]
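    # Exiting the with-block before the 1.5s timeout elapses is expected to cancel
    # the scheduled callback, so on_timeout is never invoked with arg=2 (verified
    # by the asserts below).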
    with timeout_tracker.call_on_timeout(1.5, on_timeout, kwargs={"arg": 2}):
        time.sleep(0.5)

    assert not called
    time.sleep(2)
    assert not called
Example #9
def test_workspace_memory_cache(tmpdir, small_vs_sleep):
    from robocorp_ls_core.workspace import Workspace
    from robocorp_ls_core import uris
    from robocorp_ls_core.lsp import WorkspaceFolder
    import os
    from typing import List
    from robocorp_ls_core.protocols import IWorkspaceFolder
    import typing
    from robocorp_ls_core.workspace import _WorkspaceFolderWithVirtualFS
    from robocorp_ls_core import watchdog_wrapper
    from robocorp_ls_core.basic import wait_for_condition

    root_uri = uris.from_fs_path(str(tmpdir))
    workspace_folders: List[IWorkspaceFolder] = [
        WorkspaceFolder(root_uri, os.path.basename(str(tmpdir)))
    ]
    fs_observer = watchdog_wrapper.create_observer("watchdog",
                                                   extensions=(".py", ".txt"))
    ws = Workspace(root_uri,
                   fs_observer,
                   workspace_folders,
                   track_file_extensions=(".py", ".txt"))

    folders = list(ws.iter_folders())
    assert len(folders) == 1
    folder = typing.cast(_WorkspaceFolderWithVirtualFS, folders[0])
    vs = folder._vs

    wait_for_condition(
        lambda: list(ws.iter_all_doc_uris_in_workspace((".py", ".txt"))) == [])
    # If the change is too fast the mtime may end up being the same...

    f = tmpdir.join("my.txt")
    f.write_text("foo", "utf-8")

    wait_for_condition(
        lambda: list(ws.iter_all_doc_uris_in_workspace(
            (".py", ".txt"))) == [uris.from_fs_path(str(f))])

    dir1 = tmpdir.join("dir1")
    dir1.mkdir()

    f2 = dir1.join("my.py")
    f2.write_text("foo", "utf-8")

    wait_for_condition(
        lambda: set(ws.iter_all_doc_uris_in_workspace((".py", ".txt"))) ==
        {uris.from_fs_path(str(f)),
         uris.from_fs_path(str(f2))})

    wait_for_condition(
        lambda: set(ws.iter_all_doc_uris_in_workspace((".py", ".txt"))) ==
        {uris.from_fs_path(str(f)),
         uris.from_fs_path(str(f2))})

    # If the change is too fast the mtime may end up being the same...
    f2.remove()

    wait_for_condition(
        lambda: set(ws.iter_all_doc_uris_in_workspace(
            (".py", ".txt"))) == {uris.from_fs_path(str(f))})

    wait_for_condition(
        lambda: set(ws.iter_all_doc_uris_in_workspace(
            (".py", ".txt"))) == {uris.from_fs_path(str(f))})

    ws.remove_folder(root_uri)
    assert set(ws.iter_all_doc_uris_in_workspace((".py", ".txt"))) == set()
    vs._virtual_fsthread.join(0.5)
    assert not vs._virtual_fsthread.is_alive()