from typing import List

# Assumed import location for the annotation below (as in the original project):
from robotframework_ls.impl.completion_context import CompletionContext


def semantic_tokens_full(context: CompletionContext):
    from robotframework_ls.impl import ast_utils

    try:
        ast = context.doc.get_ast()
    except Exception:  # a document that fails to parse yields no tokens
        return []

    ret: List[int] = []
    append = ret.append

    last_line = 0
    last_column = 0
    for _stack, node in ast_utils._iter_nodes(ast, recursive=True):
        tokens = getattr(node, "tokens", None)
        if tokens:
            for token in tokens:
                # tokenize_variables is a helper defined alongside this
                # function in the original module; it yields
                # (token_part, token_type_index) pairs.
                for token_part, token_type_index in tokenize_variables(token):
                    lineno = token_part.lineno - 1
                    # LSP semantic-tokens encoding: 5 ints per token, with
                    # positions given as deltas from the previous token.
                    append(lineno - last_line)
                    if lineno != last_line:
                        # New line: the column is absolute again.
                        last_column = token_part.col_offset
                        append(last_column)
                    else:
                        # Same line: column relative to the previous token.
                        col_delta = token_part.col_offset - last_column
                        append(col_delta)
                        last_column += col_delta

                    append(token_part.end_col_offset - token_part.col_offset)  # len
                    append(token_type_index)
                    append(0)  # i.e.: no modifier
                    last_line = lineno

    return ret
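
The flat list returned above follows the LSP "textDocument/semanticTokens/full" wire format: groups of five integers (deltaLine, deltaStartChar, length, tokenType, tokenModifiers). As a quick sanity check, here is a minimal decoding sketch (decode_semantic_tokens is a hypothetical helper, not part of robotframework_ls):

from typing import Iterator, List, Tuple


def decode_semantic_tokens(data: List[int]) -> Iterator[Tuple[int, int, int, int]]:
    # Each token occupies 5 ints; modifiers are ignored here.
    line = 0
    col = 0
    for i in range(0, len(data), 5):
        delta_line, delta_col, length, token_type, _modifiers = data[i:i + 5]
        line += delta_line
        # A line delta of 0 means the column delta is relative to the
        # previous token; otherwise it is an absolute column.
        col = col + delta_col if delta_line == 0 else delta_col
        yield line, col, length, token_type


# Two tokens on line 0 (cols 0 and 4) and one on line 2 (col 4):
encoded = [0, 0, 3, 0, 0, 0, 4, 8, 1, 0, 2, 4, 5, 2, 0]
assert list(decode_semantic_tokens(encoded)) == [
    (0, 0, 3, 0), (0, 4, 8, 1), (2, 4, 5, 2)]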
Example #2
def test_iter_nodes():
    from robotframework_ls.impl import ast_utils
    from robotframework_ls.impl.robot_workspace import RobotDocument

    doc = RobotDocument(
        "unused", source="*** settings ***\nResource    my_resource.resource")
    lst = []
    # _iter_nodes yields (stack, node) pairs, where stack holds the
    # ancestors of the yielded node.
    for stack, node in ast_utils._iter_nodes(doc.get_ast()):
        lst.append("%s - %s" % ([s.__class__.__name__
                                 for s in stack], node.__class__.__name__))
    assert lst == [
        "[] - SettingSection",
        "['SettingSection'] - SettingSectionHeader",
        "['SettingSection'] - ResourceImport",
    ]
Example #3
from typing import List, Optional

# Assumed import location for IMonitor (as in the original project):
from robocorp_ls_core.protocols import IMonitor


def semantic_tokens_full_from_ast(ast, monitor: Optional[IMonitor]):
    from robotframework_ls.impl import ast_utils

    ret: List[int] = []
    append = ret.append

    last_line = 0
    last_column = 0
    for _stack, node in ast_utils._iter_nodes(ast, recursive=True):
        if monitor:
            # Cooperative cancellation: raises if the request was cancelled.
            monitor.check_cancelled()
        tokens = getattr(node, "tokens", None)
        if tokens:
            for token in tokens:
                # Unlike the first example, this tokenize_variables variant
                # also receives the node, so the token type can take the
                # surrounding context into account.
                for token_part, token_type_index in tokenize_variables(
                        node, token):
                    lineno = token_part.lineno - 1
                    if lineno < 0:
                        # Defensive: clamp positions reported before the
                        # start of the document.
                        lineno = 0
                    append(lineno - last_line)
                    if lineno != last_line:
                        last_column = token_part.col_offset
                        if last_column < 0:
                            last_column = 0
                        append(last_column)
                    else:
                        col_delta = token_part.col_offset - last_column
                        append(col_delta)
                        last_column += col_delta

                    append(token_part.end_col_offset -
                           token_part.col_offset)  # len
                    append(token_type_index)
                    append(0)  # i.e.: no modifier
                    last_line = lineno

    return ret
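
Here, check_cancelled() is expected to raise once the client cancels the request, aborting the traversal early. A minimal sketch of such a monitor (an illustrative stand-in, not the actual robocorp_ls_core implementation) could be:

import threading


class CancelledError(Exception):
    pass


class SimpleMonitor:
    # Thread-safe cancellation flag matching the check_cancelled() contract
    # used above (hypothetical stand-in for IMonitor).
    def __init__(self) -> None:
        self._cancelled = threading.Event()

    def cancel(self) -> None:
        self._cancelled.set()

    def check_cancelled(self) -> None:
        if self._cancelled.is_set():
            raise CancelledError("Request cancelled by the client.")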