def collect_analysis_errors(completion_context):
    """Collect diagnostics for keyword usages that cannot be resolved.

    :param CompletionContext completion_context:
        Provides the AST to analyze and cooperative cancellation.
    :return: list of error objects, capped at MAX_ERRORS entries.
    """
    from robotframework_ls.impl import ast_utils
    from robotframework_ls.impl.ast_utils import create_error_from_node
    from robotframework_ls.impl.collect_keywords import collect_keywords
    from robotframework_ls.impl.text_utilities import normalize_robot_name

    collector = _KeywordsCollector()
    collect_keywords(completion_context, collector)

    found_errors = []
    for usage in ast_utils.iter_keyword_usage_tokens(completion_context.get_ast()):
        completion_context.check_cancelled()
        if not collector.contains_keyword(normalize_robot_name(usage.name)):
            # NOTE(review): a library-prefixed usage (Library.Keyword) may not
            # match the plain keyword name directly; `contains_keyword` is
            # presumably expected to account for that -- confirm against the
            # collector implementation.
            found_errors.append(
                create_error_from_node(
                    usage.node,
                    "Undefined keyword: %s." % (usage.name, ),
                    tokens=[usage.token],
                )
            )
        if len(found_errors) >= MAX_ERRORS:
            # Stop early so a broken file does not flood the client with errors.
            break
    return found_errors
def find_definition(completion_context):
    """
    :param CompletionContext completion_context:

    :rtype: list(IDefinition)

    :note: Definitions may be found even if a given source file no longer
        exists at this place (callers are responsible for validating entries).
    """
    from robotframework_ls.impl import ast_utils
    from robotframework_ls.impl.collect_keywords import collect_keywords
    from robotframework_ls.impl.string_matcher import RobotStringMatcher
    from robotframework_ls.impl.variable_completions import collect_variables

    token_info = completion_context.get_current_token()
    if token_info is not None:
        node = token_info.node
        tok = token_info.token

        # 1) Cursor on a keyword name: resolve via keyword collection.
        keyword_token = ast_utils.get_keyword_name_token(node, tok)
        if keyword_token is not None:
            collector = _FindDefinitionKeywordCollector(keyword_token.value)
            collect_keywords(completion_context, collector)
            return collector.matches

        # 2) Cursor on a library import name: resolve through the libspec manager.
        library_token = ast_utils.get_library_import_name_token(node, tok)
        if library_token is not None:
            libspec_manager = completion_context.workspace.libspec_manager
            library_doc = libspec_manager.get_library_info(
                library_token.value,
                create=True,
                current_doc_uri=completion_context.doc.uri,
            )
            if library_doc is not None:
                return [_DefinitionFromLibrary(library_doc)]

        # 3) Cursor on a resource import name: resolve to the resource document.
        resource_token = ast_utils.get_resource_import_name_token(node, tok)
        if resource_token is not None:
            resource_doc = completion_context.get_resource_import_as_doc(node)
            if resource_doc is not None:
                return [_DefinitionFromResource(resource_doc)]

    # 4) Cursor on a variable: resolve via variable collection.
    variable_info = completion_context.get_current_variable()
    if variable_info is not None:
        variable_token = variable_info.token
        collector = _FindDefinitionVariablesCollector(
            completion_context.sel,
            variable_token,
            RobotStringMatcher(variable_token.value),
        )
        collect_variables(completion_context, collector)
        return collector.matches

    return []
def complete(completion_context: ICompletionContext) -> List[dict]:
    """Compute keyword-name completion items for the current cursor position.

    Returns an empty list when the cursor is not on a keyword-name token.
    """
    from robotframework_ls.impl import ast_utils
    from robotframework_ls.impl.collect_keywords import collect_keywords

    token_info = completion_context.get_current_token()
    if token_info is None:
        return []

    keyword_token = ast_utils.get_keyword_name_token(token_info.node, token_info.token)
    if keyword_token is None:
        return []

    collector = _Collector(completion_context.sel, keyword_token)
    collect_keywords(completion_context, collector)
    return collector.completion_items
def collect_analysis_errors(completion_context: ICompletionContext):
    # Collects code-analysis diagnostics for the given context:
    # - undefined keyword usages,
    # - ambiguous keyword usages (same name provided by multiple libraries),
    # - plus whatever CodeAnalysisVisitor reports for the file.
    errors = []
    collector = _KeywordsCollector()
    collect_keywords(completion_context, collector)
    ast = completion_context.get_ast()
    for keyword_usage_info in ast_utils.iter_keyword_usage_tokens(ast):
        completion_context.check_cancelled()
        normalized_name = normalize_robot_name(keyword_usage_info.name)
        if not collector.contains_keyword(normalized_name):
            # There's not a direct match, but the library name may be builtin
            # into the keyword name, so, check if we have a match that way.
            node = keyword_usage_info.node
            error = create_error_from_node(
                node,
                "Undefined keyword: %s." % (keyword_usage_info.name, ),
                tokens=[keyword_usage_info.token],
            )
            errors.append(error)
        else:
            # The name resolves, but it may resolve in more than one library;
            # in that case report the ambiguity with the qualified candidates.
            multi = collector.check_multiple_keyword_definitions(
                normalized_name)
            if multi is not None:
                node = keyword_usage_info.node
                error = create_error_from_node(
                    node,
                    "Multiple keywords with name '%s' found. Give the full name of the keyword you want to use:\n%s"
                    % (keyword_usage_info.name, "\n".join([
                        f"    {m.library_alias}.{m.keyword_name}"
                        for m in multi
                    ])),
                    tokens=[keyword_usage_info.token],
                )
                errors.append(error)

        if len(errors) >= MAX_ERRORS:
            # i.e.: Collect at most 100 errors
            break

    # Append file-level diagnostics gathered by the analysis visitor.
    errors.extend(CodeAnalysisVisitor.find_from(completion_context))
    return errors
def find_keyword_definition(
        completion_context: ICompletionContext,
        token_info: TokenInfo) -> Optional[Sequence[IKeywordDefinition]]:
    """Find a definition only considering Keywords.

    ``token_info`` must be already computed and must match the completion
    context location.  Returns ``None`` when the token is not a keyword name.
    """
    from robotframework_ls.impl import ast_utils
    from robotframework_ls.impl.collect_keywords import collect_keywords

    keyword_token = ast_utils.get_keyword_name_token(token_info.node, token_info.token)
    if keyword_token is None:
        return None

    collector = _FindDefinitionKeywordCollector(keyword_token.value)
    collect_keywords(completion_context, collector)
    return collector.matches
def find_definition(completion_context):
    """
    :param CompletionContext completion_context:

    :rtype: list(IDefinition)

    :note: Definitions may be found even if a given source file no longer
        exists at this place (callers are responsible for validating entries).
    """
    from robotframework_ls.impl import ast_utils
    from robotframework_ls.impl.collect_keywords import collect_keywords

    token_info = completion_context.get_current_token()
    if token_info is None:
        return []

    keyword_token = ast_utils.get_keyword_name_token(token_info.node, token_info.token)
    if keyword_token is None:
        return []

    collector = _Collector(keyword_token.value)
    collect_keywords(completion_context, collector)
    return collector.matches