Example No. 1
def __init__(self, service_registry, context_cache):
    self.parser = Parser()
    self.context_cache = context_cache
    self.builtin = BuiltinCompletion(context_cache)
    self.function = FunctionCompletion(context_cache)
    self.service_handler = ServiceHandler(service_registry, context_cache)
    self.service = ServiceCompletion(self.service_handler)
Example No. 2
def __init__(self, service_registry):
    self.parser = Parser()
    self.service = ServiceHandler(service_registry, None)
Example No. 3
class Indentation:
    """
    Computation of the next indentation for a line.
    """

    def __init__(self, service_registry):
        self.parser = Parser()
        self.service = ServiceHandler(service_registry, None)

    def indent(self, ws, doc, pos, indent_unit):
        """
        Returns the indentation for the next line based on the given document
        and position.
        """
        context = CompletionContext(ws=ws, doc=doc, pos=pos)
        log.info("Line on cursor: %s", context.line)
        indent_state = IndentState.detect(context.line, indent_unit)

        self._indent(context, indent_state)

        indentation = indent_state.indentation()
        insert_pos = pos
        return {
            "indent": indentation,
            "textEdits": [
                TextEdit(
                    Range(start=insert_pos, end=insert_pos),
                    new_text=f"\n{indentation}",
                ).dump()
            ],
        }

    def _indent(self, context, indent_state):
        """
        Adds an indentation level to `indent_state` when the current line
        opens a new block.
        """
        try:
            tokens = [*self.parser.tokenize(context.line)]
        except LexerException:
            log.warning(f"Lexing error for line: {context.line}")
            return

        if len(tokens) == 0:
            return

        first_tok = tokens[0].text()

        # special case for partially invalid blocks
        if first_tok == "catch" or first_tok == "else":
            indent_state.add()
            return

        # shortcut for start of new blocks
        if (
            first_tok == "try"
            or first_tok == "while"
            or first_tok == "if"
            or first_tok == "foreach"
            or first_tok == "when"
            or first_tok == "function"
        ):
            indent_state.add()
            return

        # only look at most at the first two tokens
        tokens = tokens[:2]
        try:
            stack = self.parser.stack_tokens(tokens)
        except (NotImplementedError, IndexError):
            log.warning(f"Invalid line: {context.line}")
            return

        transitions = [*self.parser.transitions(stack)]
        for tok, dfa in transitions:
            from_rule = dfa.next_dfa.from_rule
            assert len(dfa.dfa_pushes) > 0
            next_rule = dfa.dfa_pushes[0].from_rule
            if from_rule == "service_suffix" and next_rule == "arglist":
                # service blocks should trigger an indent iff they have events
                if self._service_has_events(stack):
                    indent_state.add()
                    return
                return

    def _service_has_events(self, stack):
        """
        Returns `True` if the service has events.
        """
        service_name = Stack.extract(stack, "value")[0].value
        service_command = Stack.extract(stack, "service_op")[0].value

        action = self.service.action(service_name, service_command)
        return action is not None and len(action.events()) > 0
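
For orientation, a hedged usage sketch of the Indentation class above: the constructor and the indent() signature are taken from the example itself, while service_registry, workspace, document, cursor, and the four-space indent unit are hypothetical placeholders for whatever objects the caller already holds.

# Hedged sketch, not part of the source: `service_registry`, `workspace`,
# `document`, and `cursor` are placeholders for the caller's own objects.
indentation = Indentation(service_registry)
result = indentation.indent(
    ws=workspace,        # forwarded into CompletionContext
    doc=document,        # current text document
    pos=cursor,          # cursor position, also used as the TextEdit insert point
    indent_unit="    ",  # one indentation level
)
# result["indent"] is the computed indentation string; result["textEdits"]
# contains a single TextEdit inserting "\n" + indentation at the cursor.
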
Example No. 4
def test_extract():
    stack = Parser().stack("foo bar foo:1")
    res = Stack.extract(stack, "arglist", "value")
    assert len(res) == 2
    assert res[0].value == "foo"
    assert res[1].value == ":"
Example No. 5
def test_stack_find_all_until_no_rule():
    stack = Parser().stack("foo bar foo:1")
    with raises(AssertionError):
        [*Stack.find_all_until(stack, "arglist", ["no_rule"])]
Example No. 6
def test_stack_find_all_until_3():
    stack = Parser().stack("foo bar arg1:1 + 1 arg2:2 arg3: (foo bar arg4:1)")
    res = Stack.find_all_until(stack, "arglist", ["service_suffix"])
    assert [*res] == ["arg1", "arg2", "arg3"]
Example No. 7
def test_stack_find_all_until():
    stack = Parser().stack("foo bar arg1:1")
    res = Stack.find_all_until(stack, "arglist", ["service_suffix"])
    assert [*res] == ["arg1"]
Example No. 8
def test_stack_find_closest_rule_no_rule():
    stack = Parser().stack("foo bar foo:1")
    with raises(AssertionError):
        Stack.find_closest_rule(stack, ["no_rule"])
Example No. 9
def test_stack_find_closest_rule():
    stack = Parser().stack("foo bar foo:1")
    res = Stack.find_closest_rule(stack, ["arglist", "no_rule"])
    assert res == "arglist"
Example No. 10
def test_extract_seen_no_rule():
    stack = Parser().stack("foo bar foo:1")
    with raises(AssertionError):
        Stack.extract(stack, "arglist", "no_rule")
Example No. 11
class ASTAnalyzer:
    def __init__(self, service_registry, context_cache):
        self.parser = Parser()
        self.context_cache = context_cache
        self.builtin = BuiltinCompletion(context_cache)
        self.function = FunctionCompletion(context_cache)
        self.service_handler = ServiceHandler(service_registry, context_cache)
        self.service = ServiceCompletion(self.service_handler)

    def complete(self, context):
        try:
            yield from self._complete(context)
        except (IndexError, LexerException):
            log.warning(f"Invalid line (lexer): {context.line}")
            return

    def _complete(self, context):
        """
        Yields completion items for the line under the cursor.
        """
        is_space = len(context.line) > 0 and context.line[-1] == " "
        log.debug(f"line: '{context.line}'")
        line = context.line

        tokens = [*self.parser.tokenize(line)]

        like_word = ""
        is_word = len(tokens) > 0 and tokens[-1].text().isalnum()
        if not is_space and is_word:
            # current token under the cursor
            context.token_word = tokens[-1].text()
            # remove word from token stream -> save for completion filter
            like_word = tokens.pop().text()
            # case-insensitive searches
            like_word = like_word.lower()
        else:
            context.token_word = ""

        log.debug("tokens: %s", tokens)

        try:
            stack = self.parser.stack_tokens(tokens)
        except NotImplementedError:
            log.warning(f"Invalid line: {context.line}")
            return

        transitions = [*self.parser.transitions(stack)]
        parse_state = ParseState(context)
        parse_state.detect(tokens)

        # iterate all non-terminals in the transitions and
        # processes upcoming next_rules
        for tok, dfa in transitions:
            completion = self.process_nonterminal(tok, dfa, stack, parse_state)
            for c in completion:
                c = c.to_completion(context)
                if c["label"].lower().startswith(like_word):
                    yield c

        if not self.state_with_ops(transitions):
            log.debug("state without operator completion.")
            return

        for op in self.process_ops(stack):
            if op.keyword.lower().startswith(like_word):
                yield op

    def process_ops(self, stack):
        """
        Yields all potential alphabetic operators.
        """
        # only yield an operator once
        ops = set(self.parser.operators(stack))
        for op in ops:
            # only complete alphabetic operators for now
            if op.isalpha():
                yield KeywordCompletionSymbol(op)

    @staticmethod
    def state_with_ops(transitions):
        """
        Iterate over all non-terminal transitions and check whether operators should be yielded.
        For now, this is a simple blacklist of special rules.
        """
        count = 0

        # ignore operator yielding when only special rules have been observed
        only_special_rules = True
        for tok, dfa in transitions:
            if tok in (
                StoryTokenSpace.RPARENS,
                StoryTokenSpace.DOT,
                StoryTokenSpace.NL,
                StoryTokenSpace.AS,
            ):
                continue
            from_rule = dfa.next_dfa.from_rule
            only_special_rules &= from_rule == "service_suffix"
            count += 1

        # no real transitions
        if count == 0:
            return True

        return not only_special_rules

    def process_nonterminal(self, tok, dfa, stack, parse_state):
        """
        Forwards processing of the non-terminal to its respective processor.
        """
        log.debug("process non-terminal: %s", tok)
        if tok == StoryTokenSpace.NAME:
            yield from self.process_name(dfa, stack, parse_state)
        elif tok == StoryTokenSpace.NULL:
            yield KeywordCompletionSymbol("null")
        elif tok in (
            StoryTokenSpace.STRING,
            StoryTokenSpace.LPARENS,
            StoryTokenSpace.RPARENS,
            StoryTokenSpace.DOT,
            StoryTokenSpace.NL,
            StoryTokenSpace.COLON,
        ):
            # no completion for strings or punctuation tokens
            pass
        elif tok == StoryTokenSpace.AS:
            yield from self.process_as(stack, parse_state)
        elif tok == StoryTokenSpace.FOREACH:
            indent_state = parse_state.context.detect_indentation()
            indent = indent_state.add().indentation()
            yield KeywordCompletionSymbol("foreach", indent=indent)
        else:
            # no completion for numbers
            assert tok == StoryTokenSpace.NUMBER, tok

    def process_name(self, dfa, stack, parse_state):
        """
        Completion for a NAME token can occur in different contexts.
        This looks at the current stack to distinguish between them.
        """
        from_rule = dfa.next_dfa.from_rule
        if len(dfa.dfa_pushes) == 0:
            assert from_rule == "as_suffix", from_rule
            return
        next_rule = dfa.dfa_pushes[0].from_rule
        log.debug(
            "process_name, from_rule:%s, next_rule:%s",
            from_rule,
            next_rule,
        )
        if next_rule == "service_suffix":
            assert from_rule == "value", from_rule
            yield from self.service.process_suffix(
                stack,
                value_stack_name="value",
                in_assignment=parse_state.in_assignment,
            )
        elif next_rule == "arglist":
            yield from self.process_arglist(stack, from_rule)
        elif next_rule == "arg_name":
            yield from self.process_arg_name(stack, from_rule)
        elif next_rule == "block":
            yield from self.get_names()
        elif next_rule == "fn_arg_name":
            # no name completion for function args in function declaration
            return
        elif next_rule == "fn_name":
            # no name completion for function names in function declaration
            return
        elif next_rule == "fn_arguments":
            # no name completion for function args in function declaration
            return
        elif next_rule == "dot_name":
            yield from self.builtin.process_name(stack)
        elif next_rule == "mut_arg_name":
            yield from self.builtin.process_args(stack)
        elif next_rule == "when_expression":
            yield from self.get_service_names()
        elif next_rule == "when_action":
            yield from self.service.process_when_name(stack)
        elif next_rule == "when_action_name":
            yield from self.service.process_when_command(stack)
        elif next_rule == "when_arglist":
            yield from self.process_arglist(stack, from_rule)
        elif next_rule == "when_action_suffix":
            yield from self.process_when(stack)
        elif next_rule == "value":
            yield from self.process_value(stack)
        else:
            assert next_rule == "expression", next_rule
            yield from self.get_names()

    def process_args(self, stack):
        """
        Extracts previous tokens for service or function argument completion
        when at least one argument has already been provided. Previously seen
        arguments are collected so they can be filtered out of the suggestions.
        """
        suffixes = ["fn_suffix", "service_suffix"]
        last_rule = Stack.find_closest_rule(stack, suffixes)

        # second or further arguments -> filter used arguments
        # <name> ':' <expr>
        prev_args = [
            *Stack.find_all_until(
                stack, "arglist", suffixes, start=0, offset=3)
        ]

        if last_rule == "service_suffix":
            yield from self.service.process_args(stack, prev_args)
        else:
            assert last_rule == "fn_suffix"
            yield from self.function.process_args(stack, prev_args)

    def process_arg_name(self, stack, from_rule):
        if from_rule == "when_arglist":
            yield from self.process_when_args(stack)
        else:
            yield from self.process_args(stack)

    def process_arglist(self, stack, from_rule):
        if from_rule == "service_suffix":
            yield from self.service.process_command(
                stack,
                value_stack_name="value",
                command_name="service_op",
            )
        elif from_rule == "fn_suffix":
            yield from self.function.process_name(stack)
        else:
            assert from_rule == "when_action_suffix", from_rule
            yield from self.process_when(stack)

    def process_when(self, stack):
        """
        Process a when statement without args.
        """
        name = Stack.extract(stack, "when_expression")[0].value
        when_action = Stack.extract(stack, "when_action")
        action = when_action[0].value
        prev_args = []
        try:
            event = Stack.extract(stack, "when_action_name")[0].value
        except Exception:
            # COLON (only two names provided, add the third name as argument)
            event = None
            prev_args.append(when_action[1].value)

        yield from self.service.when(name, action, event, prev_args)

    def process_when_args(self, stack):
        """
        Process a when statement with args.
        """
        name = Stack.extract(stack, "when_expression")[0].value
        actions = Stack.extract(stack, "when_action")
        suffix = Stack.extract(stack, "when_action_suffix")
        prev_args = [
            *Stack.find_all_until(
                stack, "when_arglist", ["when_expression"], start=0, offset=3)
        ]
        event = None
        assert len(actions) == 2
        action = actions[0].value
        # it might have been the event name or an argument
        if len(suffix) > 0 and suffix[0].value == ":":
            prev_args.append(actions[1].value)
        else:
            event = actions[1].value

        yield from self.service.when(name, action, event, prev_args)

    def process_as(self, stack, parse_state):
        """
        Process an upcoming 'as' rule and decide whether it can occur.
        """
        if parse_state.in_assignment:
            # use of 'as' in an assignment is definitely invalid
            return
        if parse_state.nestedness != 0:
            # use of 'as' inside parenthesis is definitely invalid
            return

        as_keyword = KeywordCompletionSymbol(
            "as", sort_group=SortGroup.ContextKeyword)
        suffixes = ["service_op", "service_suffix"]

        try:
            last_rule = Stack.find_closest_rule(stack, suffixes)
            service_name = Stack.extract(stack, "value",
                                         "service_suffix")[0].value
            service_command = Stack.extract(stack, last_rule)[0].value
        except Exception:
            # attempt to parse service_block failed - must be a when_block or
            # similar
            yield as_keyword
            return

        action = self.service_handler.action(service_name, service_command)
        if action is None:
            return

        # only yield 'as' iff the service can start an event block (=it has
        # events)
        if len(action.events()) > 0:
            yield as_keyword

    def process_value(self, stack):
        """
        Yields all known symbols for a value position.
        """
        yield from self.context_cache.complete("")

    def get_names(self):
        """
        Yields all symbols and services.
        """
        log.debug("get_names")
        yield from self.context_cache.complete("")
        yield from self.service_handler.services("")

    def get_service_names(self):
        """
        Yields all available service object symbols and services.
        """
        log.debug("get_service_names")
        yield from self.context_cache.service_objects()
        yield from self.service_handler.services("")
Example No. 12
def parse(text):
    parser = Parser()
    stack = parser.stack(text)
    return [*parser.transitions_tokens(stack)]
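
A short hedged sketch of calling the parse helper above; the input line mirrors the ones used in the Stack tests, and the result is simply the materialised output of parser.transitions_tokens.

# Hedged sketch, not part of the source.
transition_tokens = parse("foo bar arg1:1")
# transition_tokens is a plain list, one entry per transition token
assert isinstance(transition_tokens, list)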