Code example #1
File: base_parser.py  Project: Mythili0896/test3
    def parse(self) -> _NodeT:
        # Ensure that we don't re-use parsers.
        if self.__was_parse_called:
            raise Exception("Each parser object may only be used to parse once.")
        self.__was_parse_called = True

        for token in self.tokens:
            self._add_token(token)

        while True:
            tos = self.stack[-1]
            if not tos.dfa.is_final:
                expected_str = get_expected_str(
                    EOFSentinel.EOF, tos.dfa.transitions.keys()
                )
                raise ParserSyntaxError(
                    f"Incomplete input. {expected_str}",
                    lines=self.lines,
                    raw_line=len(self.lines),
                    raw_column=len(self.lines[-1]),
                )

            if len(self.stack) > 1:
                self._pop()
            else:
                return self.convert_nonterminal(tos.nonterminal, tos.nodes)
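The parse loop above drives a stack of StackNode entries, each pairing a DFA state with the nodes collected so far. None of the snippets define that structure, so the following is a minimal sketch with assumed names and fields (DFAState and the nonterminal/nodes attributes are illustrative, not the parser generator's real definitions):

# Illustrative sketch only; field names are assumptions based on how the
# snippets use stack entries (tos.dfa, tos.nonterminal, tos.nodes).
from dataclasses import dataclass, field
from typing import Any, Dict, List

@dataclass
class DFAState:
    transitions: Dict[Any, Any]          # transition label -> plan to follow
    is_final: bool = False               # True once the nonterminal may be reduced

@dataclass
class StackNode:
    dfa: DFAState                        # current position in the nonterminal's DFA
    nonterminal: str = ""                # grammar rule this entry is building
    nodes: List[Any] = field(default_factory=list)  # children collected so far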
Code example #2
    def _add_token(self, token: _TokenT) -> None:
        grammar = self._pgen_grammar
        stack = self.stack
        # Map the token to a transition label in the pgen grammar.
        transition = _token_to_transition(grammar, token.type, token.string)

        while True:
            try:
                plan = stack[-1].dfa.transitions[transition]
                break
            except KeyError:
                if stack[-1].dfa.is_final:
                    # The nonterminal on top of the stack is complete; reduce it
                    # and retry the transition against its parent.
                    self._pop()
                else:
                    expected_str = get_expected_str(
                        token, stack[-1].dfa.transitions.keys())
                    raise ParseTokenError(expected_str, token,
                                          token.start_pos[1])

        # Shift: advance the current DFA and push any nested nonterminals.
        stack[-1].dfa = plan.next_dfa

        for push in plan.dfa_pushes:
            stack.append(StackNode(push))

        # Attach the terminal node to the innermost nonterminal being built.
        leaf = self.convert_terminal(token)
        stack[-1].nodes.append(leaf)
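The while loop in _add_token is a small shift/reduce cycle: pop finished nonterminals until some state on the stack accepts the incoming transition, then follow that transition and push whatever nested nonterminals it opens. A toy, self-contained illustration of that cycle (the dict-based "DFA" and helper name are assumptions, not the real pgen structures):

# Toy model of the shift/reduce loop; dict-based states stand in for the
# parser generator's DFAs and plans.
def add_token_sketch(stack, transition):
    # Reduce: pop completed nonterminals until a state accepts the transition.
    while transition not in stack[-1]["transitions"]:
        if stack[-1]["is_final"] and len(stack) > 1:
            stack.pop()                          # stand-in for self._pop()
        else:
            raise SyntaxError(f"unexpected transition {transition!r}")
    # Shift: advance the accepting state and push any nested nonterminals.
    plan = stack[-1]["transitions"][transition]
    stack[-1]["state"] = plan["next_state"]
    for push in plan.get("pushes", []):
        stack.append(push)

# Example: a single state that accepts a NAME token.
stack = [{"transitions": {"NAME": {"next_state": "after_name"}},
          "is_final": False, "state": "start"}]
add_token_sketch(stack, "NAME")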
Code example #3
    def parse(self):
        # Feed every token through the shift/reduce machinery.
        for token in self.tokens:
            self._add_token(token)

        # After the last token, unwind the stack: every remaining entry must be
        # in a final state, or the input ended too early.
        while True:
            tos = self.stack[-1]
            if not tos.dfa.is_final:
                expected_str = get_expected_str(EOFSentinel.EOF,
                                                tos.dfa.transitions.keys())
                raise ParserSyntaxError(
                    expected_str,
                    lines=self.lines,
                    raw_line=len(self.lines),
                    raw_column=len(self.lines[-1]),
                )

            if len(self.stack) > 1:
                self._pop()
            else:
                return self.convert_nonterminal(tos.nonterminal, tos.nodes)
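Both parse variants format their end-of-input errors with get_expected_str. Its implementation is not shown in these snippets; a hypothetical stand-in that produces a comparable message might look like this (wording and signature are assumptions):

# Hypothetical stand-in for get_expected_str; the real helper's exact wording
# and signature are not shown in these snippets.
def get_expected_str_sketch(encountered, expected_keys):
    expected = ", ".join(sorted(repr(k) for k in expected_keys))
    what = "end of file (EOF)" if encountered == "EOF" else repr(encountered)
    return f"Encountered {what}, but expected one of: {expected}."

print(get_expected_str_sketch("EOF", {"NEWLINE", "NAME"}))
# Encountered end of file (EOF), but expected one of: 'NAME', 'NEWLINE'.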
Code example #4
File: base_parser.py  Project: Mythili0896/test3
    def _add_token(self, token: _TokenT) -> None:
        """
        This is the only core function for parsing. Here happens basically
        everything. Everything is well prepared by the parser generator and we
        only apply the necessary steps here.
        """
        grammar = self._pgen_grammar
        stack = self.stack
        # pyre-fixme[6]: Expected `_TokenTypeT` for 2nd param but got `TokenType`.
        transition = _token_to_transition(grammar, token.type, token.string)

        while True:
            try:
                plan = stack[-1].dfa.transitions[transition]
                break
            except KeyError:
                if stack[-1].dfa.is_final:
                    try:
                        self._pop()
                    except PartialParserSyntaxError as ex:
                        # Upconvert the PartialParserSyntaxError to a ParserSyntaxError
                        # by backfilling the line/column information.
                        raise ParserSyntaxError(
                            ex.message,
                            lines=self.lines,
                            raw_line=token.start_pos[0],
                            raw_column=token.start_pos[1],
                        )
                    except Exception as ex:
                        # convert_nonterminal may fail due to a bug in our code. Try to
                        # recover enough to at least tell us where in the file it
                        # failed.
                        raise ParserSyntaxError(
                            f"Internal error: {ex}",
                            lines=self.lines,
                            raw_line=token.start_pos[0],
                            raw_column=token.start_pos[1],
                        )
                else:
                    # We never broke out -- EOF is too soon -- Unfinished statement.
                    #
                    # BUG: The `expected_str` may not be complete because we already
                    # popped the other possibilities off the stack at this point, but
                    # it still seems useful to list some of the possibilities that we
                    # could've expected.
                    expected_str = get_expected_str(
                        token, stack[-1].dfa.transitions.keys()
                    )
                    raise ParserSyntaxError(
                        f"Incomplete input. {expected_str}",
                        lines=self.lines,
                        raw_line=token.start_pos[0],
                        raw_column=token.start_pos[1],
                    )
            except IndexError:
                # I don't think this will ever happen with Python's grammar, because if
                # there are any extra tokens at the end of the input, we'll instead
                # complain that we expected ENDMARKER.
                #
                # However, let's leave it just in case.
                expected_str = get_expected_str(token, EOFSentinel.EOF)
                raise ParserSyntaxError(
                    f"Too much input. {expected_str}",
                    lines=self.lines,
                    raw_line=token.start_pos[0],
                    raw_column=token.start_pos[1],
                )

        # Logically, `plan` is always defined, but pyre can't reasonably determine that.
        # pyre-fixme[18]: Global name `plan` is undefined.
        stack[-1].dfa = plan.next_dfa

        for push in plan.dfa_pushes:
            stack.append(StackNode(push))

        leaf = self.convert_terminal(token)
        stack[-1].nodes.append(leaf)
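Every reduce in these snippets goes through _pop(), which is referenced but never shown. Judging from the call sites and from the final return in parse, it presumably pops the finished top-of-stack entry, converts it into a single node, and hands that node to its parent; a sketch under that assumption:

# Sketch of what _pop() presumably does; only its call sites appear above,
# so the body here is an assumption.
def _pop_sketch(self):
    tos = self.stack.pop()
    # convert_nonterminal may raise PartialParserSyntaxError, which the
    # _add_token in example #4 upconverts to a ParserSyntaxError with
    # line/column information from the current token.
    new_node = self.convert_nonterminal(tos.nonterminal, tos.nodes)
    self.stack[-1].nodes.append(new_node)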