def __init__(self):
    """Initialize the parser with the base Python token set.

    Delegates to grammar.Parser.__init__, loads the standard tokens via
    pytoken.setup_tokens, and records how many symbols that produced so
    later grammar loading can distinguish base tokens from grammar-added
    symbols.
    """
    grammar.Parser.__init__(self)
    pytoken.setup_tokens(self)
    # Remember how many tokens were loaded before any grammar is read;
    # _sym_count is maintained by the grammar.Parser base class.
    self._basetokens_count = self._sym_count
    # Keywords are filled in later by build_parser() from the EBNF builder.
    self.keywords = []
def build_parser(gramfile, parser=None):
    """Read an EBNF grammar definition from *gramfile* and build a parser.

    :param gramfile: path to the grammar definition file
    :param parser: optional existing Parser to populate; a fresh Parser is
                   created when None
    :returns: the populated parser, with first-sets computed and its
              ``keywords`` list taken from the EBNF builder
    """
    if parser is None:
        parser = Parser()
    setup_tokens(parser)
    # XXX: clean up object dependencies
    from pypy.rlib.streamio import open_file_as_stream
    stream = open_file_as_stream(gramfile)
    try:
        grammardef = stream.readall()
    finally:
        # Close the stream even if readall() raises, so the file handle
        # is never leaked (previously it stayed open on error).
        stream.close()
    # NOTE(review): assert kept rather than replaced by an explicit raise —
    # RPython code uses asserts for annotation; confirm before changing.
    assert isinstance(grammardef, str)
    source = GrammarSource(GRAMMAR_GRAMMAR, grammardef)
    builder = EBNFBuilder(GRAMMAR_GRAMMAR, dest_parser=parser)
    # Parse the grammar text with the meta-grammar, feeding the builder.
    GRAMMAR_GRAMMAR.root_rules['grammar'].match(source, builder)
    builder.resolve_rules()
    parser.build_first_sets()
    parser.keywords = builder.keywords
    return parser
def setup_class(self):
    """Shared fixture: build a stub parser and load the token set into it."""
    stub_parser = ParserStub()
    self.parser = stub_parser
    setup_tokens(stub_parser)