예제 #1
0
    def test_token_parser(self):
        """Tokenize "hello world" into three tokens and check that the
        LL parser produces the expected four-element tuple (hello,
        space, world, EOF)."""
        tokenizer = Tokenizer()
        # Register the three lexical rules the grammar refers to.
        tokenizer.add_rule(tok.Literal('hello', 'hello'))
        tokenizer.add_rule(tok.CharSet('space', ' ', '\t', '\r'))
        tokenizer.add_rule(tok.Literal('world', 'world'))

        grammar = Grammar()
        sequence = (parse.Tuple()
                    .element(parse.Token('hello'))
                    .element(parse.Token('space'))
                    .element(parse.Token('world'))
                    .element(parse.EOF()))
        grammar.register('init', sequence)

        parser = LLParsing(grammar)
        parser.tokenizer = tokenizer
        tokenizer.from_string("hello world")
        result = parser.parse()

        # One node per grammar element, in order, with EOF last.
        self.assertTrue(len(result.content) == 4)
        self.assertTrue(result.content[0].content.value == 'hello')
        self.assertTrue(result.content[1].content.value == ' ')
        self.assertTrue(result.content[2].content.value == 'world')
        self.assertTrue(result.content[3].content.iseof)
예제 #2
0
 def parse_functype_from_string(self, string):
     """Feed *string* to this object's tokenizer and parse it with the
     function-type grammar, returning the parse result."""
     functype_parser = LLParsing(self.functype_grammar)
     self.tokenizer.from_string(string)
     functype_parser.tokenizer = self.tokenizer
     return functype_parser.parse()
예제 #3
0
 def parse_typeexpr_from_string(self, string):
     """Feed *string* to this object's tokenizer and parse it with the
     type-expression grammar, returning the parse result."""
     typeexpr_parser = LLParsing(self.typeexpr_grammar)
     self.tokenizer.from_string(string)
     typeexpr_parser.tokenizer = self.tokenizer
     return typeexpr_parser.parse()
예제 #4
0
 def parse_from_string(self, string):
     """Build a fresh calculator tokenizer, feed it *string*, and parse
     it with this object's grammar, returning the parse result."""
     calc_tokens = CalculatorEval.calculator_tokenizer()
     calc_tokens.from_string(string)
     calc_parser = LLParsing(self.grammar)
     calc_parser.tokenizer = calc_tokens
     return calc_parser.parse()
예제 #5
0
 def parse_functype_from_string(self, string):
     """Parse *string* as a function type using the shared tokenizer
     and this object's function-type grammar."""
     ll = LLParsing(self.functype_grammar)
     ll.tokenizer = self.tokenizer
     # Load the input into the tokenizer before driving the parser.
     self.tokenizer.from_string(string)
     result = ll.parse()
     return result
예제 #6
0
 def parse_typeexpr_from_string(self, string):
     """Parse *string* as a type expression using the shared tokenizer
     and this object's type-expression grammar."""
     ll = LLParsing(self.typeexpr_grammar)
     ll.tokenizer = self.tokenizer
     # Load the input into the tokenizer before driving the parser.
     self.tokenizer.from_string(string)
     result = ll.parse()
     return result
예제 #7
0
 def parse_from_string(string):
     """Tokenize *string* with the pi tokenizer and parse it with the
     pi grammar, returning the parse result."""
     pi_tokens = PiParser.pi_tokenizer()
     pi_tokens.from_string(string)
     pi_parser = LLParsing(PiParser.pi_grammar())
     pi_parser.tokenizer = pi_tokens
     return pi_parser.parse()