Example 1
def setup_class(cls):
    from rpython.rlib.parsing.ebnfparse import parse_ebnf
    from rpython.rlib.parsing.lexer import Lexer
    from rpython.rlib.parsing.parsing import PackratParser
    # build the token regexes, grammar rules and AST visitor from the EBNF grammar
    regexs, rules, ToAST = parse_ebnf(grammar)
    cls.ToAST = ToAST()
    cls.parser = PackratParser(rules, rules[0].nonterminal)
    cls.regexs = regexs
    # regexs is a list of (name, regex) pairs; split it into parallel lists
    names, regexs = zip(*regexs)
    cls.lexer = Lexer(list(regexs), list(names))
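The fixture above only builds the lexer and parser; a test class like this typically drives them with a small parse helper along the lines of the sketch below. The helper name, the IGNORE filtering and the ToAST.transform call are assumptions for illustration, since the grammar string itself is not part of the snippet.

    def parse(self, s):
        # split the source into Token objects (pass eof=True if the grammar
        # expects an explicit EOF token at the end of the stream)
        tokens = self.lexer.tokenize(s)
        # drop whitespace tokens, assuming the grammar defines an IGNORE rule
        tokens = [t for t in tokens if t.name != "IGNORE"]
        # run the packrat parser and collapse the parse tree into an AST
        tree = self.parser.parse(tokens)
        return self.ToAST.transform(tree)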
Example 2
    def createDFA(self, rules):
        # parse the prioritised lexing rules
        pl = PriorityLexer(rules)
        rules = sorted(pl.rules.items(), key=lambda item: item[1][0])  # sort by priority

        # create the lexer automaton from the rules
        regexs = []
        names = []
        for regex, _ in rules:
            name = pl.rules[regex][1]
            regexs.append(parse_regex(regex))
            names.append(name)
        self.lexer = Lexer(regexs, names)
Example 3
def make_lexer():
    # each entry in `tokens` names a module-level regex string; parse them all and
    # build a Lexer over the parsed regexes, keyed by the same token names
    return Lexer([parse_regex(globals()[r]) for r in tokens], tokens[:])
Example 4
def from_name_and_regex(self, names, regexs):
    # compile each regex string into a regex object, then build the lexer from
    # the parsed regexes and the parallel list of token names
    parsed_regexs = []
    for regex in regexs:
        parsed_regexs.append(parse_regex(regex))
    self.lexer = Lexer(parsed_regexs, names)
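All four examples follow the same pattern: each regex string is compiled with parse_regex and the resulting regex objects are handed to Lexer together with a parallel list of token names. Below is a self-contained sketch of that pattern; the token definitions, the sample input and the ignore parameter for dropping whitespace tokens are illustrative assumptions, not taken from the examples above.

from rpython.rlib.parsing.lexer import Lexer
from rpython.rlib.parsing.regexparse import parse_regex

# hypothetical token definitions: (name, regex string) pairs
token_defs = [
    ("NAME", "[a-z]+"),
    ("NUMBER", "[0-9]+"),
    ("IGNORE", " +"),
]

names = [name for name, _ in token_defs]
regexs = [parse_regex(regex) for _, regex in token_defs]
# matches of rules listed in `ignore` are dropped from the token stream
lexer = Lexer(regexs, names, ignore=["IGNORE"])

tokens = lexer.tokenize("foo 42 bar")
kinds = [t.name for t in tokens]    # ["NAME", "NUMBER", "NAME"]
texts = [t.source for t in tokens]  # ["foo", "42", "bar"]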