Example #1
0
    def createDFA(self, rules):
        """Build ``self.lexer`` from a textual lexing-rule specification.

        ``rules`` is the raw rule text understood by ``PriorityLexer``;
        after parsing, ``pl.rules`` maps each regex string to a value
        whose element 0 is the priority and element 1 the token name.
        Rules are compiled in ascending priority order.
        """
        # lex lexing rules
        pl = PriorityLexer(rules)
        # sort by priority; keep the parameter name intact instead of
        # shadowing it with the sorted item list
        sorted_rules = sorted(pl.rules.items(), key=lambda item: item[1][0])

        # create lexer automaton from rules.  The dict value is already
        # in hand from .items(), so avoid the redundant pl.rules[k] lookup.
        regexs = []
        names = []
        for regex, value in sorted_rules:
            regexs.append(parse_regex(regex))
            names.append(value[1])
        self.lexer = Lexer(regexs, names)
Example #2
0
    def createDFA(self, rules):
        """Construct the lexer automaton for this object from rule text.

        The rule text is handed to ``PriorityLexer``; the resulting
        regex -> (priority, name) table is compiled lowest-priority-first.
        """
        # lex lexing rules
        pl = PriorityLexer(rules)
        rules = sorted(pl.rules.items(), key=lambda node: node[1][0]) # sort by priority

        # create lexer automaton from rules: parse each regex in order,
        # pairing it with its token name, then split into parallel lists
        compiled = [(parse_regex(key), pl.rules[key][1]) for key, _ in rules]
        regexs = [rgx for rgx, _ in compiled]
        names = [nm for _, nm in compiled]
        self.lexer = Lexer(regexs, names)
Example #3
0
def make_lexer():
    """Build a Lexer over the regex strings named by the module-level ``tokens``.

    Each entry of ``tokens`` is the name of a module-level regex string;
    the parsed regexes are paired with a shallow copy of ``tokens``.
    """
    parsed = []
    for token_name in tokens:
        parsed.append(parse_regex(globals()[token_name]))
    return Lexer(parsed, tokens[:])
Example #4
0
 def from_name_and_regex(self, names, regexs):
     """Build ``self.lexer`` from parallel sequences of names and regex strings.

     Each entry of ``regexs`` is run through ``parse_regex``; the parsed
     regexes together with ``names`` are handed to ``Lexer``.
     """
     # Idiom: a list comprehension replaces the explicit append loop
     # (same order of parse_regex calls, less boilerplate).
     self.lexer = Lexer([parse_regex(regex) for regex in regexs], names)
Example #5
0
def make_lexer():
    """Return a Lexer built from the regex strings that ``tokens`` names.

    Looks each token name up in the module namespace, parses the regex
    it refers to, and pairs the results with a copy of ``tokens``.
    """
    module_scope = globals()
    regexes = [parse_regex(module_scope[name]) for name in tokens]
    return Lexer(regexes, tokens[:])
Example #6
0
 def from_name_and_regex(self, names, regexs):
     """Install ``self.lexer``: compile each regex string and pair with ``names``."""
     compiled = [parse_regex(pattern) for pattern in regexs]
     self.lexer = Lexer(compiled, names)