Example #1
class TokenRuleVisitor(RuleVisitor):
    """A visitor which builds expression trees meant to work on sequences of
    pre-lexed tokens rather than strings"""
    def visit_spaceless_literal(self, spaceless_literal, visited_children):
        """Turn a string literal into a ``TokenMatcher`` that matches
        ``Token`` objects by their ``type`` attributes."""
        return TokenMatcher(evaluate_string(spaceless_literal.text))

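    # Note: the tuple in the parameter list below is Python 2-only syntax
    # (removed in Python 3); the unpacking must happen inside the body,
    # as shown in Example #2.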
    def visit_regex(self, regex, (tilde, literal, flags, _)):
        raise BadGrammar('Regexes do not make sense in TokenGrammars, since '
                         'TokenGrammars operate on pre-lexed tokens rather '
                         'than characters.')
Example #2
def visit_regex(self, node, regex):
    tilde, literal, flags, _ = regex
    raise BadGrammar('Regexes do not make sense in TokenGrammars, since '
                     'TokenGrammars operate on pre-lexed tokens rather '
                     'than characters.')
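For reference, a minimal usage sketch (not part of the library source): the grammar rules and token types (``hi``, ``bob``) are invented for illustration, and it assumes ``TokenGrammar`` and ``Token`` are importable from ``parsimonious.grammar`` and ``parsimonious.utils``.

from parsimonious.grammar import TokenGrammar
from parsimonious.utils import Token

# String literals in a TokenGrammar compile to TokenMatchers, which compare
# against each Token's ``type`` attribute rather than raw characters.
grammar = TokenGrammar("""
    greeting = hi name
    hi = "hi"
    name = "bob"
""")

# The input is a sequence of pre-lexed Token objects, not a string.
tree = grammar.parse([Token('hi'), Token('bob')])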