Example no. 1
0
 def assert_lex_equiv(self, s, stream2):
     """Assert that lexing *s* produces exactly the tokens in *stream2*.

     Compares token streams pairwise on ``type`` and ``value``, printing
     each pair so a failing test shows the full comparison.
     """
     # NOTE: lexer fails to reset after call?
     lexer = JsonPathLexer(debug=True)
     # Materialize both streams up front so lengths can be compared and
     # the tokens remain available for debug output when a test fails.
     actual = list(lexer.tokenize(s))
     expected = list(stream2)
     assert len(actual) == len(expected)
     for got, want in zip(actual, expected):
         print(got, want)
         assert got.type == want.type
         assert got.value == want.value
Example no. 2
0
 def assert_lex_equiv(self, s, stream2):
     """Check that tokenizing *s* yields the same tokens as *stream2*.

     Each (actual, expected) token pair must match on both ``type`` and
     ``value``; pairs are printed for diagnostics before being asserted.
     """
     # NOTE: lexer fails to reset after call?
     lexer = JsonPathLexer(debug=True)
     # Keep the tokenized stream as a list for debug output on failure.
     tokens = list(lexer.tokenize(s))
     reference = list(stream2)
     assert len(tokens) == len(reference)
     for token, ref in zip(tokens, reference):
         print(token, ref)
         assert token.type == ref.type
         assert token.value == ref.value
Example no. 3
0
 def tokenize(s):
     """Tokenize *s* with a fresh debug-enabled JsonPathLexer.

     Returns the tokens as a fully materialized list.
     """
     return list(JsonPathLexer(debug=True).tokenize(s))
Example no. 4
0
 def tokenize(s):
     """Return the list of tokens produced by lexing *s*.

     A new JsonPathLexer (debug mode on) is created per call.
     """
     lexer = JsonPathLexer(debug=True)
     token_stream = lexer.tokenize(s)
     return list(token_stream)