Example #1
def assert_lex_equiv(self, s, stream2):
    # NOTE: lexer fails to reset after call?
    lexer = JsonPathLexer(debug=True)
    stream1 = list(lexer.tokenize(s))  # Save the stream for debug output when a test fails
    stream2 = list(stream2)
    assert len(stream1) == len(stream2)
    for token1, token2 in zip(stream1, stream2):
        print(token1, token2)
        assert token1.type == token2.type
        assert token1.value == token2.value
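
The expected stream is easiest to build from PLY LexToken objects, since JsonPathLexer is PLY-based and assert_lex_equiv compares only the .type and .value attributes. A minimal calling sketch, using a hypothetical token() helper that is not part of the example above:

    from ply.lex import LexToken

    def token(value, ty=None):
        # Hypothetical helper: build a bare LexToken. lineno/lexpos are
        # dummies because only .type and .value are compared above.
        t = LexToken()
        t.type = ty if ty is not None else value
        t.value = value
        t.lineno = -1
        t.lexpos = -1
        return t

    def test_dollar_lexes(self):
        # '$' should lex to a single literal token of type '$'.
        self.assert_lex_equiv('$', [token('$', '$')])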
Example #2
    def check_parse_cases(self, test_cases):
        parser = JsonPathParser(
            debug=True, lexer_class=lambda: JsonPathLexer(debug=False)
        )  # Note that just manually passing token streams avoids this dep, but that sucks

        for string, parsed in test_cases:
            print(
                string, '=?=', parsed
            )  # pytest captures this and we see it only on a failure, for debugging
            assert parser.parse(string) == parsed
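
A hedged usage sketch: each test case pairs a JSONPath string with the AST node it should parse to. The node classes here come from jsonpath_rw.jsonpath (an assumption about the package under test), and the specific cases are illustrative rather than lifted from the suite:

    from jsonpath_rw.jsonpath import Child, Fields

    def test_simple_paths(self):
        # Hypothetical cases: a bare field, then a dotted child path.
        self.check_parse_cases([
            ('foo', Fields('foo')),
            ('foo.bar', Child(Fields('foo'), Fields('bar'))),
        ])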
Example #3
from jsonpath_rw.lexer import JsonPathLexer  # assumed source package for the lexer

def tokenize(s):
    # Lex a JSONPath string eagerly into a list of tokens.
    lexer = JsonPathLexer(debug=True)
    return list(lexer.tokenize(s))
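
A hedged usage sketch: the returned list holds ply.lex.LexToken objects, so .type and .value can be inspected directly. The exact token types shown are assumptions about the lexer's literal set:

    tokens = tokenize('$.foo')
    print([(t.type, t.value) for t in tokens])
    # Expected along the lines of: [('$', '$'), ('.', '.'), ('ID', 'foo')]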