Example #1
 def test_incomplete(self):
     '''
     A token is not completely consumed (this doesn't raise an error
     message; it just fails to match).
     '''
     token = Token('[a-z]+')(Any())
     token.config.no_full_first_match()
     parser = token.get_parse_string()
     assert parser('a') == ['a'], parser('a')
     # even though this matches the token, the Any() sub-matcher doesn't
     # consume all the contents
     assert parser('ab') is None, parser('ab')
     token = Token('[a-z]+')(Any(), complete=False)
     token.config.no_full_first_match()
     parser = token.get_parse_string()
     assert parser('a') == ['a'], parser('a')
     # whereas this is fine, since complete=False
     assert parser('ab') == ['a'], parser('ab')
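
For reference, the same behaviour can be reproduced outside the TestCase. The sketch below is a minimal standalone version; it assumes the top-level lepl package exports Token and Any (as in the library's documented from lepl import * style):

 # standalone sketch -- import names assumed to be top-level lepl exports
 from lepl import Token, Any

 token = Token('[a-z]+')(Any(), complete=False)
 token.config.no_full_first_match()
 parser = token.get_parse_string()
 print(parser('a'))    # ['a']
 print(parser('ab'))   # ['a'] -- with complete=False the Any() sub-matcher may stop before the token ends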
Example #2
 def test_good_error_msg(self):
     '''
     Better error message with streams.
     '''
     #basicConfig(level=DEBUG)
     words = Token('[a-z]+')[:]
     words.config.lexer()
     parser = words.get_parse_string()
     try:
         parser('abc defXghi')
         assert False, 'expected error'
     except RuntimeLexerError as err:
         assert str(err) == 'No lexer for \'Xghi\' at line 1 character 7 ' \
             'of str: \'abc defXghi\'.', str(err)
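
The message checked above is produced by LEPL's lexer when no token pattern matches the remaining input ('Xghi' begins with an uppercase letter, which '[a-z]+' cannot lex). A minimal standalone sketch follows; it catches a plain Exception because the module defining RuntimeLexerError is not shown in the snippet, so its import path is left as an assumption:

 # standalone sketch -- Token assumed to be a top-level lepl export
 from lepl import Token

 words = Token('[a-z]+')[:]
 words.config.lexer()
 parser = words.get_parse_string()
 try:
     parser('abc defXghi')
 except Exception as err:  # RuntimeLexerError in the test above
     # e.g. No lexer for 'Xghi' at line 1 character 7 of str: 'abc defXghi'.
     print(err)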