Example #1
 def test_with_keyword_trailing_whitespace(self):
     self.assertEqual(
         {
             'name': lexer.Token('N', 'foo', at(1, 1)),
             'keyword': lexer.Token('K', ':bar', at(1, 5)),
             'rest': lexer.Token('S', None, at(1, 9)),
         }, lexer.lex_name(self.tok('foo :bar    ')))
Example #2
 def test_name_only_trailing_whitespace(self):
     self.assertEqual(
         {
             'name': lexer.Token('N', 'foo', at(1, 1)),
             'keyword': lexer.Token('K', None, at(1, 4)),
             'rest': lexer.Token('S', None, at(1, 4)),
         }, lexer.lex_name(self.tok('foo    ')))
Example #3
 def test_trailing_whitespace(self):
     self.assertEqual(
         {
             'name': lexer.Token('N', 'foo', at(1, 4)),
             'keyword': lexer.Token('K', ':bar', at(1, 8)),
             'rest': lexer.Token('S', 'other stuff', at(1, 13)),
         }, lexer.lex_name(self.tok('   foo :bar other stuff   ')))
Example #4
 def test_with_rest(self):
     self.assertEqual(
         {
             'name': lexer.Token('N', 'foo', at(1, 1)),
             'keyword': lexer.Token('K', ':bar', at(1, 5)),
             'rest': lexer.Token('S', 'other stuff', at(1, 10)),
         }, lexer.lex_name(self.tok('foo :bar other stuff')))
Example #5
 def test_name_rest_no_keyword(self):
     self.assertEqual(
         {
             'name': lexer.Token('N', 'foo', at(1, 1)),
             'keyword': lexer.Token('K', None, at(1, 5)),
             'rest': lexer.Token('S', 'other stuff', at(1, 5)),
         }, lexer.lex_name(self.tok('foo other stuff')))
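
Examples #1-#5 fix both the split and the column bookkeeping of lex_name: a matched piece reports its own column, while a missing piece reports the column of the next present piece, or the column just past the previous one when nothing follows. The following is a rough, self-contained sketch that reproduces the expected values above; Token, at, and the regex are stand-ins inferred from the examples, not the library's own definitions.

import re
from collections import namedtuple

# Stand-ins inferred from the examples above; the field names of Token and the
# shape of at(line, col) are assumptions, not the library's actual definitions.
Token = namedtuple('Token', ['tag', 'value', 'location'])

def at(line, col):
    return (line, col)

_NAME = re.compile(r'\s*(?P<name>\w+)?\s*(?P<keyword>:\w+)?\s*(?P<rest>\S.*?)?\s*$')

def lex_name(token):
    """Split 'name :keyword rest' into positioned name/keyword/rest tokens."""
    line, col = token.location
    m = _NAME.match(token.value)
    order = [('name', 'N'), ('keyword', 'K'), ('rest', 'S')]
    out = {}
    last_end = 0
    for i, (group, tag) in enumerate(order):
        value = m.group(group)
        if value is not None:
            start = m.start(group)
            last_end = m.end(group)
        else:
            # A missing piece reports the column of the next present piece,
            # or the column just past the previous one when nothing follows.
            later = [m.start(g) for g, _ in order[i + 1:] if m.group(g) is not None]
            start = later[0] if later else last_end
        out[group] = Token(tag, value, at(line, col + start))
    return out

# lex_name(Token('S', 'foo :bar other stuff', at(1, 1)))['rest']
# -> Token(tag='S', value='other stuff', location=(1, 10))
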
Example #6
 def test_basic_identifier(self):
     l1 = lexer.Token('C', '-- :name identifier :1', at(1, 1))
     l2 = lexer.Token('Q', 'select * from {{table_name}} limit 1', at(2, 1))
     with open('tests/sql/identifier.sql', 'r') as f:
         self.assertEqual([l1, l2], lexer.lex(f.read(), ctx))
Example #7
 def test_basic(self):
     l1 = lexer.Token('C', '-- :name username_for_id :1', at(1, 1))
     l2 = lexer.Token(
         'Q', 'select username from users where user_id = :user_id',
         at(2, 1))
     with open('tests/sql/basic.sql', 'r') as f:
         self.assertEqual([l1, l2], lexer.lex(f.read(), ctx))
Example #8
 def test_whitespace(self):
     l1 = lexer.Token('C', '-- :name username_for_id :1', at(1, 2))
     l2 = lexer.Token(
         'Q', 'select username from users where user_id = :user_id',
         at(2, 2))
     self.assertEqual(
         [l1, l2],
         lexer.lex(
             ' -- :name username_for_id :1  \n'
             ' select username from users where user_id = :user_id  ', ctx))
Example #9
 def test_blank_lines(self):
     l1 = lexer.Token('C', '-- :name username_for_id :1', at(1, 1))
     l2 = lexer.Token('Q', '', at(2, 1))
     l3 = lexer.Token(
         'Q', 'select username from users where user_id = :user_id',
         at(3, 1))
     self.assertEqual(
         [l1, l2, l3],
         lexer.lex(
             '-- :name username_for_id :1  \n'
             '\n'
             'select username from users where user_id = :user_id  ', ctx))
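
Examples #6-#9 suggest that lex works line by line: each input line becomes one token, tagged 'C' when the stripped line starts with '--' and 'Q' otherwise (blank lines included), with surrounding whitespace removed and the column pointing at the first non-blank character. Below is a minimal sketch under those assumptions; ctx is accepted but unused here and is assumed to carry source metadata in the real lexer.

from collections import namedtuple

# Same assumed stand-ins as in the sketch after Example #5.
Token = namedtuple('Token', ['tag', 'value', 'location'])

def at(line, col):
    return (line, col)

def lex(source, ctx):
    """One token per line: 'C' for '--' comment lines, 'Q' for everything else."""
    # ctx is assumed to carry source metadata (e.g. a file name); unused here.
    tokens = []
    for lineno, raw in enumerate(source.split('\n'), start=1):
        stripped = raw.strip()
        col = len(raw) - len(raw.lstrip()) + 1 if stripped else 1
        tag = 'C' if stripped.startswith('--') else 'Q'
        tokens.append(Token(tag, stripped, at(lineno, col)))
    return tokens

# lex(' -- :name username_for_id :1  ', ctx=None)[0]
# -> Token(tag='C', value='-- :name username_for_id :1', location=(1, 2))
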
Example #10
 def test_keyword_only(self):
     self.assertEqual({
         'keyword': lexer.Token('K', ':foo', at(1, 4)),
         'rest': lexer.Token('S', '', at(1, 8)),
     }, lexer.lex_comment(self.tok('-- :foo')))
Example #11
 def test_internal_whitespace(self):
     self.assertEqual({
         'keyword': lexer.Token('K', ':foo', at(1, 5)),
         'rest': lexer.Token('S', 'bar :baz', at(1, 12)),
     }, lexer.lex_comment(self.tok('--  :foo   bar :baz')))
Example #12
 def test_multiple_keywords(self):
     self.assertEqual({
         'keyword': lexer.Token('K', ':foo', at(1, 4)),
         'rest': lexer.Token('S', 'bar :baz', at(1, 9)),
     }, lexer.lex_comment(self.tok('-- :foo bar :baz')))
Example #13
 def tok(self, comment):
     return lexer.Token('C', comment, at(1, 1))
Example #14
 def test_rest(self):
     self.assertEqual({
         'keyword': lexer.Token('K', ':raw', at(1, 1)),
         'rest': lexer.Token('S', ' stuff', at(1, 5)),
     }, lexer.lex_result(self.tok(':raw stuff')))
Example #15
 def tok(self, s):
     return lexer.Token('S', s, at(1, 1))
Example #16
 def test_no_space(self):
     self.assertEqual({
         'keyword': lexer.Token('K', ':foo', at(1, 3)),
         'rest': lexer.Token('S', '', at(1, 7)),
     }, lexer.lex_comment(self.tok('--:foo')))
Example #17
 def tok(self, rest):
     return lexer.Token('S', rest, at(1, 1))
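
The remaining examples pin down lex_comment and lex_result: lex_comment drops the leading '--', extracts an optional ':keyword', and trims the remainder (an empty string rather than None when nothing is left), while lex_result keeps the remainder verbatim, leading whitespace included. A sketch reproducing those expected positions, again with assumed stand-ins rather than the library's own definitions:

import re
from collections import namedtuple

# Same assumed stand-ins as in the earlier sketches.
Token = namedtuple('Token', ['tag', 'value', 'location'])

def at(line, col):
    return (line, col)

_COMMENT = re.compile(r'--\s*(?P<keyword>:\w+)?\s*(?P<rest>.*?)\s*$')
_RESULT = re.compile(r'(?P<keyword>:\w+)?(?P<rest>.*)$')

def lex_comment(token):
    """Split a '-- :keyword rest' comment; rest is '' (not None) when empty."""
    line, col = token.location
    m = _COMMENT.match(token.value)
    keyword = m.group('keyword')
    # The no-keyword case is not covered by the examples above; falling back to
    # the rest position is a guess.
    kw_start = m.start('keyword') if keyword is not None else m.start('rest')
    return {
        'keyword': Token('K', keyword, at(line, col + kw_start)),
        'rest': Token('S', m.group('rest'), at(line, col + m.start('rest'))),
    }

def lex_result(token):
    """Split ':keyword rest'; the rest keeps its leading whitespace."""
    line, col = token.location
    m = _RESULT.match(token.value)
    keyword = m.group('keyword')
    kw_start = m.start('keyword') if keyword is not None else 0
    return {
        'keyword': Token('K', keyword, at(line, col + kw_start)),
        'rest': Token('S', m.group('rest'), at(line, col + m.start('rest'))),
    }

# lex_comment(Token('C', '--  :foo   bar :baz', at(1, 1)))['rest']
# -> Token(tag='S', value='bar :baz', location=(1, 12))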