def test_linebreaks(self):  # issue1
    """Tokenizing must round-trip every line-ending variant verbatim."""
    variants = (
        'foo\nbar\n',
        'foo\rbar\r',
        'foo\r\nbar\r\n',
        'foo\r\nbar\n',  # mixed endings within one statement
    )
    for sql in variants:
        tokens = lexer.tokenize(sql)
        # Re-joining all token values must reproduce the input exactly.
        self.assertEqual(''.join(str(x[1]) for x in tokens), sql)
def test_simple(self):
    """A basic statement tokenizes lazily into (ttype, value) pairs."""
    sql = 'select * from foo;'
    stream = lexer.tokenize(sql)
    # tokenize must be lazy -- it returns a generator, not a list.
    # assertIsInstance replaces the deprecated assert_ alias (an alias
    # for assertTrue, removed in Python 3.12) and gives a clearer
    # failure message than a raw type identity check.
    self.assertIsInstance(stream, types.GeneratorType)
    tokens = list(stream)
    self.assertEqual(len(tokens), 8)
    # Each token is a (token-type, value) pair.
    self.assertEqual(len(tokens[0]), 2)
    self.assertEqual(tokens[0], (Keyword.DML, u'select'))
    self.assertEqual(tokens[-1], (Punctuation, u';'))
def test_inline_keywords(self):  # issue 7
    """Identifiers that merely contain a keyword must lex as names."""
    # Keyword followed by an identifier starting with that keyword.
    tokens = list(lexer.tokenize("create created_foo"))
    self.assertEqual(len(tokens), 3)
    self.assertEqual(tokens[0][0], Keyword.DDL)
    self.assertEqual(tokens[2][0], Name)
    self.assertEqual(tokens[2][1], u'created_foo')
    # Lone identifiers embedding a keyword ("end", "join").
    for ident in ("enddate", "join_col"):
        tokens = list(lexer.tokenize(ident))
        self.assertEqual(len(tokens), 1)
        self.assertEqual(tokens[0][0], Name)
    # Keyword then an identifier embedding another keyword.
    tokens = list(lexer.tokenize("left join_col"))
    self.assertEqual(len(tokens), 3)
    self.assertEqual(tokens[2][0], Name)
    self.assertEqual(tokens[2][1], 'join_col')
def test_negative_numbers(self):
    """A leading minus sign belongs to the integer token itself."""
    tokens = list(lexer.tokenize("values(-1)"))
    self.assertEqual(len(tokens), 4)
    token_type, value = tokens[2]
    self.assertEqual(token_type, Number.Integer)
    self.assertEqual(value, '-1')
def test_backticks(self):
    """Backtick-quoted identifiers are kept whole as Name tokens."""
    tokens = list(lexer.tokenize('`foo`.`bar`'))
    # `foo`, the dot, and `bar` -- three tokens in total.
    self.assertEqual(len(tokens), 3)
    self.assertEqual(tokens[0], (Name, u'`foo`'))