def test_tokenize(sql, expected_tokens):
    """tokenize(sql) must yield exactly the (type, value) pairs given.

    # NOTE(review): a second `test_tokenize` appears later in this file and
    # shadows this one at import time — presumably one should be renamed.
    # Assumes a parametrize decorator supplies sql/expected_tokens — TODO confirm.
    """
    expected = [Token(ttype, tvalue) for ttype, tvalue in expected_tokens]
    assert_tokens(expected, tokenize(sql))
def format_sql(s, debug=False):
    """Tokenize, parse, and style the SQL string *s*; return the styled text.

    When *debug* is true, each intermediate stage (tokens, parsed statements,
    final output) is echoed through print_non_data before the result is returned.
    """
    token_list = list(tokenize(s))
    if debug:
        print_non_data('Tokens: %s' % token_list)

    statements = list(parse(token_list))
    if debug:
        print_non_data('Statements: %s' % statements)

    output = style(statements)
    if debug:
        print_non_data('Output: %s' % output)
    return output
def test_tokenize_select_2(select_2):
    """Tokens produced for the select_2 fixture's SQL match its expected list."""
    result = tokenize(select_2.sql)
    assert_tokens(select_2.tokens, result)
def test_tokenize_select_3(select_3):
    """tokenize() output for the select_3 fixture agrees with its token list."""
    fixture = select_3
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_insert3(insert_3):
    """Tokens produced for the insert_3 fixture's SQL match its expected list."""
    result = tokenize(insert_3.sql)
    assert_tokens(insert_3.tokens, result)
def test_tokenize_case_1(case_1):
    """tokenize() output for the case_1 fixture agrees with its token list."""
    fixture = case_1
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_where_11(where_11):
    """Tokens produced for the where_11 fixture's SQL match its expected list."""
    result = tokenize(where_11.sql)
    assert_tokens(where_11.tokens, result)
def test_tokenize_composition_3(composition_3):
    """tokenize() output for the composition_3 fixture agrees with its token list."""
    fixture = composition_3
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_where_6(where_6):
    """Tokens produced for the where_6 fixture's SQL match its expected list."""
    result = tokenize(where_6.sql)
    assert_tokens(where_6.tokens, result)
def test_tokenize_where_7(where_7):
    """tokenize() output for the where_7 fixture agrees with its token list."""
    fixture = where_7
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_where_4(where_4):
    """Tokens produced for the where_4 fixture's SQL match its expected list."""
    result = tokenize(where_4.sql)
    assert_tokens(where_4.tokens, result)
def test_tokenize_where_5(where_5):
    """tokenize() output for the where_5 fixture agrees with its token list."""
    fixture = where_5
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_where_3(where_3):
    """Tokens produced for the where_3 fixture's SQL match its expected list."""
    result = tokenize(where_3.sql)
    assert_tokens(where_3.tokens, result)
def test_tokenize_select_4(select_4):
    """tokenize() output for the select_4 fixture agrees with its token list."""
    fixture = select_4
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_where_8(where_8):
    """Tokens produced for the where_8 fixture's SQL match its expected list."""
    result = tokenize(where_8.sql)
    assert_tokens(where_8.tokens, result)
def test_tokenize_where_9(where_9):
    """tokenize() output for the where_9 fixture agrees with its token list."""
    fixture = where_9
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_composition_1(composition_1):
    """Tokens produced for the composition_1 fixture's SQL match its expected list."""
    result = tokenize(composition_1.sql)
    assert_tokens(composition_1.tokens, result)
def test_tokenize_where_10(where_10):
    """tokenize() output for the where_10 fixture agrees with its token list."""
    fixture = where_10
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_insert1(insert_1):
    """Tokens produced for the insert_1 fixture's SQL match its expected list."""
    result = tokenize(insert_1.sql)
    assert_tokens(insert_1.tokens, result)
def test_tokenize_order_by_4(order_by_4):
    """tokenize() output for the order_by_4 fixture agrees with its token list."""
    fixture = order_by_4
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_between_1(between_1):
    """Tokens produced for the between_1 fixture's SQL match its expected list."""
    result = tokenize(between_1.sql)
    assert_tokens(between_1.tokens, result)
def test_tokenize_select_1(select_1):
    """tokenize() output for the select_1 fixture agrees with its token list."""
    fixture = select_1
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_where_12(where_12):
    """Tokens produced for the where_12 fixture's SQL match its expected list."""
    result = tokenize(where_12.sql)
    assert_tokens(where_12.tokens, result)
def test_tokenize_composition_2(composition_2):
    """tokenize() output for the composition_2 fixture agrees with its token list."""
    fixture = composition_2
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_multiple_statements1(multiple_statements_1):
    """Tokens for the multiple_statements_1 fixture's SQL match its expected list."""
    result = tokenize(multiple_statements_1.sql)
    assert_tokens(multiple_statements_1.tokens, result)
def test_tokenize_insert2(insert_2):
    """tokenize() output for the insert_2 fixture agrees with its token list."""
    fixture = insert_2
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_insert4(insert_4):
    """Tokens produced for the insert_4 fixture's SQL match its expected list."""
    result = tokenize(insert_4.sql)
    assert_tokens(insert_4.tokens, result)
def test_tokenize_like_1(like_1):
    """tokenize() output for the like_1 fixture agrees with its token list."""
    fixture = like_1
    assert_tokens(fixture.tokens, tokenize(fixture.sql))
def test_tokenize_case_2(case_2):
    """Tokens produced for the case_2 fixture's SQL match its expected list."""
    result = tokenize(case_2.sql)
    assert_tokens(case_2.tokens, result)
def test_tokenize(sql, expected_tokens):
    """tokenize(sql) must yield exactly the (type, value) pairs given.

    # NOTE(review): this duplicates the `test_tokenize` defined earlier in the
    # file; this later definition wins at import time, so the earlier one is
    # never collected by pytest — presumably one of them should be renamed.
    """
    expected = []
    for ttype, tvalue in expected_tokens:
        expected.append(Token(ttype, tvalue))
    assert_tokens(expected, tokenize(sql))
def test_tokenize_order_by_3(order_by_3):
    """tokenize() output for the order_by_3 fixture agrees with its token list."""
    fixture = order_by_3
    assert_tokens(fixture.tokens, tokenize(fixture.sql))