def test__templater_dbt_templating_test_lex(
    project_dir, dbt_templater, fname  # noqa: F811
):
    """Demonstrate the lexer works on both dbt models and dbt tests.

    Handle any number of newlines.
    """
    source_fpath = os.path.join(project_dir, fname)
    with open(source_fpath, "r") as source_dbt_model:
        source_dbt_sql = source_dbt_model.read()
    # dbt may normalise trailing newlines during templating, so count them in
    # the raw source to build the expected strings.
    n_trailing_newlines = len(source_dbt_sql) - len(source_dbt_sql.rstrip("\n"))
    # Build the config once and share it between the lexer and the templater
    # (the original constructed an identical FluffConfig twice).
    config = FluffConfig(configs=DBT_FLUFF_CONFIG)
    lexer = Lexer(config=config)
    templated_file, _ = dbt_templater.process(
        in_str="",
        # Reuse the path already computed above instead of re-joining it.
        fname=source_fpath,
        config=config,
    )
    tokens, lex_vs = lexer.lex(templated_file)
    expected = "select a\nfrom table_a" + "\n" * n_trailing_newlines
    assert templated_file.source_str == expected
    assert templated_file.templated_str == expected
def test__templater_dbt_templating_test_lex(in_dbt_project_dir, dbt_templater):  # noqa
    """Show that _tests_as_models lets a dbt test be lexed as if it were a model."""
    lexer = Lexer(config=FluffConfig(configs=DBT_FLUFF_CONFIG))
    templated_file, _ = dbt_templater.process(
        in_str="",
        fname="tests/test.sql",
        config=FluffConfig(configs=DBT_FLUFF_CONFIG),
    )
    lexed_tokens, lex_violations = lexer.lex(templated_file)
    expected_sql = "select * from a"
    assert templated_file.source_str == expected_sql
    assert templated_file.templated_str == expected_sql
def test__templater_dbt_templating_test_lex(
    in_dbt_project_dir, dbt_templater, fname  # noqa
):
    """Demonstrate the lexer works on both dbt models (with any number of
    trailing newlines) and dbt tests."""
    with open(fname, "r") as source_dbt_model:
        source_dbt_sql = source_dbt_model.read()
    # dbt may normalise trailing newlines during templating, so count them in
    # the raw source to build the expected strings.
    n_trailing_newlines = len(source_dbt_sql) - len(source_dbt_sql.rstrip("\n"))
    # Build the config once and share it between the lexer and the templater
    # (the original constructed an identical FluffConfig twice).
    config = FluffConfig(configs=DBT_FLUFF_CONFIG)
    lexer = Lexer(config=config)
    templated_file, _ = dbt_templater.process(
        in_str="",
        fname=fname,
        config=config,
    )
    tokens, lex_vs = lexer.lex(templated_file)
    expected = "select a\nfrom table_a" + "\n" * n_trailing_newlines
    assert templated_file.source_str == expected
    assert templated_file.templated_str == expected
"""A basic timing function.""" # Do the timing time = timeit.timeit(func, number=iterations) / iterations # Output the result print( "{:<35} {:.6}s [{} iterations]".format( f"Time to {name}:", time, iterations, ) ) # Set up some classes to process the data kwargs = dict(dialect="ansi") lexer = Lexer(**kwargs) parser = Parser(**kwargs) linter = Linter(**kwargs) # Pre-process the lexing step for the parsing step tokens, _ = lexer.lex(sql) # Pre-process the parsing step for the linting and parsing step parsed = parser.parse(tokens) # Time the steps time_function(lambda: lexer.lex(sql), name="lex") time_function(lambda: parser.parse(tokens, recurse=0), name="parse (one level only)") time_function(lambda: parser.parse(tokens), name="parse (recursive)") time_function(lambda: linter.lint(parsed), name="lint") time_function(lambda: linter.fix(parsed), name="fix")