Example #1
def test__templater_dbt_templating_test_lex(
        project_dir,
        dbt_templater,
        fname  # noqa: F811
):
    """Demonstrate the lexer works on both dbt models and dbt tests.

    Handle any number of newlines.
    """
    source_fpath = os.path.join(project_dir, fname)
    with open(source_fpath, "r") as source_dbt_model:
        source_dbt_sql = source_dbt_model.read()
    n_trailing_newlines = len(source_dbt_sql) - len(
        source_dbt_sql.rstrip("\n"))
    lexer = Lexer(config=FluffConfig(configs=DBT_FLUFF_CONFIG))
    templated_file, _ = dbt_templater.process(
        in_str="",
        fname=source_fpath,
        config=FluffConfig(configs=DBT_FLUFF_CONFIG),
    )
    # Lexing the templated file should succeed without raising.
    tokens, lex_vs = lexer.lex(templated_file)
    assert (templated_file.source_str == "select a\nfrom table_a" +
            "\n" * n_trailing_newlines)
    assert (templated_file.templated_str == "select a\nfrom table_a" +
            "\n" * n_trailing_newlines)
Example #2
def test__templater_dbt_templating_test_lex(in_dbt_project_dir, dbt_templater):  # noqa
    """A test to demonstrate _tests_as_models works on dbt tests by temporarily making them models."""
    lexer = Lexer(config=FluffConfig(configs=DBT_FLUFF_CONFIG))
    templated_file, _ = dbt_templater.process(
        in_str="",
        fname="tests/test.sql",
        config=FluffConfig(configs=DBT_FLUFF_CONFIG),
    )
    tokens, lex_vs = lexer.lex(templated_file)
    assert templated_file.source_str == "select * from a"
    assert templated_file.templated_str == "select * from a"
Example #3
def test__api__lexer():
    """Basic checking of lexing functionality."""
    tokens, violations = Lexer(dialect="ansi").lex(test_query)
    assert violations == []
    assert isinstance(tokens, tuple)
    # The last element is the file end marker.
    assert [elem.raw for elem in tokens] == ["SELECt", " ", "1", ""]
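The module-level test_query is not shown, but the expected token stream pins down its value: the raw tokens re-join to "SELECt 1". A minimal reconstruction of the setup that Examples #3 and #5 to #8 run against:

from sqlfluff.core import Lexer, Linter, Parser

# Implied by the assertions: the tokens "SELECt", " ", "1" re-join to this.
test_query = "SELECt 1"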
Example #4
def test__templater_dbt_templating_test_lex(
        in_dbt_project_dir,
        dbt_templater,
        fname  # noqa
):
    """A test to demonstrate the lexer works on both dbt models (with any # of trailing newlines) and dbt tests."""
    with open(fname, "r") as source_dbt_model:
        source_dbt_sql = source_dbt_model.read()
    n_trailing_newlines = len(source_dbt_sql) - len(
        source_dbt_sql.rstrip("\n"))
    lexer = Lexer(config=FluffConfig(configs=DBT_FLUFF_CONFIG))
    templated_file, _ = dbt_templater.process(
        in_str="",
        fname=fname,
        config=FluffConfig(configs=DBT_FLUFF_CONFIG),
    )
    tokens, lex_vs = lexer.lex(templated_file)
    assert (templated_file.source_str == "select a\nfrom table_a" +
            "\n" * n_trailing_newlines)
    assert (templated_file.templated_str == "select a\nfrom table_a" +
            "\n" * n_trailing_newlines)
Example #5
def test__api__lexer():
    """Basic checking of lexing functionality."""
    tokens, violations = Lexer().lex(test_query)
    assert violations == []
    assert isinstance(tokens, tuple)
    assert [elem.raw for elem in tokens] == ["SELECt", " ", "1"]
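Comparing Examples #3 and #5 shows the API drift between versions: the newer Lexer requires an explicit dialect (Lexer(dialect="ansi")) and appends an empty end-of-file marker to the token stream, while the older zero-argument form stops at the last real token.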
Example #6
def test__api__linter_fix():
    """Basic checking of parsing functionality."""
    tokens, _ = Lexer().lex(test_query)
    parsed = Parser().parse(tokens)
    fixed, _ = Linter().fix(parsed)
    assert fixed.raw == "SELECT 1\n"
Example #7
def test__api__linter_lint():
    """Basic checking of parsing functionality."""
    tokens, _ = Lexer().lex(test_query)
    parsed = Parser().parse(tokens)
    violations = Linter().lint(parsed)
    assert [v.rule.code for v in violations] == ["L009", "L010"]
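Assuming sqlfluff's rule numbering of the period, L010 flags the inconsistent keyword capitalisation in "SELECt" and L009 the missing trailing newline; fixing both is exactly what Example #6's "SELECT 1\n" output reflects.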
Example #8
def test__api__parser():
    """Basic checking of parsing functionality."""
    tokens, _ = Lexer().lex(test_query)
    parsed = Parser().parse(tokens)
    assert parsed.raw == test_query
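The round-trip check here (parsed.raw == test_query) is worth noting: the parse tree preserves the source text exactly, which is what allows fixes to be written back without disturbing the untouched parts of a file.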
Example #9
import timeit

from sqlfluff.core import Lexer, Linter, Parser

sql = "SELECt 1"  # assumed sample input; the original query is not shown


def time_function(func, name, iterations=20):  # default iteration count assumed
    """A basic timing function."""
    # Do the timing
    time = timeit.timeit(func, number=iterations) / iterations
    # Output the result
    print(
        "{:<35} {:.6}s [{} iterations]".format(
            f"Time to {name}:",
            time,
            iterations,
        )
    )


# Set up some classes to process the data
kwargs = dict(dialect="ansi")
lexer = Lexer(**kwargs)
parser = Parser(**kwargs)
linter = Linter(**kwargs)

# Pre-process the lexing step for the parsing step
tokens, _ = lexer.lex(sql)
# Pre-process the parsing step for the linting and parsing step
parsed = parser.parse(tokens)

# Time the steps
time_function(lambda: lexer.lex(sql), name="lex")
time_function(lambda: parser.parse(tokens, recurse=0), name="parse (one level only)")
time_function(lambda: parser.parse(tokens), name="parse (recursive)")
time_function(lambda: linter.lint(parsed), name="lint")
time_function(lambda: linter.fix(parsed), name="fix")
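With the imports and the sql definition sketched at the top of this example, the script runs as-is: lexing is timed from the raw string, while the parse, lint and fix timings reuse the pre-computed tokens and parse tree, so each stage is measured in isolation.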