Code example #1
0
def test_parse_block_with_no_expression():
    # A dimension block with an empty body should parse to a dict that
    # carries nothing but the block's name.
    token_stream = (
        tokens.LiteralToken("dimension", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("dimension_name", 1),
        tokens.BlockStartToken(1),
        tokens.BlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    assert lkml.parser.Parser(token_stream).parse_block() == {
        "dimension": {"name": "dimension_name"}
    }
Code example #2
0
def test_parse_block_without_closing_curly_brace():
    # A block that is never closed with "}" is malformed; the parser
    # signals the failure by returning None rather than raising.
    token_stream = (
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.BlockStartToken(1),
        tokens.LiteralToken("hidden", 2),
        tokens.ValueToken(2),
        tokens.LiteralToken("yes", 2),
        tokens.StreamEndToken(3),
    )
    assert lkml.parser.Parser(token_stream).parse_block() is None
Code example #3
0
    def scan(self) -> Tuple[tokens.Token, ...]:
        """Tokenize the whole character stream and return the tokens.

        Emits a StreamStartToken first and a StreamEndToken last; in
        between, each recognized character or keyword prefix is turned
        into the corresponding token.
        """
        # Single characters that map one-to-one onto a token type.
        punctuation = {
            "{": tokens.BlockStartToken,
            "}": tokens.BlockEndToken,
            "[": tokens.ListStartToken,
            "]": tokens.ListEndToken,
            ",": tokens.CommaToken,
            ":": tokens.ValueToken,
        }
        self.tokens.append(tokens.StreamStartToken(self.line_number))
        while True:
            self.scan_until_token()
            ch = self.peek()
            if ch == "\0":
                self.tokens.append(tokens.StreamEndToken(self.line_number))
                break
            elif ch in punctuation:
                self.advance()
                self.tokens.append(punctuation[ch](self.line_number))
            elif ch == ";":
                # NOTE(review): a lone ";" is neither consumed nor
                # tokenized here, which looks like it would loop forever —
                # confirm the grammar guarantees semicolons only occur as ";;".
                if self.peek_multiple(2) == ";;":
                    self.advance(2)
                    self.tokens.append(
                        tokens.ExpressionBlockEndToken(self.line_number)
                    )
            elif ch == '"':
                self.advance()
                self.tokens.append(self.scan_quoted_literal())
            elif (
                self.peek_multiple(3) == "sql"
                or self.peek_multiple(4) == "html"
                or self.peek_multiple(24) == "expression_custom_filter"
            ):
                # These keys take an expression block (terminated by ";;")
                # as their value instead of an ordinary literal.
                self.tokens.append(self.scan_literal())
                self.scan_until_token()
                self.advance()
                self.tokens.append(tokens.ValueToken(self.line_number))
                self.scan_until_token()
                self.tokens.append(self.scan_expression_block())
            else:
                # TODO: This should actually check for valid literals first
                # and throw an error if it doesn't match
                self.tokens.append(self.scan_literal())

        return tuple(self.tokens)
Code example #4
0
File: test_parser.py  Project: LewisCharlesBaker/lkml
def test_parse_block_with_no_expression():
    # Whitespace tokens end up as prefix/suffix on the surrounding syntax
    # nodes, and an empty block body parses to an empty container.
    token_stream = (
        tokens.LiteralToken("dimension", 1),
        tokens.ValueToken(1),
        tokens.WhitespaceToken(" ", 1),
        tokens.LiteralToken("dimension_name", 1),
        tokens.WhitespaceToken(" ", 1),
        tokens.BlockStartToken(1),
        tokens.BlockEndToken(1),
        tokens.WhitespaceToken(" ", 1),
        tokens.StreamEndToken(1),
    )
    expected = BlockNode(
        type=SyntaxToken("dimension"),
        name=SyntaxToken("dimension_name"),
        left_brace=LeftCurlyBrace(prefix=" "),
        container=ContainerNode(items=tuple()),
        right_brace=RightCurlyBrace(suffix=" "),
    )
    assert lkml.parser.Parser(token_stream).parse_block() == expected
Code example #5
0
def parser():
    """Return a Parser primed with tokens for a small two-field view."""
    # view view_name {
    header = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("view", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken("view_name", 1),
        tokens.BlockStartToken(1),
    )
    # sql_table_name: schema.table_name ;;
    sql_table_name = (
        tokens.LiteralToken("sql_table_name", 2),
        tokens.ValueToken(2),
        tokens.ExpressionBlockToken("schema.table_name", 2),
        tokens.ExpressionBlockEndToken(2),
    )
    # drill_fields: [view_name.field_one, view_name.field_two]
    drill_fields = (
        tokens.LiteralToken("drill_fields", 3),
        tokens.ValueToken(3),
        tokens.ListStartToken(3),
        tokens.LiteralToken("view_name.field_one", 3),
        tokens.CommaToken(3),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.ListEndToken(3),
    )
    footer = (tokens.BlockEndToken(4), tokens.StreamEndToken(4))
    return lkml.parser.Parser(header + sql_table_name + drill_fields + footer)
Code example #6
0
File: test_lexer.py  Project: stjordanis/lkml
    lexer.scan_until_token()
    result = lexer.peek()
    assert result == "S"


def test_scan_until_token_skips_comments():
    # scan_until_token must jump past an entire comment line and stop at
    # the first real character that follows it.
    lexer = lkml.Lexer("# This is a comment\nStart here")
    lexer.scan_until_token()
    assert lexer.peek() == "S"


# (input text, expected token) pairs covering every simple one- or
# two-character token the lexer can emit; dict insertion order keeps the
# parametrized test ids stable.
_simple_tokens = {
    "\0": tokens.StreamEndToken,
    "{": tokens.BlockStartToken,
    "}": tokens.BlockEndToken,
    "[": tokens.ListStartToken,
    "]": tokens.ListEndToken,
    ",": tokens.CommaToken,
    ":": tokens.ValueToken,
    ";;": tokens.ExpressionBlockEndToken,
}
params = [(text, token_class(1)) for text, token_class in _simple_tokens.items()]


@pytest.mark.parametrize("text,expected", params)
def test_scan_all_simple_tokens(text, expected):
    # scan() always emits a StreamStartToken at index 0, so the token
    # under test sits at index 1.
    assert lkml.Lexer(text).scan()[1] == expected