def test_parse_value_literal_with_sql_block():
    """A bare literal terminated by ;; should parse to the literal's value."""
    expected = "SELECT * FROM tablename"
    token_stream = (
        tokens.LiteralToken(expected, 1),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(token_stream).parse_value()
    assert parsed == expected
def test_parse_pair_with_sql_block():
    """A sql key/value pair ending in ;; should parse to a one-entry dict."""
    query = "SELECT * FROM schema.table"
    token_stream = (
        tokens.LiteralToken("sql", 1),
        tokens.ValueToken(1),
        tokens.LiteralToken(query, 1),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(token_stream).parse_pair()
    assert parsed == {"sql": query}
def test_scan_with_complex_sql_block():
    """Lexing a sql_distinct_key expression should yield an expression block."""
    source = (
        "sql_distinct_key: concat(${orders.order_id}, '|', "
        "${orders__items.primary_key}) ;;"
    )
    expected = (
        tokens.StreamStartToken(1),
        tokens.LiteralToken("sql_distinct_key", 1),
        tokens.ValueToken(1),
        tokens.ExpressionBlockToken(
            " concat(${orders.order_id}, '|', ${orders__items.primary_key}) ", 1
        ),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    assert lkml.Lexer(source).scan() == expected
def scan(self) -> Tuple[tokens.Token, ...]:
    """Tokenize the entire input stream into a tuple of tokens.

    Dispatches on the next character: structural delimiters become their
    dedicated tokens, a double quote triggers quoted-literal scanning, and
    keys starting with "sql"/"html" (or exactly "expression_custom_filter")
    switch into expression-block mode, which captures raw text up to the
    ";;" terminator.

    Returns:
        A tuple of tokens, always starting with StreamStartToken and
        ending with StreamEndToken.

    Raises:
        SyntaxError: if a lone ";" (not part of ";;") is encountered.
    """
    self.tokens.append(tokens.StreamStartToken(self.line_number))
    while True:
        self.scan_until_token()
        ch = self.peek()
        if ch == "\0":
            self.tokens.append(tokens.StreamEndToken(self.line_number))
            break
        elif ch == "{":
            self.advance()
            self.tokens.append(tokens.BlockStartToken(self.line_number))
        elif ch == "}":
            self.advance()
            self.tokens.append(tokens.BlockEndToken(self.line_number))
        elif ch == "[":
            self.advance()
            self.tokens.append(tokens.ListStartToken(self.line_number))
        elif ch == "]":
            self.advance()
            self.tokens.append(tokens.ListEndToken(self.line_number))
        elif ch == ",":
            self.advance()
            self.tokens.append(tokens.CommaToken(self.line_number))
        elif ch == ":":
            self.advance()
            self.tokens.append(tokens.ValueToken(self.line_number))
        elif ch == ";":
            if self.peek_multiple(2) == ";;":
                self.advance(2)
                self.tokens.append(
                    tokens.ExpressionBlockEndToken(self.line_number)
                )
            else:
                # BUG FIX: previously a lone ";" was neither consumed nor
                # tokenized, so this loop spun forever on malformed input.
                # Fail loudly instead of hanging.
                raise SyntaxError(
                    'Unexpected character ";" on line %d' % self.line_number
                )
        elif ch == '"':
            self.advance()
            self.tokens.append(self.scan_quoted_literal())
        elif (
            self.peek_multiple(3) == "sql"
            or self.peek_multiple(4) == "html"
            or self.peek_multiple(24) == "expression_custom_filter"
        ):
            # Expression-block keys: emit the key literal, consume the ":"
            # that follows it, then capture the raw expression text.
            self.tokens.append(self.scan_literal())
            self.scan_until_token()
            self.advance()  # consume the ":" after the key
            self.tokens.append(tokens.ValueToken(self.line_number))
            self.scan_until_token()
            self.tokens.append(self.scan_expression_block())
        else:
            # TODO: This should actually check for valid literals first
            # and throw an error if it doesn't match
            self.tokens.append(self.scan_literal())
    return tuple(self.tokens)
def test_parse_pair_with_sql_block():
    """A sql pair with an expression block should parse to a PairNode
    whose value is the stripped expression text."""
    raw_sql = " SELECT * FROM schema.table "
    token_stream = (
        tokens.LiteralToken("sql", 1),
        tokens.ValueToken(1),
        tokens.WhitespaceToken(" ", 1),
        tokens.ExpressionBlockToken(raw_sql, 1),
        tokens.ExpressionBlockEndToken(1),
        tokens.StreamEndToken(1),
    )
    parsed = lkml.parser.Parser(token_stream).parse_pair()
    expected = PairNode(
        type=SyntaxToken("sql"), value=ExpressionSyntaxToken(raw_sql.strip())
    )
    assert parsed == expected
def parser():
    """Build a Parser over a hand-written token stream describing a small
    view with a sql_table_name expression and a drill_fields list."""
    token_stream = tuple(
        [
            tokens.StreamStartToken(1),
            # view: view_name {
            tokens.LiteralToken("view", 1),
            tokens.ValueToken(1),
            tokens.LiteralToken("view_name", 1),
            tokens.BlockStartToken(1),
            # sql_table_name: schema.table_name ;;
            tokens.LiteralToken("sql_table_name", 2),
            tokens.ValueToken(2),
            tokens.ExpressionBlockToken("schema.table_name", 2),
            tokens.ExpressionBlockEndToken(2),
            # drill_fields: [view_name.field_one, view_name.field_two]
            tokens.LiteralToken("drill_fields", 3),
            tokens.ValueToken(3),
            tokens.ListStartToken(3),
            tokens.LiteralToken("view_name.field_one", 3),
            tokens.CommaToken(3),
            tokens.LiteralToken("view_name.field_two", 3),
            tokens.ListEndToken(3),
            # }
            tokens.BlockEndToken(4),
            tokens.StreamEndToken(4),
        ]
    )
    return lkml.parser.Parser(token_stream)
# NOTE(review): the statements below look like the tail of a test whose
# "def" line is outside this chunk — left unchanged.
text = "# This is a comment\nStart here"
lexer = lkml.Lexer(text)
lexer.scan_until_token()
result = lexer.peek()
assert result == "S"

# (input string, token the lexer is expected to emit for it)
params = [
    ("\0", tokens.StreamEndToken(1)),
    ("{", tokens.BlockStartToken(1)),
    ("}", tokens.BlockEndToken(1)),
    ("[", tokens.ListStartToken(1)),
    ("]", tokens.ListEndToken(1)),
    (",", tokens.CommaToken(1)),
    (":", tokens.ValueToken(1)),
    (";;", tokens.ExpressionBlockEndToken(1)),
]


@pytest.mark.parametrize("text,expected", params)
def test_scan_all_simple_tokens(text, expected):
    # Each simple delimiter should scan to its corresponding token.
    lexer = lkml.Lexer(text)
    result = lexer.scan()
    # Skip stream start token appended at the beginning
    assert result[1] == expected


def test_scan_quoted_literal():
    # NOTE(review): this function is truncated here — its body continues
    # past the end of the visible source; left unchanged.
    text = '"This is quoted text."'
    lexer = lkml.Lexer(text)
    lexer.index = 1