Example #1
def test_parse_list_with_inner_comment():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.WhitespaceToken(" ", 1),
        tokens.ListStartToken(1),
        tokens.WhitespaceToken("\n  ", 1),
        tokens.LiteralToken("view_name.field_one", 2),
        tokens.CommaToken(2),
        tokens.WhitespaceToken("\n  ", 2),
        tokens.LiteralToken("view_name.field_two", 3),
        tokens.WhitespaceToken(" ", 3),
        tokens.CommentToken("# This is a comment", 3),
        tokens.WhitespaceToken("\n", 3),
        tokens.ListEndToken(4),
        tokens.StreamEndToken(4),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == ListNode(
        type=SyntaxToken("drill_fields", 1),
        colon=Colon(line_number=1, suffix=" "),
        left_bracket=LeftBracket(),
        items=(
            SyntaxToken("view_name.field_one", 2, prefix="\n  "),
            SyntaxToken("view_name.field_two",
                        3,
                        prefix="\n  ",
                        suffix=" # This is a comment\n"),
        ),
        right_bracket=RightBracket(),
    )
Example #2
def test_scan_comment_with_surrounding_whitespace():
    text = "\n# A comment\n "
    output = lkml.Lexer(text).scan()
    assert output == (
        tokens.StreamStartToken(1),
        tokens.WhitespaceToken("\n", 1),
        tokens.CommentToken("# A comment", 2),
        tokens.WhitespaceToken("\n ", 2),
        tokens.StreamEndToken(3),
    )
Example #3
    def scan_comment(self) -> tokens.CommentToken:
        r"""Returns a token from a comment.

        The initial pound (#) character is consumed in the scan method, so this
        method only scans for a newline or end of file to indicate the end of the token.

        The pound character is added back to the beginning of the token to emphasize
        the importance of any leading whitespace that follows.

        Example:
            >>> lexer = Lexer(" Disregard this line\n")
            >>> lexer.scan_comment()
            CommentToken(# Disregard this line)

        """
        chars = "#"
        while self.peek() not in "\0\n":
            chars += self.consume()
        return tokens.CommentToken(chars, self.line_number)
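
The docstring above says a comment is terminated by either a newline or end of file, but none of the surrounding examples exercise the end-of-file case. Below is a minimal sketch of that case through the public lkml.Lexer(...).scan() entry point used in Examples #2 and #6; the expected token tuple in the comment is an assumption inferred from the docstring's newline-or-EOF rule, not output copied from the library.

import lkml

# A comment that runs to end of file, with no trailing newline.
output = lkml.Lexer("# trailing comment").scan()

# Expected shape (an assumption, based on the docstring's newline-or-EOF rule):
# (StreamStartToken(1), CommentToken("# trailing comment", 1), StreamEndToken(1))
print(output)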
Example #4
def test_parse_list_with_only_comment():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.WhitespaceToken(" ", 1),
        tokens.ListStartToken(1),
        tokens.WhitespaceToken("\n  ", 1),
        tokens.CommentToken("# Put some fields here", 2),
        tokens.WhitespaceToken("\n", 2),
        tokens.ListEndToken(3),
        tokens.StreamEndToken(3),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == ListNode(
        type=SyntaxToken("drill_fields"),
        left_bracket=LeftBracket(),
        items=tuple(),
        right_bracket=RightBracket(prefix="\n  # Put some fields here\n"),
    )
Example #5
def test_parse_list_with_trailing_comment():
    stream = (
        tokens.LiteralToken("drill_fields", 1),
        tokens.ValueToken(1),
        tokens.WhitespaceToken(" ", 1),
        tokens.ListStartToken(1),
        tokens.LiteralToken("view_name.field_one", 1),
        tokens.CommaToken(1),
        tokens.LiteralToken("view_name.field_two", 1),
        tokens.ListEndToken(1),
        tokens.WhitespaceToken(" ", 1),
        tokens.CommentToken("# This is a comment", 1),
        tokens.StreamEndToken(1),
    )
    parser = lkml.parser.Parser(stream)
    result = parser.parse_list()
    assert result == ListNode(
        type=SyntaxToken("drill_fields"),
        left_bracket=LeftBracket(),
        items=(SyntaxToken("view_name.field_one"), SyntaxToken("view_name.field_two")),
        right_bracket=RightBracket(suffix=" # This is a comment"),
    )
Example #6
def test_scan_comment():
    text = "# Make this better \n"
    lexer = lkml.Lexer(text)
    lexer.index = 1
    token = lexer.scan_comment()
    assert token == tokens.CommentToken("# Make this better ", 1)