def produces_double_linked_list_of_tokens_including_comments():
    """Lexing yields tokens doubly linked via ``prev``/``next``.

    ``advance()`` never returns comment tokens directly, but they must still
    appear in the linked list hanging off the tokens themselves.
    """
    # The snippet must span multiple lines: a GraphQL comment runs to end of
    # line, so collapsing it onto one line would swallow "field" and "}"
    # and the expected token-kind list below could never match.
    source = Source(
        """
        {
            #comment
            field
        }
        """
    )
    lexer = Lexer(source)
    start_token = lexer.token
    while True:
        end_token = lexer.advance()
        if end_token.kind == TokenKind.EOF:
            break
        # advance() skips over comment tokens.
        assert end_token.kind != TokenKind.COMMENT
    # The list is bounded: nothing before SOF, nothing after EOF.
    assert start_token.prev is None
    assert end_token.next is None
    tokens: List[Token] = []
    tok: Optional[Token] = start_token
    while tok:
        # Each token's prev pointer must reference the token collected before it.
        assert not tokens or tok.prev == tokens[-1]
        tokens.append(tok)
        tok = tok.next
    assert [tok.kind for tok in tokens] == [
        TokenKind.SOF,
        TokenKind.BRACE_L,
        TokenKind.COMMENT,
        TokenKind.NAME,
        TokenKind.BRACE_R,
        TokenKind.EOF,
    ]
def lex_reports_useful_information_for_dashes_in_names():
    """A dash inside a name raises a syntax error pointing at the dash."""
    lexer = Lexer(Source("a-b"))
    # The leading "a" lexes as an ordinary name token.
    name_token = lexer.advance()
    assert name_token == Token(TokenKind.NAME, 0, 1, 1, 1, None, "a")
    # The "-b" that follows is rejected as a malformed number.
    with raises(GraphQLSyntaxError) as exc_info:
        lexer.advance()
    error = exc_info.value
    assert error.message == (
        "Syntax Error: Invalid number, expected digit but got: 'b'."
    )
    assert error.locations == [(1, 3)]
def produces_double_linked_list_of_tokens_including_comments():
    """Tokens form a doubly linked list; comments are in the list but skipped by advance()."""
    lexer = Lexer(Source('{\n #comment\n field\n }'))
    first = lexer.token
    token = lexer.advance()
    while token.kind != TokenKind.EOF:
        # advance() must never hand back a comment token.
        assert token.kind != TokenKind.COMMENT
        token = lexer.advance()
    # The chain is bounded at both ends.
    assert first.prev is None
    assert token.next is None
    collected = []
    current = first
    while current:
        # Back-links must mirror the forward traversal.
        assert not collected or current.prev == collected[-1]
        collected.append(current)
        current = current.next
    assert [t.kind.value for t in collected
            ] == ['<SOF>', '{', 'Comment', 'Name', '}', '<EOF>']
# Verifies that a Source constructed with a file name and a SourceLocation
# column offset (column 5) reports the shifted column in the rendered error.
# NOTE(review): the multi-line `dedent` string asserting the exact rendered
# error output was collapsed onto one line by whitespace loss; its original
# newlines and caret padding cannot be reconstructed here — restore this
# expected-output literal from version control before relying on the test.
def updates_column_numbers_in_error_for_file_context(): source = Source("?", "foo.js", SourceLocation(1, 5)) with raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent(""" Syntax Error: Cannot parse the unexpected character '?'. foo.js (1:5) 1: ? ^ """)
# Same check as the older variant above it in the file history: a Source with
# an explicit location offset (column 5) must shift the reported column in the
# "foo.js:1:5" style error rendering.
# NOTE(review): the multi-line `dedent` expected-output string was collapsed
# onto one line by whitespace loss; exact newlines and the caret's padding
# cannot be reconstructed here — restore this literal from version control.
def updates_column_numbers_in_error_for_file_context(): source = Source("~", "foo.js", SourceLocation(1, 5)) with raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent( """ Syntax Error: Unexpected character: '~'. foo.js:1:5 1 | ~ | ^ """ )
# Verifies that a SourceLocation line offset (line 11) shifts the reported
# line number: the '?' sits on the third physical line, rendered as line 13.
# The `\x20` escapes mark lines whose trailing space would otherwise be
# invisible (or stripped by editors) in the expected output.
# NOTE(review): the multi-line `dedent` expected-output string was collapsed
# onto one line by whitespace loss; exact newlines and caret padding cannot be
# reconstructed here — restore this literal from version control.
def updates_line_numbers_in_error_for_file_context(): s = "\n\n ?\n\n" source = Source(s, "foo.js", SourceLocation(11, 12)) with raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent(""" Syntax Error: Cannot parse the unexpected character '?'. foo.js (13:6) 12:\x20 13: ? ^ 14:\x20 """)
# Newer-format variant of the line-offset check: with SourceLocation(11, 12),
# the '~' on the third physical line must be reported as "foo.js:13:6" using
# the "NN | ..." gutter rendering.
# NOTE(review): the multi-line `dedent` expected-output string was collapsed
# onto one line by whitespace loss; exact newlines, gutter spacing, and caret
# padding cannot be reconstructed here — restore from version control.
def updates_line_numbers_in_error_for_file_context(): s = "\n\n ~\n\n" source = Source(s, "foo.js", SourceLocation(11, 12)) with raises(GraphQLSyntaxError) as exc_info: Lexer(source).advance() assert str(exc_info.value) == dedent( """ Syntax Error: Unexpected character: '~'. foo.js:13:6 12 | 13 | ~ | ^ 14 | """ )
def lex_one(s: str) -> Token:
    """Lex *s* and return the first token after the start-of-file token.

    Annotated to match the sibling ``lex_one(s: str) -> Token`` helper
    elsewhere in this file.
    """
    lexer = Lexer(Source(s))
    return lexer.advance()
def lex_value(s: str) -> Optional[str]:
    """Return the value of the first token of *s*, asserting EOF follows."""
    lexer = Lexer(Source(s))
    first = lexer.advance()
    # The input must consist of exactly one token.
    assert lexer.advance().kind == TokenKind.EOF, "Expected EOF"
    return first.value
def lex_second(s: str) -> Token:
    """Return the second token produced when lexing *s*."""
    lexer = Lexer(Source(s))
    for _ in range(2):
        token = lexer.advance()
    return token
def lex_one(s: str) -> Token:
    """Return the first token produced when lexing *s*."""
    return Lexer(Source(s)).advance()
def lex_value(s: str) -> str:
    """Lex a single token from *s* and return its string value.

    Asserts that exactly one token is present (EOF follows immediately) and
    that the token value is really a string, honoring the declared ``-> str``
    return type — token values are Optional, as the sibling
    ``lex_value(s) -> Optional[str]`` helper's signature shows.
    """
    lexer = Lexer(Source(s))
    value = lexer.advance().value
    # Narrow Optional[str] to str so the annotation is actually enforced.
    assert isinstance(value, str)
    assert lexer.advance().kind == TokenKind.EOF, "Expected EOF"
    return value
def lex_value(s: str) -> str:
    """Return the string value of the sole token in *s*, asserting EOF follows."""
    lexer = Lexer(Source(s))
    token = lexer.advance()
    # Narrow the Optional token value to str before returning it.
    assert isinstance(token.value, str)
    assert lexer.advance().kind == TokenKind.EOF, "Expected EOF"
    return token.value