Code example #1
def produces_double_linked_list_of_tokens_including_comments():
    source = Source("""
        {
          #comment
          field
        }
        """)
    lexer = Lexer(source)
    start_token = lexer.token
    while True:
        end_token = lexer.advance()
        if end_token.kind == TokenKind.EOF:
            break
        assert end_token.kind != TokenKind.COMMENT
    assert start_token.prev is None
    assert end_token.next is None
    tokens: List[Token] = []
    tok: Optional[Token] = start_token
    while tok:
        assert not tokens or tok.prev == tokens[-1]
        tokens.append(tok)
        tok = tok.next
    assert [tok.kind for tok in tokens] == [
        TokenKind.SOF,
        TokenKind.BRACE_L,
        TokenKind.COMMENT,
        TokenKind.NAME,
        TokenKind.BRACE_R,
        TokenKind.EOF,
    ]
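The snippets in this listing are excerpted from graphql-core's lexer test suite and omit their imports. As a hedged sketch (module paths assumed from graphql-core 3.x; the original tests may pull dedent from a project-local test helper rather than the standard library), they rely on roughly the following:

# Assumed imports for the snippets in this listing (not part of the originals).
from typing import List, Optional
from textwrap import dedent  # the original tests may use their own dedent helper

from pytest import raises

from graphql.error import GraphQLSyntaxError
from graphql.language import Lexer, Source, SourceLocation, Token, TokenKind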
Code example #2
def lex_reports_useful_information_for_dashes_in_names():
    q = "a-b"
    lexer = Lexer(Source(q))
    first_token = lexer.advance()
    assert first_token == Token(TokenKind.NAME, 0, 1, 1, 1, None, "a")
    with raises(GraphQLSyntaxError) as exc_info:
        lexer.advance()
    error = exc_info.value
    assert error.message == (
        "Syntax Error: Invalid number, expected digit but got: 'b'.")
    assert error.locations == [(1, 3)]
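Reading the Token comparison above, the positional arguments appear to be kind, start, end, line, column, prev, value; this matches graphql-core's Token constructor at the time these tests were written, but treat it as an assumption. The same check written against named attributes, as a minimal sketch:

# Hedged sketch, not part of the original test: inspect the first token of "a-b"
# through its attributes instead of constructing an expected Token.
token = Lexer(Source("a-b")).advance()
assert (token.kind, token.start, token.end) == (TokenKind.NAME, 0, 1)
assert (token.line, token.column, token.value) == (1, 1, "a")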
Code example #3
File: test_lexer.py  Project: dfee/graphql-core-next
def produces_double_linked_list_of_tokens_including_comments():
    lexer = Lexer(Source('{\n      #comment\n      field\n    }'))
    start_token = lexer.token
    while True:
        end_token = lexer.advance()
        if end_token.kind == TokenKind.EOF:
            break
        assert end_token.kind != TokenKind.COMMENT
    assert start_token.prev is None
    assert end_token.next is None
    tokens = []
    tok = start_token
    while tok:
        assert not tokens or tok.prev == tokens[-1]
        tokens.append(tok)
        tok = tok.next
    assert [tok.kind.value for tok in tokens
            ] == ['<SOF>', '{', 'Comment', 'Name', '}', '<EOF>']
Code example #4
    def updates_column_numbers_in_error_for_file_context():
        source = Source("?", "foo.js", SourceLocation(1, 5))
        with raises(GraphQLSyntaxError) as exc_info:
            Lexer(source).advance()
        assert str(exc_info.value) == dedent("""
            Syntax Error: Cannot parse the unexpected character '?'.

            foo.js (1:5)
            1:     ?
                   ^
            """)
Code example #5
    def updates_column_numbers_in_error_for_file_context():
        source = Source("~", "foo.js", SourceLocation(1, 5))
        with raises(GraphQLSyntaxError) as exc_info:
            Lexer(source).advance()
        assert str(exc_info.value) == dedent(
            """
            Syntax Error: Unexpected character: '~'.

            foo.js:1:5
            1 |     ~
              |     ^
            """
        )
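Note: examples #4 and #5 (and #6 and #7 below) carry the same test name but assert different output; they appear to come from different releases of the library, between which the syntax-error wording and the source-excerpt format ("foo.js (1:5)" with "1:" line prefixes versus "foo.js:1:5" with "1 |" gutters) changed.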
Code example #6
    def updates_line_numbers_in_error_for_file_context():
        s = "\n\n     ?\n\n"
        source = Source(s, "foo.js", SourceLocation(11, 12))
        with raises(GraphQLSyntaxError) as exc_info:
            Lexer(source).advance()
        assert str(exc_info.value) == dedent("""
            Syntax Error: Cannot parse the unexpected character '?'.

            foo.js (13:6)
            12:\x20
            13:      ?
                     ^
            14:\x20
            """)
Code example #7
    def updates_line_numbers_in_error_for_file_context():
        s = "\n\n     ~\n\n"
        source = Source(s, "foo.js", SourceLocation(11, 12))
        with raises(GraphQLSyntaxError) as exc_info:
            Lexer(source).advance()
        assert str(exc_info.value) == dedent(
            """
            Syntax Error: Unexpected character: '~'.

            foo.js:13:6
            12 |
            13 |      ~
               |      ^
            14 |
            """
        )
Code example #8
def lex_one(s):
    lexer = Lexer(Source(s))
    return lexer.advance()
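A hedged usage sketch (not from the original file): lex_one skips the initial <SOF> token and returns the first real token of the source.

# Assumed usage, for illustration only.
assert lex_one("foo").kind == TokenKind.NAME
assert lex_one("foo").value == "foo"
assert lex_one('"str"').kind == TokenKind.STRING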
Code example #9
def lex_value(s: str) -> Optional[str]:
    lexer = Lexer(Source(s))
    value = lexer.advance().value
    assert lexer.advance().kind == TokenKind.EOF, "Expected EOF"
    return value
Code example #10
def lex_second(s: str) -> Token:
    lexer = Lexer(Source(s))
    lexer.advance()
    return lexer.advance()
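Similarly, a hedged usage sketch: lex_second advances past the first real token and returns the second one.

# Assumed usage, for illustration only.
assert lex_second("foo bar").value == "bar"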
Code example #11
def lex_one(s: str) -> Token:
    lexer = Lexer(Source(s))
    return lexer.advance()
Code example #12
def lex_value(s: str) -> str:
    lexer = Lexer(Source(s))
    value = lexer.advance().value
    assert lexer.advance().kind == TokenKind.EOF
    return value
Code example #13
def lex_value(s: str) -> str:
    lexer = Lexer(Source(s))
    value = lexer.advance().value
    assert isinstance(value, str)
    assert lexer.advance().kind == TokenKind.EOF, "Expected EOF"
    return value
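A hedged usage sketch for the lex_value helpers above: they return the value of the single token in the source, with the trailing EOF asserted by the helper itself.

# Assumed usage, for illustration only.
assert lex_value('"quoted"') == "quoted"
assert lex_value('"""block string"""') == "block string"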