Example #1
def test_multiple_tokens():
    assert (list(
        Lexer("""
    query {
        Node (search: "foo") {
            id
            name
        }
    }
    """)) == [
            token.SOF(0, 0),
            token.Name(5, 10, "query"),
            token.CurlyOpen(11, 12),
            token.Name(21, 25, "Node"),
            token.ParenOpen(26, 27),
            token.Name(27, 33, "search"),
            token.Colon(33, 34),
            token.String(35, 40, "foo"),
            token.ParenClose(40, 41),
            token.CurlyOpen(42, 43),
            token.Name(56, 58, "id"),
            token.Name(71, 75, "name"),
            token.CurlyClose(84, 85),
            token.CurlyClose(90, 91),
            token.EOF(96, 96),
        ])
Example #2
def test_skip_whitespace_and_comments_1():
    assert (lex_one("""

    foo


    """) == token.Name(6, 9, "foo"))
Example #3
def test_skip_whitespace_and_comments_3():
    assert lex_one(",,,foo,,,") == token.Name(3, 6, "foo")
Example #4
def test_skip_whitespace_and_comments_2():
    assert (lex_one("""
    #comment
    foo#comment
    """) == token.Name(18, 21, "foo"))
Example #5
def test_accept_binary_type():
    assert lex_one(b"foo") == token.Name(0, 3, "foo")
Example #6
def test_accept_bom_header():
    assert lex_one("\uFEFF foo") == token.Name(2, 5, "foo")
Example #7
def test_ignore_trailing_whitespace_2():
    assert [
        token.SOF(0, 0),
        token.Name(1, 4, "foo"),
        token.EOF(11, 11),
    ] == list(Lexer(" foo     \n#"))
Example #8
def test_ignore_trailing_whitespace():
    assert [token.SOF(0, 0),
            token.Name(1, 4, "foo"),
            token.EOF(9, 9)] == list(Lexer(" foo     "))
Example #9
            """)

    assert str(exc_info.value) == ('Unexpected character "?" (3:17):\n'
                                   "  1:\n"
                                   "  2:\n"
                                   "  3:                ?\n"
                                   "                    ^\n"
                                   "  4:\n"
                                   "  5:            \n")


@pytest.mark.parametrize(
    "value,expected",
    [
        ("abc", token.Name(0, 3, "abc")),
        ("_abc", token.Name(0, 4, "_abc")),
        ("abc_", token.Name(0, 4, "abc_")),
        ("abc123", token.Name(0, 6, "abc123")),
        ("abc_123", token.Name(0, 7, "abc_123")),
    ],
)
def test_name(value, expected):
    assert lex_one(value) == expected


@pytest.mark.parametrize(
    "value,expected",
    [
        ('"simple"', token.String(0, 8, "simple")),
        ('" white space "', token.String(0, 15, " white space ")),