Пример #1
0
def test_escaping():
    """Backslash escape sequences in string literals are decoded by the lexer."""
    whitespace_tokens = lexer_single(
        r'"\tHello world\nand goodbye!"', without_end=True)
    assert whitespace_tokens == [InlineString('\tHello world\nand goodbye!')]

    quoted_tokens = lexer_single(
        r'"A message, \"and a quote\"."', without_end=True)
    assert quoted_tokens == [InlineString('A message, "and a quote".')]
Пример #2
0
def test_simple_vectorization():
    """A method call with arguments collects into one nested token vector."""
    vector = parser.collect_vectors(lexer_single('person.walk("a", 1)'))

    expected_layout = [
        Identifier, LexicalAccess, Identifier,
        [InlineString, LexicalSeparator, NumericValue],
    ]
    validate_types(vector, expected_layout, TokenVector)
Пример #3
0
def test_complex_vectorization():
    """Nested parenthesised expressions vectorize into matching nested lists."""
    source = 'person.walk(8 * (1 + 3), location.random(2 * (4 + 9)))'
    vector = parser.collect_vectors(lexer_single(source))

    # Both call arguments share the shape: N * (N + N)
    multiplication = [
        NumericValue, LexicalMultiplication,
        [NumericValue, LexicalAddition, NumericValue],
    ]
    validate_types(vector, [
        Identifier, LexicalAccess, Identifier,
        multiplication + [
            LexicalSeparator,
            Identifier, LexicalAccess, Identifier,
            multiplication,
        ],
    ], TokenVector)
Пример #4
0
def test_source_reference_lexical_output():
    """Each token's source_ref column span maps back to its original text."""
    code = "does start with number a, number b = 42"
    tokens = lexer_single(code, without_end=True)

    spans = [
        code[token.source_ref.column_start:token.source_ref.column_end]
        for token in tokens
    ]
    assert spans == ['does', 'start', 'with', 'number', 'a', ',',
                     'number', 'b', '=', '42']
Пример #5
0
def test_extraneous_closing_tokens(source):
    """A stray closing token aborts vector collection with an explicit error."""
    with pytest.raises(UnexpectedVectorTermination):
        parser.collect_vectors(lexer_single(source))
Пример #6
0
def test_missing_closing_token(source):
    """An unterminated group is reported when collecting vectors."""
    with pytest.raises(UnclosedVector):
        parser.collect_vectors(lexer_single(source))
Пример #7
0
def test_group_termination_errors(code):
    """Malformed group termination is rejected already at the lexing stage."""
    with pytest.raises(ValueError):
        lexer_single(code)
Пример #8
0
def test_indentation_handling():
    """Leading tabs lex into one LexicalIndent token per tab."""
    tokens = lexer_single("\t\t\tid", without_end=True)
    expected = [LexicalIndent('\t', None)] * 3 + [Identifier('id')]
    assert tokens == expected
Пример #9
0
def test_whitespace_handling():
    """Extra or missing spaces around separators do not change the tokens."""
    canonical = lexer_single("does start with number a, number b, number c")
    spaced = lexer_single("does   start with number a,number b,number c   ")
    assert canonical == spaced
Пример #10
0
def test_empty_string():
    """An empty string literal lexes to a single empty InlineString token."""
    tokens = lexer_single('""', without_end=True)

    assert len(tokens) == 1
    token = tokens[0]
    assert isinstance(token, InlineString)
    assert token.value == ""