# NOTE: imports are assumed for these asciidoxy test snippets; the exact module
# paths may differ between asciidoxy versions.
from unittest.mock import MagicMock

import pytest

from asciidoxy.parser.doxygen.java import JavaTypeParser
from asciidoxy.parser.doxygen.python import PythonTypeParser
from asciidoxy.parser.doxygen.type_parser import Token, TokenCategory


def test_adapt_tokens__add_nested_type_hint():
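    # The nested type-hint tokens passed as the second argument are appended
    # directly after the outer type name.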
    assert PythonTypeParser.adapt_tokens([
        Token("Type", TokenCategory.NAME),
    ], [
        Token("[", TokenCategory.NESTED_START),
        Token("NestedType", TokenCategory.NAME),
        Token("]", TokenCategory.NESTED_END),
    ]) == [
        Token("Type", TokenCategory.NAME),
        Token("[", TokenCategory.NESTED_START),
        Token("NestedType", TokenCategory.NAME),
        Token("]", TokenCategory.NESTED_END),
    ]


def test_parse_xml__generic_type_with_bound(type_element):
    # The original def line is missing from the source; the test name and the
    # `type_element` fixture (a Doxygen XML <type> node) are assumptions.
    driver_mock = MagicMock()
    type_ref = JavaTypeParser.parse_xml(type_element, driver=driver_mock)

    assert type_ref is not None
    assert type_ref.language == "java"
    assert type_ref.prefix == "<T extends Getter<?>> "
    assert type_ref.name == "T"
    assert not type_ref.suffix
    assert not type_ref.nested

    driver_mock.unresolved_ref.assert_not_called()


@pytest.mark.parametrize("tokens,expected", [
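    # A wildcard with an upper bound ("? extends MyType") passes through unchanged.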
    ([
        Token("?", TokenCategory.WILDCARD),
        Token(" ", TokenCategory.WHITESPACE),
        Token("extends", TokenCategory.WILDCARD_BOUNDS),
        Token(" ", TokenCategory.WHITESPACE),
        Token("MyType", TokenCategory.NAME),
    ], [
        Token("?", TokenCategory.WILDCARD),
        Token(" ", TokenCategory.WHITESPACE),
        Token("extends", TokenCategory.WILDCARD_BOUNDS),
        Token(" ", TokenCategory.WHITESPACE),
        Token("MyType", TokenCategory.NAME),
    ]),
    # A second case starting with
    #   Token("T", TokenCategory.NAME), whitespace, Token("extends", TokenCategory.WILDCARD_BOUNDS), ...
    # is truncated in the source and is not reconstructed here.
])
def test_adapt_tokens__java_wildcards(tokens, expected):
    # The original test function is cut off in the source; its name and body are
    # assumptions based on the parametrize arguments above.
    assert JavaTypeParser.adapt_tokens(tokens) == expected


def namespace_sep(text: str = ":") -> Token:
    return Token(text, TokenCategory.NAMESPACE_SEPARATOR)


def block(text: str = "^") -> Token:
    return Token(text, TokenCategory.BLOCK)


def test_adapt_tokens__add_nested_type_hint__double_nested_with_whitespace():
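    # The stray "]" and trailing whitespace produced by a Doxygen bug are dropped,
    # the nested tokens are spliced in after the outer type name, and a closing "]"
    # is added to balance the double nesting.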
    assert PythonTypeParser.adapt_tokens(
        [
            Token("Type", TokenCategory.NAME),
            Token("]", TokenCategory.NESTED_END),  # Bug in Doxygen
            Token(" ", TokenCategory.WHITESPACE),
        ],
        [
            Token("[", TokenCategory.NESTED_START),
            Token("NestedType", TokenCategory.NAME),
            Token("[", TokenCategory.NESTED_START),
            Token("DoubleNestedType", TokenCategory.NAME),
            Token("]", TokenCategory.NESTED_END),
        ]) == [
            Token("Type", TokenCategory.NAME),
            Token("[", TokenCategory.NESTED_START),
            Token("NestedType", TokenCategory.NAME),
            Token("[", TokenCategory.NESTED_START),
            Token("DoubleNestedType", TokenCategory.NAME),
            Token("]", TokenCategory.NESTED_END),
            Token("]", TokenCategory.NESTED_END),
        ]


def test_adapt_tokens__remove_def():
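    # All "def" name tokens are removed from the stream; every other token,
    # including whitespace, is kept unchanged.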
    assert PythonTypeParser.adapt_tokens([
        Token(" ", TokenCategory.WHITESPACE),
        Token("def", TokenCategory.NAME),
        Token(" ", TokenCategory.WHITESPACE),
        Token("Type", TokenCategory.NAME),
        Token("[", TokenCategory.NESTED_START),
        Token("def", TokenCategory.NAME),
        Token(",", TokenCategory.NESTED_SEPARATOR),
        Token("OtherType", TokenCategory.NAME),
        Token("]", TokenCategory.NESTED_END),
        Token(" ", TokenCategory.WHITESPACE),
    ]) == [
        Token(" ", TokenCategory.WHITESPACE),
        Token(" ", TokenCategory.WHITESPACE),
        Token("Type", TokenCategory.NAME),
        Token("[", TokenCategory.NESTED_START),
        Token(",", TokenCategory.NESTED_SEPARATOR),
        Token("OtherType", TokenCategory.NAME),
        Token("]", TokenCategory.NESTED_END),
        Token(" ", TokenCategory.WHITESPACE),
    ]