Example #1
def make_token(self, data, **kwargs):
    # Rebuild the Number token from the deserialized fields, restoring
    # its enclosure types and erasure state.
    return (
        Number.of(
            data["name_parts"],
            data["modifiers"],
            data["flags"],
            data["sign"],
            data["sub_index"],
        )
        .set_enclosure_type(frozenset(data["enclosure_type"]))
        .set_erasure(data["erasure"])
    )
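The same chained setters that make_token applies to the deserialized fields can also be used when building a token directly. A minimal sketch, assuming the Number, ValueToken, EnclosureType, and ErasureState names used in the other snippets on this page, and assuming set_erasure accepts an ErasureState:

# Illustrative only: build a Number token and apply the same setters
# that make_token uses above (enclosure types and erasure state).
token = (
    Number.of((ValueToken.of("2"),))
    .set_enclosure_type(frozenset({EnclosureType.BROKEN_AWAY}))
    .set_erasure(ErasureState.NONE)
)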
def test_number(
    name_parts,
    modifiers,
    flags,
    sign,
    expected_value,
    expected_clean_value,
    expected_name,
) -> None:
    number = Number.of(name_parts, modifiers, flags, sign)

    expected_sub_index = 1
    expected_parts = (*name_parts, sign) if sign else name_parts
    assert number.value == expected_value
    assert number.clean_value == expected_clean_value
    assert (
        number.get_key() ==
        f"Number⁝{expected_value}⟨{'⁚'.join(token.get_key() for token in expected_parts)}⟩"
    )
    assert number.name_parts == name_parts
    assert number.name == expected_name
    assert number.sub_index == expected_sub_index
    assert number.modifiers == tuple(modifiers)
    assert number.flags == tuple(flags)
    assert number.lemmatizable is False
    assert number.sign == sign

    serialized = {
        "type": "Number",
        "name": expected_name,
        "nameParts": OneOfTokenSchema().dump(name_parts, many=True),
        "modifiers": modifiers,
        "subIndex": expected_sub_index,
        "flags": [flag.value for flag in flags],
        "sign": sign and OneOfTokenSchema().dump(sign),
    }
    assert_token_serialization(number, serialized)
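The pytest.mark.parametrize decorator that feeds test_number is not part of this excerpt. A hypothetical row for the simplest case, assuming that with no modifiers, flags, or sign the value, clean value, and name all reduce to the bare digit:

# Hypothetical parametrize row, in the order of the test's arguments:
# (name_parts, modifiers, flags, sign, expected_value, expected_clean_value, expected_name)
((ValueToken.of("1"),), [], [], None, "1", "1", "1")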
def ebl_atf_text_line__number(self, number, modifiers, flags, sign=None):
    # Collect the parsed numeral's children into a single Number token.
    return Number.of(tuple(number.children), modifiers, flags, sign)
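For comparison, this is roughly the token the callback builds for the "1[2" case that appears in the parametrized test further below; the explicit empty modifier and flag lists are an assumption for the simplest call:

# Assumed equivalent of the callback's result for the ATF numeral "1[2":
Number.of((ValueToken.of("1"), BrokenAway.open(), ValueToken.of("2")), [], [])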
# Word tokens whose inner tokens carry the BROKEN_AWAY enclosure type.
Word.of((
    Variant.of(
        Reading.of((
            ValueToken.of("ku"),
            BrokenAway.open(),
            ValueToken(
                frozenset({EnclosureType.BROKEN_AWAY}),
                ErasureState.NONE,
                "r",
            ),
        )),
        Number.of((
            ValueToken.of("12"),
            BrokenAway.open(),
            ValueToken(
                frozenset({EnclosureType.BROKEN_AWAY}),
                ErasureState.NONE,
                "3",
            ),
        )),
    ),
    Joiner.hyphen().set_enclosure_type(
        frozenset({EnclosureType.BROKEN_AWAY})),
    UnclearSign.of().set_enclosure_type(
        frozenset({EnclosureType.BROKEN_AWAY})),
)),
Word.of((
    UnknownNumberOfSigns.of().set_enclosure_type(
        frozenset({EnclosureType.BROKEN_AWAY})),
    BrokenAway.close().set_enclosure_type(
        frozenset({EnclosureType.BROKEN_AWAY})),
)),


@pytest.mark.parametrize(  # pyre-ignore[56]
    "atf,expected",
    [
        ("...", Word.of([UnknownNumberOfSigns.of()])),
        ("x", Word.of([UnclearSign.of()])),
        ("X", Word.of([UnidentifiedSign.of()])),
        ("x?", Word.of([UnclearSign.of([atf.Flag.UNCERTAIN])])),
        ("X#", Word.of([UnidentifiedSign.of([atf.Flag.DAMAGE])])),
        ("12", Word.of([Number.of_name("12")])),
        (
            "1]2",
            Word.of([
                Number.of((ValueToken.of("1"), BrokenAway.close(),
                           ValueToken.of("2")))
            ]),
        ),
        (
            "1[2",
            Word.of([
                Number.of((ValueToken.of("1"), BrokenAway.open(),
                           ValueToken.of("2")))
            ]),
        ),
        ("ʾ", Word.of([Reading.of_name("ʾ")])),
        ("du₁₁", Word.of([Reading.of_name("du", 11)])),
        ("GAL", Word.of([Logogram.of_name("GAL")])),
        (
            "kur(GAL)",
            Word.of(