def test_state_dollar_line_range() -> None:
    scope = ScopeContainer(atf.Scope.LINES)
    actual = StateDollarLine(None, (2, 4), scope, atf.State.MISSING, None)

    assert actual.scope == scope
    assert actual.lemmatization == (LemmatizationToken(" 2-4 lines missing"), )
    assert actual.display_value == "2-4 lines missing"
    assert actual.is_end_of is False


def test_state_dollar_line_end_of() -> None:
    scope = ScopeContainer(atf.Surface.OBVERSE)
    actual = StateDollarLine(None, atf.Extent.END_OF, scope, None, None)

    assert actual.scope == scope
    assert actual.lemmatization == (LemmatizationToken(" end of obverse"), )
    assert actual.display_value == "end of obverse"
    assert actual.is_end_of is True


def test_strict_dollar_line_with_none() -> None:
    scope = ScopeContainer(atf.Object.OBJECT, "what")
    actual = StateDollarLine(None, atf.Extent.SEVERAL, scope, None, None)

    assert scope.content == atf.Object.OBJECT
    assert scope.text == "what"

    assert actual.scope == scope
    assert actual.lemmatization == (
        LemmatizationToken(" several object what"), )
    assert actual.atf == "$ several object what"
    assert actual.display_value == "several object what"
    assert actual.is_end_of is False


def test_state_dollar_line_content() -> None:
    scope = ScopeContainer(atf.Surface.OBVERSE)
    actual = StateDollarLine(
        atf.Qualification.AT_LEAST,
        1,
        scope,
        atf.State.BLANK,
        atf.DollarStatus.UNCERTAIN,
    )

    assert actual.scope == scope
    assert actual.lemmatization == (
        LemmatizationToken(" at least 1 obverse blank ?"), )
    assert actual.display_value == "at least 1 obverse blank ?"
    assert actual.is_end_of is False


def test_state_dollar_line() -> None:
    scope = ScopeContainer(atf.Scope.COLUMNS, "")
    actual = StateDollarLine(
        atf.Qualification.AT_LEAST,
        atf.Extent.SEVERAL,
        scope,
        atf.State.BLANK,
        atf.DollarStatus.UNCERTAIN,
    )

    assert actual.qualification == atf.Qualification.AT_LEAST
    assert actual.scope == scope
    assert actual.extent == atf.Extent.SEVERAL
    assert actual.state == atf.State.BLANK
    assert actual.status == atf.DollarStatus.UNCERTAIN
    assert actual.lemmatization == (
        LemmatizationToken(" at least several columns blank ?"), )
    assert actual.atf == "$ at least several columns blank ?"
    assert actual.display_value == "at least several columns blank ?"
    assert actual.is_end_of is False
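

    # Lark transformer callbacks for dollar-line grammar rules: each converts
    # a parsed token into the ScopeContainer used by StateDollarLine.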
    def ebl_atf_dollar_line__face(self, text):
        return ScopeContainer(atf.Surface.FACE, str(text))

    def ebl_atf_dollar_line__generic_surface(self, text):
        return ScopeContainer(atf.Surface.SURFACE, str(text))

    def ebl_atf_dollar_line__SURFACE(self, surface):
        return ScopeContainer(atf.Surface.from_atf(str(surface)))
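

# Expected serializations: (line, schema dump) pairs for the line schema tests.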
    (
        HeadingAtLine(2, (StringPart("foo"),)),
        {
            "prefix": "@",
            "content": [OneOfTokenSchema().dump(ValueToken.of("h2 foo"))],
            "type": "HeadingAtLine",
            "number": 2,
            "displayValue": "h2 foo",
            "parts": [{"type": "StringPart", "text": "foo"}],
        },
    ),
    (
        StateDollarLine(
            atf.Qualification.AT_LEAST,
            atf.Extent.BEGINNING_OF,
            ScopeContainer(atf.Surface.OBVERSE),
            atf.State.BLANK,
            atf.DollarStatus.UNCERTAIN,
        ),
        {
            "prefix": "$",
            "content": [
                OneOfTokenSchema().dump(
                    ValueToken.of(" at least beginning of obverse blank ?")
                )
            ],
            "type": "StateDollarLine",
            "qualification": "AT_LEAST",
            "extent": "BEGINNING_OF",
            "scope": {"type": "Surface", "content": "OBVERSE", "text": ""},
            "state": "BLANK",
            "status": "UNCERTAIN",
            "displayValue": "at least beginning of obverse blank ?",
        },
    ),


def test_dump_scope_schema() -> None:
    scope = ScopeContainer(atf.Surface.OBVERSE, "")
    dump = ScopeContainerSchema().dump(scope)
    assert dump == {"type": "Surface", "content": "OBVERSE", "text": ""}
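

    # Schema hook rebuilding a ScopeContainer from its dumped
    # {"type", "content", "text"} form.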
    def make__scope_container(self, data, **kwargs) -> ScopeContainer:
        return ScopeContainer(
            self.load_scope(data["type"], data["content"]), data["text"]
        )
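

# Factory building a fragment with a fully transliterated text plus matching
# signs, folios, record, and line-to-vec encoding.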
class TransliteratedFragmentFactory(FragmentFactory):
    text = Text((
        TextLine.of_iterable(
            LineNumber(1, True),
            (
                Word.of([UnidentifiedSign.of()]),
                Word.of([
                    Logogram.of_name(
                        "BA",
                        surrogate=[
                            Reading.of_name("ku"),
                            Joiner.hyphen(),
                            Reading.of_name("u", 4),
                        ],
                    )
                ]),
                Column.of(),
                Tabulation.of(),
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    Joiner.hyphen(),
                    Reading.of_name("ku"),
                    BrokenAway.close(),
                    Joiner.hyphen(),
                    Reading.of_name("nu"),
                    Joiner.hyphen(),
                    Reading.of_name("ši"),
                ]),
                Variant.of(Divider.of(":"), Reading.of_name("ku")),
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    BrokenAway.close(),
                ]),
                Column.of(2),
                Divider.of(":", ("@v", ), (Flag.DAMAGE, )),
                CommentaryProtocol.of("!qt"),
                Word.of([Number.of_name("10", flags=[Flag.DAMAGE])]),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(2, True),
            (
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    BrokenAway.close(),
                ]),
                Word.of([Logogram.of_name("GI", 6)]),
                Word.of([Reading.of_name("ana")]),
                Word.of([
                    Reading.of_name("u", 4),
                    Joiner.hyphen(),
                    Reading.of((
                        ValueToken.of("š"),
                        BrokenAway.open(),
                        ValueToken.of("u"),
                    )),
                ]),
                Word.of([UnknownNumberOfSigns.of(),
                         BrokenAway.close()]),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(3, True),
            (
                Word.of([BrokenAway.open(),
                         UnknownNumberOfSigns.of()]),
                Word.of([
                    Reading.of((
                        ValueToken.of("k"),
                        BrokenAway.close(),
                        ValueToken.of("i"),
                    )),
                    Joiner.hyphen(),
                    Reading.of_name("du"),
                ]),
                Word.of([Reading.of_name("u")]),
                Word.of([
                    Reading.of_name("ba"),
                    Joiner.hyphen(),
                    Reading.of_name("ma"),
                    Joiner.hyphen(),
                    Reading.of((
                        ValueToken.of("t"),
                        BrokenAway.open(),
                        ValueToken.of("i"),
                    )),
                ]),
                Word.of([UnknownNumberOfSigns.of(),
                         BrokenAway.close()]),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(6, True),
            (
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    BrokenAway.close(),
                ]),
                Word.of([UnclearSign.of([Flag.DAMAGE])]),
                Word.of([Reading.of_name("mu")]),
                Word.of([
                    Reading.of_name("ta"),
                    Joiner.hyphen(),
                    Reading.of_name("ma"),
                    InWordNewline.of(),
                    Joiner.hyphen(),
                    Reading.of_name("tu", 2),
                ]),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(7, True),
            (
                Word.of([
                    Variant.of(Reading.of_name("šu"),
                               CompoundGrapheme.of(["BI×IS"]))
                ]),
                LanguageShift.normalized_akkadian(),
                AkkadianWord.of([ValueToken.of("kur")]),
            ),
        ),
        StateDollarLine(
            atf.Qualification.AT_LEAST,
            1,
            ScopeContainer(atf.Surface.OBVERSE, ""),
            atf.State.MISSING,
            None,
        ),
        ImageDollarLine("1", None, "numbered diagram of triangle"),
        RulingDollarLine(atf.Ruling.SINGLE),
        LooseDollarLine("this is a loose line"),
        SealDollarLine(1),
        SealAtLine(1),
        HeadingAtLine(1),
        ColumnAtLine(ColumnLabel([atf.Status.COLLATION], 1)),
        SurfaceAtLine(
            SurfaceLabel([atf.Status.COLLATION], atf.Surface.SURFACE,
                         "stone wig")),
        ObjectAtLine(
            ObjectLabel([atf.Status.COLLATION], atf.Object.OBJECT,
                        "stone wig")),
        DiscourseAtLine(atf.Discourse.DATE),
        DivisionAtLine("paragraph", 5),
        CompositeAtLine(atf.Composite.DIV, "part", 1),
        NoteLine((
            StringPart("a note "),
            EmphasisPart("italic"),
            LanguagePart.of_transliteration(
                Language.AKKADIAN, (Word.of([Reading.of_name("bu")]), )),
        )),
        ParallelComposition(False, "my name", LineNumber(1)),
        ParallelText(
            True,
            TextId(CorpusGenre.LITERATURE, 1, 1),
            ChapterName(Stage.OLD_BABYLONIAN, "", "my name"),
            LineNumber(1),
            False,
        ),
        ParallelFragment(False, MuseumNumber.of("K.1"), True, Labels(),
                         LineNumber(1), False),
    ))
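
    # Sign transliteration expected for the text above, one row per text line.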
    signs = (
        "X BA KU ABZ075 ABZ207a\\u002F207b\\u0020X ABZ377n1/KU ABZ377n1 ABZ411\n"
        "MI DIŠ UD ŠU\n"
        "KI DU ABZ411 BA MA TI\n"
        "X MU TA MA UD\n"
        "ŠU/|BI×IS|")
    folios = Folios((Folio("WGL", "3"), Folio("XXX", "3")))
    record = Record((RecordEntry("test", RecordType.TRANSLITERATION), ))
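    # Encoding of the text above: five text lines, then a single ruling.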
    line_to_vec = ((
        LineToVecEncoding.TEXT_LINE,
        LineToVecEncoding.TEXT_LINE,
        LineToVecEncoding.TEXT_LINE,
        LineToVecEncoding.TEXT_LINE,
        LineToVecEncoding.TEXT_LINE,
        LineToVecEncoding.SINGLE_RULING,
    ), )


def test_scope_container() -> None:
    scope = ScopeContainer(atf.Object.OBJECT, "what")

    assert scope.content == atf.Object.OBJECT
    assert scope.text == "what"


def test_state_dollar_line_non_empty_string_error() -> None:
    with pytest.raises(ValueError):
        StateDollarLine(
            None, None, ScopeContainer(atf.Surface.REVERSE, "test"), None, None
        )
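

# Parser test data: EXTENTS maps ATF extent strings to parsed extents;
# SCOPES maps ATF scope strings to the expected ScopeContainer.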
EXTENTS = [
    ("145", 145),
    ("143-533", (143, 533)),
    ("143 - 533", (143, 533)),
    ("several", atf.Extent.SEVERAL),
    ("some", atf.Extent.SOME),
    ("rest of", atf.Extent.REST_OF),
    ("start of", atf.Extent.START_OF),
    ("beginning of", atf.Extent.BEGINNING_OF),
    ("middle of", atf.Extent.MIDDLE_OF),
    ("end of", atf.Extent.END_OF),
]

SCOPES = [
    ("obverse", ScopeContainer(atf.Surface.OBVERSE)),
    ("reverse", ScopeContainer(atf.Surface.REVERSE)),
    ("bottom", ScopeContainer(atf.Surface.BOTTOM)),
    ("edge", ScopeContainer(atf.Surface.EDGE)),
    ("left", ScopeContainer(atf.Surface.LEFT)),
    ("right", ScopeContainer(atf.Surface.RIGHT)),
    ("top", ScopeContainer(atf.Surface.TOP)),
    ("surface thing right", ScopeContainer(atf.Surface.SURFACE,
                                           "thing right")),
    ("surface th(in)g", ScopeContainer(atf.Surface.SURFACE, "th(in)g")),
    ("edge a", ScopeContainer(atf.Surface.EDGE, "a")),
    ("face z", ScopeContainer(atf.Surface.FACE, "z")),
    ("tablet", ScopeContainer(atf.Object.TABLET)),
    ("envelope", ScopeContainer(atf.Object.ENVELOPE)),
    ("prism", ScopeContainer(atf.Object.PRISM)),
    ("bulla", ScopeContainer(atf.Object.BULLA)),
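

    # Transformer callbacks for edge, scope, and object tokens.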
    def ebl_atf_dollar_line__edge(self, text=""):
        return ScopeContainer(atf.Surface.EDGE, str(text))

    def ebl_atf_dollar_line__SCOPE(self, scope):
        return ScopeContainer(atf.Scope(str(scope)))

    def ebl_atf_dollar_line__OBJECT(self, object):
        return ScopeContainer(atf.Object(object))
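

# Like TransliteratedFragmentFactory, but lemmatizable words also carry
# unique_lemma annotations.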
class LemmatizedFragmentFactory(TransliteratedFragmentFactory):
    text = Text((
        TextLine.of_iterable(
            LineNumber(1, True),
            (
                Word.of([UnidentifiedSign.of()]),
                Word.of([
                    Logogram.of_name(
                        "BA",
                        surrogate=[
                            Reading.of_name("ku"),
                            Joiner.hyphen(),
                            Reading.of_name("u", 4),
                        ],
                    )
                ]),
                Column.of(),
                Tabulation.of(),
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    Joiner.hyphen(),
                    Reading.of_name("ku"),
                    BrokenAway.close(),
                    Joiner.hyphen(),
                    Reading.of_name("nu"),
                    Joiner.hyphen(),
                    Reading.of_name("ši"),
                ]),
                Variant.of(Divider.of(":"), Reading.of_name("ku")),
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    BrokenAway.close(),
                ]),
                Column.of(2),
                Divider.of(":", ("@v", ), (Flag.DAMAGE, )),
                CommentaryProtocol.of("!qt"),
                Word.of([Number.of_name("10", flags=[Flag.DAMAGE])]),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(2, True),
            (
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    BrokenAway.close(),
                ]),
                Word.of([Logogram.of_name("GI", 6)],
                        unique_lemma=(WordId("ginâ I"), )),
                Word.of([Reading.of_name("ana")],
                        unique_lemma=(WordId("ana I"), )),
                Word.of(
                    [
                        Reading.of_name("u", 4),
                        Joiner.hyphen(),
                        Reading.of((
                            ValueToken.of("š"),
                            BrokenAway.open(),
                            ValueToken.of("u"),
                        )),
                    ],
                    unique_lemma=(WordId("ūsu I"), ),
                ),
                Word.of([UnknownNumberOfSigns.of(),
                         BrokenAway.close()]),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(3, True),
            (
                Word.of([BrokenAway.open(),
                         UnknownNumberOfSigns.of()]),
                Word.of(
                    unique_lemma=(WordId("kīdu I"), ),
                    parts=[
                        Reading.of((
                            ValueToken.of("k"),
                            BrokenAway.close(),
                            ValueToken.of("i"),
                        )),
                        Joiner.hyphen(),
                        Reading.of_name("du"),
                    ],
                ),
                Word.of(unique_lemma=(WordId("u I"), ),
                        parts=[Reading.of_name("u")]),
                Word.of(
                    unique_lemma=(WordId("bamātu I"), ),
                    parts=[
                        Reading.of_name("ba"),
                        Joiner.hyphen(),
                        Reading.of_name("ma"),
                        Joiner.hyphen(),
                        Reading.of((
                            ValueToken.of("t"),
                            BrokenAway.open(),
                            ValueToken.of("i"),
                        )),
                    ],
                ),
                Word.of([UnknownNumberOfSigns.of(),
                         BrokenAway.close()]),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(6, True),
            (
                Word.of([
                    BrokenAway.open(),
                    UnknownNumberOfSigns.of(),
                    BrokenAway.close(),
                ]),
                Word.of([UnclearSign.of([Flag.DAMAGE])]),
                Word.of(unique_lemma=(WordId("mu I"), ),
                        parts=[Reading.of_name("mu")]),
                Word.of(
                    unique_lemma=(WordId("tamalāku I"), ),
                    parts=[
                        Reading.of_name("ta"),
                        Joiner.hyphen(),
                        Reading.of_name("ma"),
                        InWordNewline.of(),
                        Joiner.hyphen(),
                        Reading.of_name("tu", 2),
                    ],
                ),
            ),
        ),
        TextLine.of_iterable(
            LineNumber(7, True),
            (
                Word.of([
                    Variant.of(Reading.of_name("šu"),
                               CompoundGrapheme.of(["BI×IS"]))
                ]),
                LanguageShift.normalized_akkadian(),
                AkkadianWord.of([ValueToken.of("kur")],
                                unique_lemma=(WordId("normalized I"), )),
            ),
        ),
        StateDollarLine(
            atf.Qualification.AT_LEAST,
            1,
            ScopeContainer(atf.Surface.OBVERSE, ""),
            atf.State.MISSING,
            None,
        ),
        ImageDollarLine("1", None, "numbered diagram of triangle"),
        RulingDollarLine(atf.Ruling.SINGLE),
        LooseDollarLine("this is a loose line"),
        SealDollarLine(1),
        SealAtLine(1),
        HeadingAtLine(1),
        ColumnAtLine(ColumnLabel([atf.Status.COLLATION], 1)),
        SurfaceAtLine(
            SurfaceLabel([atf.Status.COLLATION], atf.Surface.SURFACE,
                         "stone wig")),
        ObjectAtLine(
            ObjectLabel([atf.Status.COLLATION], atf.Object.OBJECT,
                        "stone wig")),
        DiscourseAtLine(atf.Discourse.DATE),
        DivisionAtLine("paragraph", 5),
        CompositeAtLine(atf.Composite.DIV, "part", 1),
        NoteLine((
            StringPart("a note "),
            EmphasisPart("italic"),
            LanguagePart.of_transliteration(
                Language.AKKADIAN, (Word.of([Reading.of_name("bu")]), )),
        )),
        ParallelComposition(False, "my name", LineNumber(1)),
        ParallelText(
            True,
            TextId(CorpusGenre.LITERATURE, 1, 1),
            ChapterName(Stage.OLD_BABYLONIAN, "", "my name"),
            LineNumber(1),
            False,
        ),
        ParallelFragment(False, MuseumNumber.of("K.1"), True, Labels(),
                         LineNumber(1), False),
    ))


    def ebl_atf_dollar_line__generic_object(self, text):
        return ScopeContainer(atf.Object.OBJECT, str(text))


def test_load_scope_schema() -> None:
    load_dict = {"type": "Surface", "content": "OBVERSE", "text": ""}
    scope = ScopeContainerSchema().load(load_dict)
    assert ScopeContainer(atf.Surface.OBVERSE, "") == scope


    def ebl_atf_dollar_line__fragment(self, text):
        return ScopeContainer(atf.Object.FRAGMENT, str(text))


from typing import List

import pytest

from ebl.transliteration.domain import atf
from ebl.transliteration.domain.at_line import SurfaceAtLine
from ebl.transliteration.domain.dollar_line import ScopeContainer, StateDollarLine
from ebl.transliteration.domain.labels import SurfaceLabel
from ebl.transliteration.domain.lark_parser import parse_atf_lark
from ebl.transliteration.domain.line import ControlLine, EmptyLine, Line
from ebl.transliteration.domain.text import Text
from ebl.transliteration.domain.transliteration_error import TransliterationError


@pytest.mark.parametrize("prefix", ["$ ", "$"])
@pytest.mark.parametrize("parenthesis", [False, True])
@pytest.mark.parametrize(
    "line,expected_tokens",
    [
        (
            "2-4 lines missing",
            StateDollarLine(None, (2, 4), ScopeContainer(atf.Scope.LINES),
                            atf.State.MISSING, None),
        ),
        (
            "at least 1 obverse missing",
            StateDollarLine(
                atf.Qualification.AT_LEAST,
                1,
                ScopeContainer(atf.Surface.OBVERSE, ""),
                atf.State.MISSING,
                None,
            ),
        ),
        (
            "2 lines",
            StateDollarLine(None, 2, ScopeContainer(atf.Scope.LINES), None, None),
        ),
        (
            "#first\n \n#second",
            [
                ControlLine("#", "first"),
                EmptyLine(),
                ControlLine("#", "second")
            ],
        ),
        ("&K11111", [ControlLine("&", "K11111")]),
        ("@reverse", [SurfaceAtLine(SurfaceLabel([], atf.Surface.REVERSE))]),
        (
            "$ (end of side)",
            [
                StateDollarLine(
                    None,
                    atf.Extent.END_OF,
                    ScopeContainer(atf.Scope.SIDE, ""),
                    None,
                    None,
                )
            ],
        ),
        ("#some notes", [ControlLine("#", "some notes")]),
        ("=: continuation", [ControlLine("=:", " continuation")]),
    ],
)
def test_parse_atf(line: str, expected_tokens: List[Line]) -> None:
    assert parse_atf_lark(line).lines == Text.of_iterable(
        expected_tokens).lines


@pytest.mark.parametrize(