Example #1
0
def test_updating(client, bibliography, sign_repository, signs,
                  text_repository) -> None:
    """Posting edited lines to /lines updates the chapter and persists."""
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    # Renumber the first line (primed number 1') and stamp the parser version.
    updated_chapter = attr.evolve(
        chapter,
        lines=(attr.evolve(chapter.lines[0], number=LineNumber(1, True)), ),
        parser_version=ATF_PARSER_VERSION,
    )
    expected = create_chapter_dto(updated_chapter)

    # Send every line of the updated chapter as an "edited" entry.
    body = {
        "new": [],
        "deleted": [],
        "edited": [
            {"index": position, "line": line_dto}
            for position, line_dto in enumerate(expected["lines"])
        ],
    }
    post_result = client.simulate_post(create_chapter_url(chapter, "/lines"),
                                       body=json.dumps(body))

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected

    # The update must also be visible on a subsequent plain GET.
    get_result = client.simulate_get(create_chapter_url(chapter))

    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected
Example #2
0
def test_importing(client, bibliography, sign_repository, signs,
                   text_repository):
    """Importing ATF via /import appends the parsed lines and persists."""
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    # Build an ATF snippet continuing after the existing first line.
    first_manuscript_line = cast(
        TextLine, chapter.lines[0].variants[0].manuscripts[0].line)
    next_line_number = first_manuscript_line.line_number.number + 1
    atf = (f"{chapter.lines[0].number.number+1}. bu\n"
           f"{chapter.manuscripts[0].siglum} {next_line_number}. ...")

    updated_chapter = attr.evolve(
        chapter,
        lines=(*chapter.lines, *parse_chapter(atf, chapter.manuscripts)),
        signs=("KU ABZ075 ABZ207a\\u002F207b\\u0020X\n\nKU\nABZ075", ),
        parser_version=ATF_PARSER_VERSION,
    )

    post_result = client.simulate_post(create_chapter_url(chapter, "/import"),
                                       body=json.dumps({"atf": atf}))

    expected = create_chapter_dto(updated_chapter)
    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected

    # The imported lines must also be visible on a subsequent GET.
    get_result = client.simulate_get(create_chapter_url(chapter))

    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected
Example #3
0
def test_invalid_line(client, text_repository, text, chapter, bibliography,
                      url):
    """GET with a non-numeric line index responds 404.

    Fix: `text` is now requested as a pytest fixture (matching `test_get`);
    previously the bare name `text` was used without being a parameter,
    which is a NameError unless a module-level `text` exists — none is
    visible in this file.
    """
    text_repository.create(text)
    text_repository.create_chapter(chapter)
    allow_references(chapter, bibliography)

    result = client.simulate_get(f"{url}/invalid")

    assert result.status == falcon.HTTP_NOT_FOUND
Example #4
0
def test_line_not_found(client, text_repository, text, chapter, bibliography,
                        url):
    """GET with an out-of-range line index responds 404.

    Fix: `text` is now requested as a pytest fixture (matching `test_get`);
    previously the bare name `text` was used without being a parameter,
    which is a NameError unless a module-level `text` exists — none is
    visible in this file.
    """
    text_repository.create(text)
    text_repository.create_chapter(chapter)
    allow_references(chapter, bibliography)

    # len(chapter.lines) is the first index past the end.
    result = client.simulate_get(f"{url}/{len(chapter.lines)}")

    assert result.status == falcon.HTTP_NOT_FOUND
Example #5
0
def test_get(client, text_repository, text, chapter, bibliography, url):
    """The line-details endpoint returns the serialized first line."""
    text_repository.create(text)
    text_repository.create_chapter(chapter)
    allow_references(chapter, bibliography)

    get_result = client.simulate_get(f"{url}/0")

    # Serialize line 0 with the chapter's manuscripts in the schema context.
    details_schema = LineDetailsSchema(
        context={"manuscripts": chapter.manuscripts})
    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == details_schema.dump(chapter.lines[0])
Example #6
0
def test_updating_alignment(client, bibliography, sign_repository, signs,
                            text_repository):
    """Posting an alignment payload updates the chapter and persists.

    NOTE(review): `DTO` is defined elsewhere in this module (not visible
    here) — presumably the alignment request body matching the expected
    token edits below; confirm against the module header.
    """
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)
    alignment = 0
    omitted_words = (1, )
    # Expected state: the first manuscript line's single word rebuilt with
    # an alignment index, a Sumerian logogram variant ("KU"), and the
    # manuscript marked with omitted word index 1.
    updated_chapter = attr.evolve(
        chapter,
        lines=(attr.evolve(
            chapter.lines[0],
            variants=(attr.evolve(
                chapter.lines[0].variants[0],
                manuscripts=(attr.evolve(
                    chapter.lines[0].variants[0].manuscripts[0],
                    line=TextLine.of_iterable(
                        chapter.lines[0].variants[0].manuscripts[0].line.
                        line_number,
                        (Word.of(
                            [
                                Reading.of_name("ku"),
                                Joiner.hyphen(),
                                BrokenAway.open(),
                                Reading.of_name("nu"),
                                Joiner.hyphen(),
                                Reading.of_name("ši"),
                                BrokenAway.close(),
                            ],
                            alignment=alignment,
                            variant=Word.of(
                                [Logogram.of_name("KU")],
                                language=Language.SUMERIAN,
                            ),
                        ), ),
                    ),
                    omitted_words=omitted_words,
                ), ),
            ), ),
        ), ),
    )

    expected_chapter = ApiChapterSchema().dump(updated_chapter)

    post_result = client.simulate_post(create_chapter_url(
        chapter, "/alignment"),
                                       body=json.dumps(DTO))

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_chapter

    # The alignment must also be visible on a subsequent plain GET.
    get_result = client.simulate_get(create_chapter_url(chapter))

    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected_chapter
Example #7
0
def test_import_invalid_entity(client, bibliography, body, expected_status,
                               sign_repository, signs, text_repository):
    """An invalid /import payload yields the parametrized error status."""
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    import_url = create_chapter_url(chapter, "/import")
    post_result = client.simulate_post(import_url, body=json.dumps(body))

    assert post_result.status == expected_status
Example #8
0
def test_updating_invalid_lemmatization(dto, expected_status, client,
                                        bibliography, sign_repository, signs,
                                        text_repository):
    """An invalid lemmatization payload yields the parametrized status."""
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    lemmatization_url = create_chapter_url(chapter, "/lemmatization")
    post_result = client.simulate_post(lemmatization_url,
                                       body=json.dumps(dto))

    assert post_result.status == expected_status
Example #9
0
def test_searching_texts(client, bibliography, sign_repository, signs,
                         text_repository):
    """Transliteration search returns the matching chapter's info."""
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    get_result = client.simulate_get("/textsearch?transliteration=ku")

    # The expected payload is the chapter's info for the query "KU".
    query = TransliterationQuery([["KU"]])
    expected = [ChapterInfoSchema().dump(ChapterInfo.of(chapter, query))]
    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected
Example #10
0
def test_updating_invalid_stage(client, bibliography, sign_repository, signs,
                                text_repository):
    """An unknown stage segment in the chapter URL responds 404."""
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    # Hand-build a URL whose stage segment ("invalid") matches no stage.
    text_id = chapter.text_id
    bad_stage_url = (f"/texts/{text_id.genre.value}"
                     f"/{text_id.category}/{text_id.index}"
                     "/chapters/invalid/any/lemmatization")
    post_result = client.simulate_post(bad_stage_url, body=json.dumps(DTO))

    assert post_result.status == falcon.HTTP_NOT_FOUND
Example #11
0
def test_updating_strophic_information(client, bibliography, sign_repository,
                                       signs, text_repository):
    """Toggling strophic flags via /lines updates the chapter and persists."""
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    # Flip both strophic flags on the first line.
    first_line = chapter.lines[0]
    toggled_line = attr.evolve(
        first_line,
        is_second_line_of_parallelism=(
            not first_line.is_second_line_of_parallelism),
        is_beginning_of_section=not first_line.is_beginning_of_section,
    )
    updated_chapter = attr.evolve(
        chapter,
        lines=(toggled_line, ),
        parser_version=ATF_PARSER_VERSION,
    )
    expected = create_chapter_dto(updated_chapter)

    # Send every line of the updated chapter as an "edited" entry.
    body = {
        "new": [],
        "deleted": [],
        "edited": [
            {"index": position, "line": line_dto}
            for position, line_dto in enumerate(expected["lines"])
        ],
    }
    post_result = client.simulate_post(create_chapter_url(chapter, "/lines"),
                                       body=json.dumps(body))

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected

    # The toggled flags must also be visible on a subsequent GET.
    get_result = client.simulate_get(create_chapter_url(chapter))

    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected
Example #12
0
def test_updating_lemmatization(client, bibliography, sign_repository, signs,
                                text_repository):
    """Posting a lemmatization payload updates the chapter and persists.

    NOTE(review): `DTO` is defined elsewhere in this module (not visible
    here) — presumably the lemmatization request body matching the token
    edits below; confirm against the module header.
    """
    allow_signs(signs, sign_repository)
    chapter: Chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)
    # Expected state: in the first variant's reconstruction, token 1 is
    # lemmatized as "aklu I" and token 6 has its lemma explicitly cleared
    # (empty tuple); tokens 0 and 2-5 are carried over unchanged. The
    # manuscript word is rebuilt with the same "aklu I" lemma.
    updated_chapter = attr.evolve(
        chapter,
        lines=(attr.evolve(
            chapter.lines[0],
            variants=(attr.evolve(
                chapter.lines[0].variants[0],
                reconstruction=(
                    chapter.lines[0].variants[0].reconstruction[0],
                    chapter.lines[0].variants[0].reconstruction[1].
                    set_unique_lemma(
                        LemmatizationToken(
                            chapter.lines[0].variants[0].reconstruction[1].
                            value,
                            (WordId("aklu I"), ),
                        )),
                    *chapter.lines[0].variants[0].reconstruction[2:6],
                    chapter.lines[0].variants[0].reconstruction[6].
                    set_unique_lemma(
                        LemmatizationToken(
                            chapter.lines[0].variants[0].reconstruction[6].
                            value,
                            tuple(),
                        )),
                ),
                manuscripts=(attr.evolve(
                    chapter.lines[0].variants[0].manuscripts[0],
                    line=TextLine.of_iterable(
                        chapter.lines[0].variants[0].manuscripts[0].line.
                        line_number,
                        (Word.of(
                            [
                                Reading.of_name("ku"),
                                Joiner.hyphen(),
                                BrokenAway.open(),
                                Reading.of_name("nu"),
                                Joiner.hyphen(),
                                Reading.of_name("ši"),
                                BrokenAway.close(),
                            ],
                            unique_lemma=[WordId("aklu I")],
                        ), ),
                    ),
                ), ),
            ), ),
        ), ),
    )

    expected = create_chapter_dto(updated_chapter)

    post_result = client.simulate_post(create_chapter_url(
        chapter, "/lemmatization"),
                                       body=json.dumps(DTO))

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected

    # The lemmata must also be visible on a subsequent plain GET.
    get_result = client.simulate_get(create_chapter_url(chapter))

    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected