def test_updating(
    client, bibliography, sign_repository, signs, text_repository
) -> None:
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)
    updated_chapter = attr.evolve(
        chapter,
        lines=(attr.evolve(chapter.lines[0], number=LineNumber(1, True)),),
        parser_version=ATF_PARSER_VERSION,
    )

    body = {
        "new": [],
        "deleted": [],
        "edited": [
            {"index": index, "line": line}
            for index, line in enumerate(create_chapter_dto(updated_chapter)["lines"])
        ],
    }
    post_result = client.simulate_post(
        create_chapter_url(chapter, "/lines"), body=json.dumps(body)
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == create_chapter_dto(updated_chapter)

    get_result = client.simulate_get(create_chapter_url(chapter))
    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == create_chapter_dto(updated_chapter)
def test_importing(
    client, bibliography, sign_repository, signs, text_repository
):
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)
    next_line_number = (
        cast(
            TextLine, chapter.lines[0].variants[0].manuscripts[0].line
        ).line_number.number
        + 1
    )
    atf = (
        f"{chapter.lines[0].number.number + 1}. bu\n"
        f"{chapter.manuscripts[0].siglum} {next_line_number}. ..."
    )
    updated_chapter = attr.evolve(
        chapter,
        lines=(*chapter.lines, *parse_chapter(atf, chapter.manuscripts)),
        signs=("KU ABZ075 ABZ207a\\u002F207b\\u0020X\n\nKU\nABZ075",),
        parser_version=ATF_PARSER_VERSION,
    )

    body = {"atf": atf}
    post_result = client.simulate_post(
        create_chapter_url(chapter, "/import"), body=json.dumps(body)
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == create_chapter_dto(updated_chapter)

    get_result = client.simulate_get(create_chapter_url(chapter))
    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == create_chapter_dto(updated_chapter)
def test_updating_alignment(
    client, bibliography, sign_repository, signs, text_repository
):
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)
    alignment = 0
    omitted_words = (1,)
    updated_chapter = attr.evolve(
        chapter,
        lines=(
            attr.evolve(
                chapter.lines[0],
                variants=(
                    attr.evolve(
                        chapter.lines[0].variants[0],
                        manuscripts=(
                            attr.evolve(
                                chapter.lines[0].variants[0].manuscripts[0],
                                line=TextLine.of_iterable(
                                    chapter.lines[0].variants[0].manuscripts[0].line.line_number,
                                    (
                                        Word.of(
                                            [
                                                Reading.of_name("ku"),
                                                Joiner.hyphen(),
                                                BrokenAway.open(),
                                                Reading.of_name("nu"),
                                                Joiner.hyphen(),
                                                Reading.of_name("ši"),
                                                BrokenAway.close(),
                                            ],
                                            alignment=alignment,
                                            variant=Word.of(
                                                [Logogram.of_name("KU")],
                                                language=Language.SUMERIAN,
                                            ),
                                        ),
                                    ),
                                ),
                                omitted_words=omitted_words,
                            ),
                        ),
                    ),
                ),
            ),
        ),
    )
    expected_chapter = ApiChapterSchema().dump(updated_chapter)

    # DTO (defined elsewhere in this test module) is assumed to be the alignment
    # payload matching the update built above.
    post_result = client.simulate_post(
        create_chapter_url(chapter, "/alignment"), body=json.dumps(DTO)
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_chapter

    get_result = client.simulate_get(create_chapter_url(chapter))
    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected_chapter
def test_get(client, text_repository):
    chapter = ChapterFactory.build(
        manuscripts=(ManuscriptFactory.build(id=1, references=tuple()),)
    )
    text_repository.create_chapter(chapter)

    result = client.simulate_get(create_chapter_url(chapter, "/extant_lines"))

    assert result.status == falcon.HTTP_OK
    assert result.json == ExtantLinesSchema().dump(chapter)["extantLines"]
def test_import_invalid_entity(
    client, bibliography, body, expected_status, sign_repository, signs, text_repository
):
    # "body" and "expected_status" are presumably supplied by pytest
    # parametrization (not shown here).
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    post_result = client.simulate_post(
        create_chapter_url(chapter, "/import"), body=json.dumps(body)
    )

    assert post_result.status == expected_status
def test_updating_invalid_lemmatization(
    dto, expected_status, client, bibliography, sign_repository, signs, text_repository
):
    # "dto" and "expected_status" are presumably supplied by pytest
    # parametrization (not shown here).
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)

    post_result = client.simulate_post(
        create_chapter_url(chapter, "/lemmatization"), body=json.dumps(dto)
    )

    assert post_result.status == expected_status
def test_updating_strophic_information(
    client, bibliography, sign_repository, signs, text_repository
):
    allow_signs(signs, sign_repository)
    chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)
    updated_chapter = attr.evolve(
        chapter,
        lines=(
            attr.evolve(
                chapter.lines[0],
                is_second_line_of_parallelism=(
                    not chapter.lines[0].is_second_line_of_parallelism
                ),
                is_beginning_of_section=not chapter.lines[0].is_beginning_of_section,
            ),
        ),
        parser_version=ATF_PARSER_VERSION,
    )

    body = {
        "new": [],
        "deleted": [],
        "edited": [
            {"index": index, "line": line}
            for index, line in enumerate(create_chapter_dto(updated_chapter)["lines"])
        ],
    }
    post_result = client.simulate_post(
        create_chapter_url(chapter, "/lines"), body=json.dumps(body)
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == create_chapter_dto(updated_chapter)

    get_result = client.simulate_get(create_chapter_url(chapter))
    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == create_chapter_dto(updated_chapter)
def test_get(client, text_repository):
    chapter = ChapterFactory.build(
        lines=tuple(),
        manuscripts=(
            ManuscriptFactory.build(references=tuple()),
            ManuscriptFactory.build(colophon=Text(), references=tuple()),
            ManuscriptFactory.build(references=tuple()),
        ),
    )
    text_repository.create_chapter(chapter)

    result = client.simulate_get(create_chapter_url(chapter, "/colophons"))

    assert result.status == falcon.HTTP_OK
    assert result.json == [
        {
            "siglum": str(manuscript.siglum),
            "text": TextSchema().dump(manuscript.colophon),
        }
        for manuscript in chapter.manuscripts
        if not manuscript.colophon.is_empty
    ]
def test_updating_lemmatization(
    client, bibliography, sign_repository, signs, text_repository
):
    allow_signs(signs, sign_repository)
    chapter: Chapter = ChapterFactory.build()
    allow_references(chapter, bibliography)
    text_repository.create_chapter(chapter)
    updated_chapter = attr.evolve(
        chapter,
        lines=(
            attr.evolve(
                chapter.lines[0],
                variants=(
                    attr.evolve(
                        chapter.lines[0].variants[0],
                        reconstruction=(
                            chapter.lines[0].variants[0].reconstruction[0],
                            chapter.lines[0].variants[0].reconstruction[1].set_unique_lemma(
                                LemmatizationToken(
                                    chapter.lines[0].variants[0].reconstruction[1].value,
                                    (WordId("aklu I"),),
                                )
                            ),
                            *chapter.lines[0].variants[0].reconstruction[2:6],
                            chapter.lines[0].variants[0].reconstruction[6].set_unique_lemma(
                                LemmatizationToken(
                                    chapter.lines[0].variants[0].reconstruction[6].value,
                                    tuple(),
                                )
                            ),
                        ),
                        manuscripts=(
                            attr.evolve(
                                chapter.lines[0].variants[0].manuscripts[0],
                                line=TextLine.of_iterable(
                                    chapter.lines[0].variants[0].manuscripts[0].line.line_number,
                                    (
                                        Word.of(
                                            [
                                                Reading.of_name("ku"),
                                                Joiner.hyphen(),
                                                BrokenAway.open(),
                                                Reading.of_name("nu"),
                                                Joiner.hyphen(),
                                                Reading.of_name("ši"),
                                                BrokenAway.close(),
                                            ],
                                            unique_lemma=[WordId("aklu I")],
                                        ),
                                    ),
                                ),
                            ),
                        ),
                    ),
                ),
            ),
        ),
    )
    expected = create_chapter_dto(updated_chapter)

    # DTO (defined elsewhere in this test module) is assumed to be the
    # lemmatization payload matching the update built above.
    post_result = client.simulate_post(
        create_chapter_url(chapter, "/lemmatization"), body=json.dumps(DTO)
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected

    get_result = client.simulate_get(create_chapter_url(chapter))
    assert get_result.status == falcon.HTTP_OK
    assert get_result.json == expected
def url(chapter: Chapter) -> str:
    return create_chapter_url(chapter, "/display")
def url(chapter: Chapter) -> str:
    return create_chapter_url(chapter, "/lines")