def test_inject_document_in_fragment_infos(fragment_finder, when, bibliography):
    """search_references_in_fragment_infos attaches the bibliography document
    to every reference of every fragment info returned by the search."""
    entry = BibliographyEntryFactory.build()
    info_one = FragmentInfo.of(
        FragmentFactory.build(
            number="K.1", references=(ReferenceFactory.build(id="RN.0"),)
        )
    )
    info_two = FragmentInfo.of(
        FragmentFactory.build(
            number="K.2",
            references=(
                ReferenceFactory.build(id="RN.1"),
                ReferenceFactory.build(id="RN.2"),
            ),
        )
    )
    expected = [
        info_one.set_references([info_one.references[0].set_document(entry)]),
        info_two.set_references(
            [
                info_two.references[0].set_document(entry),
                info_two.references[1].set_document(entry),
            ]
        ),
    ]
    when(fragment_finder).search_references("id", "pages").thenReturn(
        [info_one, info_two]
    )
    # Every referenced id resolves to the same stubbed bibliography entry.
    for reference_id in ("RN.0", "RN.1", "RN.2"):
        when(bibliography).find(reference_id).thenReturn(entry)

    assert (
        fragment_finder.search_references_in_fragment_infos("id", "pages")
        == expected
    )
def test_search_references(client, fragmentarium, bibliography, user):
    """GET /fragments by reference id and pages returns fragment infos with
    the bibliography documents injected, and the response is not cached."""
    bib_entry_1 = BibliographyEntryFactory.build(id="RN.0", pages="254")
    bib_entry_2 = BibliographyEntryFactory.build(id="RN.1")
    for entry in (bib_entry_1, bib_entry_2):
        bibliography.create(entry, user)
    fragment = FragmentFactory.build(
        references=(
            ReferenceFactory.build(id="RN.0", pages="254"),
            ReferenceFactory.build(id="RN.1"),
        )
    )
    fragmentarium.create(fragment)

    queried_reference = fragment.references[0]
    result = client.simulate_get(
        "/fragments",
        params={"id": queried_reference.id, "pages": queried_reference.pages},
    )

    assert result.status == falcon.HTTP_OK
    fragment_expected = fragment.set_references(
        [
            fragment.references[0].set_document(bib_entry_1),
            fragment.references[1].set_document(bib_entry_2),
        ]
    )
    assert result.json == ApiFragmentInfoSchema(many=True).dump(
        [FragmentInfo.of(fragment_expected)]
    )
    assert "Cache-Control" not in result.headers
def test_search_reference_id_and_pages(pages, database, fragment_repository):
    """A fragment is found by the id of one of its references combined with a
    page number ("163"; `pages` is supplied by the test parametrization)."""
    fragment = FragmentFactory.build(
        references=(
            ReferenceFactory.build(pages=pages),
            ReferenceFactory.build(),
        )
    )
    database[COLLECTION].insert_one(SCHEMA.dump(fragment))

    results = fragment_repository.query_by_id_and_page_in_references(
        fragment.references[0].id, "163"
    )

    assert results == [fragment]
def test_fragment_search_references(fragment_finder, fragment_repository, when):
    """search_references delegates to the repository query and wraps each
    returned fragment in a FragmentInfo."""
    # Use the explicit .build() strategy for consistency with the other tests
    # (bare ReferenceFactory() would use the factory's create strategy).
    fragment = FragmentFactory.build(
        references=(ReferenceFactory.build(), ReferenceFactory.build())
    )
    references_id = fragment.references[0].id
    references_pages = fragment.references[0].pages
    when(fragment_repository).query_by_id_and_page_in_references(
        references_id, references_pages
    ).thenReturn([fragment])

    assert fragment_finder.search_references(references_id, references_pages) == [
        FragmentInfo.of(fragment)
    ]
def test_search_references_invalid_query(client, fragmentarium):
    """A non-numeric pages query parameter yields 422 Unprocessable Entity."""
    fragment = FragmentFactory.build(
        references=(ReferenceFactory.build(), ReferenceFactory.build())
    )
    fragmentarium.create(fragment)

    result = client.simulate_get(
        "/fragments",
        params={"id": fragment.references[0].id, "pages": "should be a number"},
    )

    assert result.status == falcon.HTTP_UNPROCESSABLE_ENTITY
def test_deserialize_manuscript() -> None:
    """ApiManuscriptSchema.load reconstructs a manuscript from its API DTO."""
    manuscript = ManuscriptFactory.build(
        references=(ReferenceFactory.build(with_document=False),)
    )
    # An absent museum number is serialized as an empty string.
    museum_number = (
        str(manuscript.museum_number) if manuscript.museum_number else ""
    )
    dto = {
        "id": manuscript.id,
        "siglumDisambiguator": manuscript.siglum_disambiguator,
        "museumNumber": museum_number,
        "accession": manuscript.accession,
        "periodModifier": manuscript.period_modifier.value,
        "period": manuscript.period.long_name,
        "provenance": manuscript.provenance.long_name,
        "type": manuscript.type.long_name,
        "notes": manuscript.notes,
        "colophon": manuscript.colophon.atf,
        "unplacedLines": manuscript.unplaced_lines.atf,
        "references": ApiReferenceSchema().dump(manuscript.references, many=True),
    }

    assert ApiManuscriptSchema().load(dto) == manuscript
def test_update_references(
    client, fragmentarium, bibliography, parallel_line_injector, user
):
    """POST .../references stores the references; the response and a
    subsequent GET both return the updated fragment DTO."""
    fragment = FragmentFactory.build()
    fragmentarium.create(fragment)
    reference = ReferenceFactory.build(with_document=True)
    bibliography.create(reference.document, ANY_USER)
    url = f"/fragments/{fragment.number}/references"
    payload = json.dumps({"references": [ReferenceSchema().dump(reference)]})

    post_result = client.simulate_post(url, body=payload)

    expected_json = create_response_dto(
        fragment.set_references((reference,)).set_text(
            parallel_line_injector.inject_transliteration(fragment.text)
        ),
        user,
        fragment.number == MuseumNumber("K", "1"),
    )
    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_json

    get_result = client.simulate_get(f"/fragments/{fragment.number}")
    assert get_result.json == expected_json
def test_update_references(
    fragment_updater,
    bibliography,
    user,
    fragment_repository,
    parallel_line_injector,
    changelog,
    when,
):
    """update_references persists the new references, records a changelog
    entry, and returns the fragment with parallel lines injected."""
    fragment = FragmentFactory.build()
    number = fragment.number
    references = (ReferenceFactory.build(),)
    reference = references[0]
    updated_fragment = fragment.set_references(references)
    injected_fragment = updated_fragment.set_text(
        parallel_line_injector.inject_transliteration(updated_fragment.text)
    )
    when(bibliography).find(reference.id).thenReturn(reference)
    # First lookup returns the stored fragment, second the updated one.
    when(fragment_repository).query_by_museum_number(number).thenReturn(
        fragment
    ).thenReturn(updated_fragment)
    when(fragment_repository).update_references(updated_fragment).thenReturn()
    when(changelog).create(
        "fragments",
        user.profile,
        {"_id": str(number), **SCHEMA.dump(fragment)},
        {"_id": str(number), **SCHEMA.dump(updated_fragment)},
    ).thenReturn()

    assert fragment_updater.update_references(number, references, user) == (
        injected_fragment,
        False,
    )
def test_query_by_museum_number_references(
    database, fragment_repository, bibliography_repository
):
    """Loading a fragment by museum number joins its references with the
    bibliography documents stored in the repository."""
    stored_reference = ReferenceFactory.build(with_document=True)
    fragment = LemmatizedFragmentFactory.build(references=(stored_reference,))
    database[COLLECTION].insert_one(
        FragmentSchema(exclude=["joins"]).dump(fragment)
    )
    bibliography_repository.create(stored_reference.document)

    assert fragment_repository.query_by_museum_number(fragment.number) == fragment
def test_update_references_invalid_id(client, fragmentarium):
    """Posting a reference whose id is not in the bibliography yields 422."""
    fragment = FragmentFactory.build()
    fragmentarium.create(fragment)
    unknown_reference = ReferenceFactory.build(with_document=True)
    payload = json.dumps(
        {"references": [ReferenceSchema().dump(unknown_reference)]}
    )

    post_result = client.simulate_post(
        f"/fragments/{fragment.number}/references", body=payload
    )

    assert post_result.status == falcon.HTTP_UNPROCESSABLE_ENTITY
def test_validate_references_invalid(
    bibliography_repository, bibliography, user, changelog, when
):
    """validate_references raises DataError listing every unknown entry id."""
    import re

    valid_reference = ReferenceFactory.build(with_document=True)
    first_invalid = ReferenceFactory.build(with_document=True)
    second_invalid = ReferenceFactory.build(with_document=True)
    bibliography.create(valid_reference.document, user)
    when(bibliography).find(valid_reference.id).thenReturn(valid_reference)
    when(bibliography).find(first_invalid.id).thenRaise(NotFoundError)
    when(bibliography).find(second_invalid.id).thenRaise(NotFoundError)
    # pytest.raises treats `match` as a regular expression, and the
    # factory-generated ids (and the trailing ".") may contain regex
    # metacharacters — escape the whole message so it matches literally.
    expected_error = re.escape(
        "Unknown bibliography entries: "
        f"{first_invalid.id}, {second_invalid.id}."
    )
    with pytest.raises(DataError, match=expected_error):
        bibliography.validate_references(
            [first_invalid, valid_reference, second_invalid]
        )
def test_update_references(fragment_repository):
    """update_references overwrites the stored references of a fragment."""
    fragment = FragmentFactory.build()
    fragment_repository.create(fragment)
    new_references = (ReferenceFactory.build(),)
    updated_fragment = fragment.set_references(new_references)

    fragment_repository.update_references(updated_fragment)

    assert (
        fragment_repository.query_by_museum_number(fragment.number)
        == updated_fragment
    )
def test_update_references_invalid(
    fragment_updater, bibliography, user, fragment_repository, when
):
    """An unknown bibliography id makes update_references raise DataError."""
    fragment = FragmentFactory.build()
    number = fragment.number
    unknown_reference = ReferenceFactory.build()
    when(bibliography).find(unknown_reference.id).thenRaise(NotFoundError)
    when(fragment_repository).query_by_museum_number(number).thenReturn(fragment)

    with pytest.raises(DataError):
        fragment_updater.update_references(number, (unknown_reference,), user)
def test_set_references():
    """set_references returns a fragment carrying the given references."""
    original = FragmentFactory.build()
    new_references = (ReferenceFactory.build(),)

    assert original.set_references(new_references).references == new_references
TranslationLineSchema, ) from ebl.transliteration.application.note_line_part_schemas import ( OneOfNoteLinePartSchema, ) from ebl.transliteration.application.one_of_line_schema import ( OneOfLineSchema, ParallelLineSchema, ) from ebl.transliteration.application.text_schema import TextSchema from ebl.transliteration.application.token_schemas import OneOfTokenSchema from ebl.transliteration.domain.line_number import LineNumber from ebl.transliteration.domain.markup import StringPart from ebl.transliteration.domain.parallel_line import ParallelComposition from ebl.transliteration.domain.translation_line import TranslationLine REFERENCES = (ReferenceFactory.build(with_document=True), ) MANUSCRIPT = ManuscriptFactory.build(references=REFERENCES) UNCERTAIN_FRAGMENTS = (MuseumNumber.of("K.1"), ) FIRST_MANUSCRIPT_LINE = ManuscriptLineFactory.build( manuscript_id=MANUSCRIPT.id) SECOND_MANUSCRIPT_LINE = ManuscriptLineFactory.build( manuscript_id=MANUSCRIPT.id) LINE_VARIANT = LineVariantFactory.build( manuscripts=(FIRST_MANUSCRIPT_LINE, SECOND_MANUSCRIPT_LINE), parallel_lines=(ParallelComposition(False, "name", LineNumber(2)), ), intertext=(StringPart("bar"), ), ) TRANSLATION_LINE = TranslationLine((StringPart("foo"), ), "en", None) LINE = LineFactory.build(variants=(LINE_VARIANT, ), translation=(TRANSLATION_LINE, ))
def test_validate_references(
    bibliography_repository, bibliography, user, changelog, when
):
    """A list containing only known references validates without raising."""
    known_reference = ReferenceFactory.build(with_document=True)
    when(bibliography).find(known_reference.id).thenReturn(known_reference)

    bibliography.validate_references([known_reference])
# Shared fixture constants for the chapter/manuscript tests in this module.
CHAPTER_NAME = "IIc"
ORDER = 1
MANUSCRIPT_ID = 9001
SIGLUM_DISAMBIGUATOR = "1c"
MUSEUM_NUMBER = MuseumNumber("BM", "x")
ACCESSION = ""
PERIOD_MODIFIER = PeriodModifier.LATE
PERIOD = Period.OLD_BABYLONIAN
PROVENANCE = Provenance.NINEVEH
TYPE = ManuscriptType.LIBRARY
NOTES = "some notes"
# Minimal one-line transliterations (LineNumber(..., True) — a prime line number).
COLOPHON = Transliteration.of_iterable(
    [TextLine(LineNumber(1, True), (Word.of([Reading.of_name("ku")]),))]
)
UNPLACED_LINES = Transliteration.of_iterable(
    [TextLine(LineNumber(4, True), (Word.of([Reading.of_name("bu")]),))]
)
REFERENCES = (ReferenceFactory.build(),)
LINE_NUMBER = LineNumber(1)
LINE_RECONSTRUCTION = (AkkadianWord.of((ValueToken.of("buāru"),)),)
IS_SECOND_LINE_OF_PARALLELISM = True
IS_BEGINNING_OF_SECTION = True
LABELS = (SurfaceLabel.from_label(Surface.OBVERSE),)
PARATEXT = (NoteLine((StringPart("note"),)), RulingDollarLine(Ruling.SINGLE))
OMITTED_WORDS = (1,)
NOTE = None
PARALLEL_LINES = (ParallelComposition(False, "a composition", LineNumber(7)),)
TRANSLATION = (TranslationLine((StringPart("foo"),), "en", None),)
SIGNS = ("FOO BAR",)
MANUSCRIPT_TEXT_1 = TextLine(
    LineNumber(1), (Word.of([Reading.of([ValueToken.of("ku")])]),)
)
import attr from ebl.fragmentarium.domain.fragment_info import FragmentInfo from ebl.fragmentarium.domain.record import Record, RecordEntry, RecordType from ebl.tests.factories.bibliography import ReferenceFactory from ebl.tests.factories.fragment import FragmentFactory FRAGMENT = FragmentFactory.build() FRAGMENT_WITH_REFERENCES = FragmentFactory.build( references=(ReferenceFactory.build(), ReferenceFactory.build())) def test_of(): matching_lines = (("1. kur", ), ) assert FragmentInfo.of(FRAGMENT, matching_lines) == FragmentInfo( FRAGMENT.number, FRAGMENT.accession, FRAGMENT.script, FRAGMENT.description, matching_lines, "", "", ) def test_of_with_references(): matching_lines = (("1. kur", ), ) assert FragmentInfo.of(FRAGMENT_WITH_REFERENCES, matching_lines) == FragmentInfo( FRAGMENT_WITH_REFERENCES.number, FRAGMENT_WITH_REFERENCES.accession,
def create(include_documents: bool) -> Tuple[Chapter, dict]:
    """Build a chapter fixture and the dict (DTO) the API is expected to emit.

    :param include_documents: when True the manuscript references carry their
        bibliography documents and the manuscript DTO includes all fields;
        when False the references are bare and "joins" is excluded from the
        manuscript schema dump.
    :return: the built ``Chapter`` together with its expected serialization.
    """
    references = (ReferenceFactory.build(with_document=include_documents),)
    manuscript = ManuscriptFactory.build(references=references)
    first_manuscript_line = ManuscriptLineFactory.build(manuscript_id=manuscript.id)
    second_manuscript_line = ManuscriptLineFactory.build(manuscript_id=manuscript.id)
    line = LineFactory.build(
        variants=(
            LineVariantFactory.build(
                manuscripts=(first_manuscript_line, second_manuscript_line),
                parallel_lines=(ParallelComposition(False, "name", LineNumber(1)),),
            ),
        )
    )
    chapter = ChapterFactory.build(
        manuscripts=(manuscript,),
        uncertain_fragments=(MuseumNumber.of("K.1"),),
        lines=(line,),
    )
    dto = {
        "textId": {
            "genre": chapter.text_id.genre.value,
            "category": chapter.text_id.category,
            "index": chapter.text_id.index,
        },
        "classification": chapter.classification.value,
        "stage": chapter.stage.value,
        "version": chapter.version,
        "name": chapter.name,
        "order": chapter.order,
        "signs": list(chapter.signs),
        "record": RecordSchema().dump(chapter.record),
        "parserVersion": chapter.parser_version,
        "manuscripts": ApiManuscriptSchema(
            exclude=[] if include_documents else ["joins"]
        ).dump(chapter.manuscripts, many=True),
        "uncertainFragments": [str(number) for number in chapter.uncertain_fragments],
        "lines": [
            {
                "number": line.number.label,
                "variants": [
                    {
                        # Reconstruction ATF: reconstruction text, then the
                        # note (if any), then each parallel line, newline-joined.
                        "reconstruction": "".join(
                            [
                                convert_to_atf(None, variant.reconstruction),
                                f"\n{variant.note.atf}" if variant.note else "",
                                *[
                                    f"\n{parallel_line.atf}"
                                    for parallel_line in variant.parallel_lines
                                ],
                            ]
                        ),
                        "reconstructionTokens": OneOfTokenSchema().dump(
                            variant.reconstruction, many=True
                        ),
                        "intertext": "".join(
                            part.value for part in variant.intertext
                        ),
                        "manuscripts": [
                            {
                                "manuscriptId": manuscript_line.manuscript_id,
                                "labels": [
                                    label.to_value()
                                    for label in manuscript_line.labels
                                ],
                                # Line number ATF without its trailing "." —
                                # only TextLines have one; other line types
                                # serialize to an empty string.
                                "number": manuscript_line.line.line_number.atf[:-1]
                                if isinstance(manuscript_line.line, TextLine)
                                else "",
                                # ATF body: the line's ATF with the
                                # "<number>. " prefix stripped, followed by
                                # the paratext lines, newline-joined.
                                "atf": "\n".join(
                                    [
                                        manuscript_line.line.atf[
                                            len(manuscript_line.line.line_number.atf)
                                            + 1 :
                                        ]
                                        if isinstance(
                                            manuscript_line.line, TextLine
                                        )
                                        else "",
                                        *[
                                            line.atf
                                            for line in manuscript_line.paratext
                                        ],
                                    ]
                                ).strip(),
                                "atfTokens": (
                                    OneOfLineSchema().dump(manuscript_line.line)[
                                        "content"
                                    ]
                                ),
                                "omittedWords": list(
                                    manuscript_line.omitted_words
                                ),
                            }
                            for manuscript_line in variant.manuscripts
                        ],
                    }
                    for variant in line.variants
                ],
                "isSecondLineOfParallelism": line.is_second_line_of_parallelism,
                "isBeginningOfSection": line.is_beginning_of_section,
                "translation": "\n".join(
                    translation.atf for translation in line.translation
                ),
            }
            for line in chapter.lines
        ],
    }
    return chapter, dto