def test_joins_lowest():
    """The lowest join is the smallest museum number among joins in the fragmentarium.

    A.1 sorts before B.1 but is excluded because it is not in the
    fragmentarium, so B.1 wins over B.2.
    """
    in_fragmentarium = Join(MuseumNumber("B", "1"), is_in_fragmentarium=True)
    excluded = Join(MuseumNumber("A", "1"), is_in_fragmentarium=False)
    higher = Join(MuseumNumber("B", "2"), is_in_fragmentarium=True)
    joins = Joins(((higher,), (in_fragmentarium, excluded)))

    assert_that(joins.lowest, equal_to(MuseumNumber("B", "1")))
def test_update_transliteration_not_lowest_join(client, fragment_repository) -> None:
    """Posting a transliteration to a fragment that is not the lowest join
    in its group yields 422 Unprocessable Entity."""
    museum_number = MuseumNumber("X", "2")
    fragment = FragmentFactory.build(number=museum_number)
    # X.1 is a lower join than X.2, so X.2 must reject the update.
    fragment_repository.create_join([[Join(museum_number)], [Join(MuseumNumber("X", "1"))]])

    payload = json.dumps({"transliteration": "1. kururu", "notes": ""})
    post_result = client.simulate_post(
        f"/fragments/{fragment.number}/transliteration", body=payload
    )

    assert post_result.status == falcon.HTTP_UNPROCESSABLE_ENTITY
    assert post_result.json == {
        "title": "422 Unprocessable Entity",
        "description": "Invalid transliteration",
        "errors": [{"description": "Invalid value", "lineNumber": 1}],
    }
def test_add_lowest_join_transliteration(user):
    """Updating via the lowest-join path raises when the fragment is not the
    lowest join (X.1 is lower than this fragment's X.2)."""
    fragment = FragmentFactory.build(
        number=MuseumNumber.of("X.2"),
        joins=Joins([[Join(MuseumNumber.of("X.1"), is_in_fragmentarium=True)]]),
    )
    update = TransliterationUpdate(parse_atf_lark(Atf("1. x x")), fragment.notes)

    with pytest.raises(NotLowestJoinError):
        fragment.update_lowest_join_transliteration(update, user)
def test_join_default() -> None:
    """A Join constructed from a museum number alone gets falsy defaults."""
    assert_that(
        Join(MuseumNumber("X", "1")),
        has_properties(
            {
                "is_checked": False,
                "joined_by": "",
                "date": "",
                "note": "",
                "legacy_data": "",
            }
        ),
    )
def test_query_by_museum_number_joins(database, fragment_repository):
    """Joins stored in their own collection are attached when querying a fragment."""
    museum_number = MuseumNumber("X", "1")
    joins = (
        Join(museum_number, is_in_fragmentarium=True),
        Join(MuseumNumber("X", "2"), is_in_fragmentarium=False),
    )
    fragment = LemmatizedFragmentFactory.build(
        number=museum_number, joins=Joins(((joins[0],), (joins[1],)))
    )
    # The fragment document is stored without joins; joins live in a
    # separate collection keyed by group index.
    database[COLLECTION].insert_one(FragmentSchema(exclude=["joins"]).dump(fragment))
    database[JOINS_COLLECTION].insert_one(
        {
            "fragments": [
                {
                    **JoinSchema(exclude=["is_in_fragmentarium"]).dump(join),
                    "group": group,
                }
                for group, join in enumerate(joins)
            ]
        }
    )

    assert fragment_repository.query_by_museum_number(fragment.number) == fragment
def test_query_manuscripts_with_joins_by_chapter(database, text_repository) -> None:
    """A join stored for the manuscript's museum number is attached to the
    manuscript returned for the chapter."""
    when_chapter_in_collection(database)
    join = Join(MUSEUM_NUMBER)
    database[JOINS_COLLECTION].insert_one(
        {
            "fragments": [
                {**JoinSchema(exclude=["is_in_fragmentarium"]).dump(join), "group": 0}
            ]
        }
    )

    expected = [attr.evolve(CHAPTER.manuscripts[0], joins=Joins(((join,),)))]
    assert text_repository.query_manuscripts_with_joins_by_chapter(CHAPTER.id_) == expected
def test_update_transliteration(
    number,
    ignore_lowest_join,
    fragment_updater,
    user,
    fragment_repository,
    changelog,
    parallel_line_injector,
    when,
):
    """The updater applies a transliteration update, records a changelog
    entry, persists the fragment, and returns the injected fragment."""
    fragment = TransliteratedFragmentFactory.build(
        number=number,
        joins=Joins([[Join(MuseumNumber.of("X.1"), is_in_fragmentarium=True)]]),
        line_to_vec=None,
    )
    number = fragment.number
    update = TransliterationUpdate(
        parse_atf_lark(Atf("1. x x\n2. x")), "updated notes", "X X\nX"
    )
    fragment = fragment.update_transliteration(update, user)
    # The updater is expected to return the fragment with parallel lines injected.
    injected_fragment = fragment.set_text(
        parallel_line_injector.inject_transliteration(fragment.text)
    )

    when(fragment_repository).query_by_museum_number(number).thenReturn(fragment)
    # Old and new changelog entries are equal here because the repository
    # already returns the updated fragment; mockito matches by equality.
    dump = {"_id": str(number), **SCHEMA.dump(fragment)}
    when(changelog).create("fragments", user.profile, dump, dump).thenReturn()
    when(fragment_repository).update_transliteration(fragment).thenReturn()

    result = fragment_updater.update_transliteration(
        number, update, user, ignore_lowest_join
    )

    assert result == (injected_fragment, False)
def test_joins_fragments_sorting():
    """`fragments` orders joins within each group and orders the groups by
    their museum numbers."""

    def join(prefix: str, number: str) -> Join:
        # Local factory to keep the fixture data readable.
        return Join(MuseumNumber(prefix, number))

    joins = Joins(
        (
            (join("B", "2"), join("B", "1")),
            (join("Z", "0"), join("A", "3")),
        )
    )

    assert_that(
        joins.fragments,
        contains_exactly(
            contains_exactly(join("A", "3"), join("Z", "0")),
            contains_exactly(join("B", "1"), join("B", "2")),
        ),
    )
def test_update_update_transliteration_not_lowest_join(
    fragment_updater, user, fragment_repository, when
):
    """With ignore_lowest_join=False, updating a fragment that is not the
    lowest join in its group raises NotLowestJoinError."""
    museum_number = MuseumNumber.of("X.2")
    fragment = TransliteratedFragmentFactory.build(
        number=museum_number,
        joins=Joins([[Join(MuseumNumber.of("X.1"), is_in_fragmentarium=True)]]),
    )
    when(fragment_repository).query_by_museum_number(museum_number).thenReturn(fragment)

    update = TransliterationUpdate(parse_atf_lark("1. x"), "updated notes", "X")
    with pytest.raises(NotLowestJoinError):
        fragment_updater.update_transliteration(museum_number, update, user, False)
def test_join() -> None:
    """All constructor arguments are exposed as same-named properties."""
    expected = {
        "museum_number": MuseumNumber("X", "1"),
        "is_checked": True,
        "joined_by": "Test User",
        "date": "today",
        "note": "test join",
        "legacy_data": "old stuff",
    }
    # Positional order mirrors the property order above.
    join = Join(
        expected["museum_number"],
        expected["is_checked"],
        expected["joined_by"],
        expected["date"],
        expected["note"],
        expected["legacy_data"],
    )

    assert_that(join, has_properties(expected))
def test_serialization_and_deserialization():
    """Dumping a fragment with joins and loading the result round-trips."""
    original = LemmatizedFragmentFactory.build(
        joins=Joins(((Join(MuseumNumber("X", "1")),),))
    )
    schema = FragmentSchema()

    assert schema.load(schema.dump(original)) == original
def make_join(self, data, **kwargs):
    """Build a Join from the deserialized field dict.

    Presumably a marshmallow ``@post_load`` hook (the ``**kwargs`` catch-all
    matches that signature) — decorator not visible here; confirm in context.
    """
    return Join(**data)