Example #1
def test_update_references(
    client, fragmentarium, bibliography, parallel_line_injector, user
):
    fragment = FragmentFactory.build()
    fragmentarium.create(fragment)
    reference = ReferenceFactory.build(with_document=True)
    bibliography.create(reference.document, ANY_USER)
    references = [ReferenceSchema().dump(reference)]
    body = json.dumps({"references": references})
    url = f"/fragments/{fragment.number}/references"
    post_result = client.simulate_post(url, body=body)

    expected_json = create_response_dto(
        fragment.set_references((reference,)).set_text(
            parallel_line_injector.inject_transliteration(fragment.text)
        ),
        user,
        fragment.number == MuseumNumber("K", "1"),
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_json

    get_result = client.simulate_get(f"/fragments/{fragment.number}")
    assert get_result.json == expected_json
Example #2
def test_update_lemmatization(client, fragmentarium, user, database):
    transliterated_fragment = TransliteratedFragmentFactory.build()
    fragmentarium.create(transliterated_fragment)
    tokens = [
        list(line)
        for line in transliterated_fragment.text.lemmatization.tokens
    ]
    tokens[1][3] = LemmatizationToken(tokens[1][3].value, ("aklu I", ))
    lemmatization = Lemmatization(tokens)
    body = LemmatizationSchema().dumps(lemmatization)
    url = f"/fragments/{transliterated_fragment.number}/lemmatization"
    post_result = client.simulate_post(url, body=body)

    expected_json = create_response_dto(
        transliterated_fragment.update_lemmatization(lemmatization),
        user,
        transliterated_fragment.number == MuseumNumber("K", "1"),
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_json

    get_result = client.simulate_get(
        f"/fragments/{transliterated_fragment.number}")
    assert get_result.json == expected_json

    assert database["changelog"].find_one(
        {
            "resource_id": str(transliterated_fragment.number),
            "resource_type": "fragments",
            "user_profile.name": user.profile["name"],
        }
    )
Example #3
def test_update_genres(client, fragmentarium, user, database, parameters):
    fragment = FragmentFactory.build(genres=parameters["currentGenres"])
    fragment_number = fragmentarium.create(fragment)
    updates = {"genres": GenreSchema().dump(parameters["newGenres"], many=True)}
    post_result = client.simulate_post(
        f"/fragments/{fragment_number}/genres", body=json.dumps(updates)
    )
    expected_json = {
        **create_response_dto(
            fragment.set_genres(updates["genres"]), user, fragment.number == "K.1"
        )
    }

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_json

    get_result = client.simulate_get(f"/fragments/{fragment_number}")
    assert get_result.json == expected_json

    assert database["changelog"].find_one(
        {
            "resource_id": fragment_number,
            "resource_type": "fragments",
            "user_profile.name": user.profile["name"],
        }
    )
Example #4
def on_post(self, req: Request, resp: Response, number: str) -> None:
    try:
        user = req.context.user
        updated_fragment, has_photo = self._updater.update_genres(
            parse_museum_number(number),
            GenreSchema().load(req.media["genres"], many=True),
            user,
        )
        resp.media = create_response_dto(updated_fragment, user, has_photo)
    except ValueError as error:
        raise DataError(error)
Example #5
def test_update_transliteration_merge_lemmatization(
    new_transliteration,
    client,
    fragmentarium,
    signs,
    sign_repository,
    transliteration_factory,
    parallel_line_injector,
    user,
):
    for sign in signs:
        sign_repository.create(sign)
    lemmatized_fragment = LemmatizedFragmentFactory.build()
    fragmentarium.create(lemmatized_fragment)
    lines = lemmatized_fragment.text.atf.split("\n")
    lines[1] = new_transliteration
    updates = {
        "transliteration": "\n".join(lines),
        "notes": lemmatized_fragment.notes
    }
    updated_transliteration = transliteration_factory.create(
        updates["transliteration"], updates["notes"])
    updated_fragment = lemmatized_fragment.update_transliteration(
        updated_transliteration, user)
    expected_fragment = attr.evolve(
        updated_fragment,
        text=attr.evolve(
            updated_fragment.text,
            lines=parallel_line_injector.inject(updated_fragment.text.lines),
        ),
    )
    expected_json = create_response_dto(
        expected_fragment,
        user,
        lemmatized_fragment.number == MuseumNumber("K", "1"),
    )

    post_result = client.simulate_post(
        f"/fragments/{lemmatized_fragment.number}/transliteration",
        body=json.dumps(updates),
    )

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_json

    updated_fragment = client.simulate_get(
        f"/fragments/{lemmatized_fragment.number}").json
    assert updated_fragment == expected_json
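
This test (and test_get in Example #7) derives the expected fragment with attr.evolve, which copies a frozen attrs instance while replacing selected fields. A minimal illustrative sketch of that call, using a made-up Point class rather than anything from the project:

import attr

@attr.s(auto_attribs=True, frozen=True)
class Point:
    x: int
    y: int

# attr.evolve returns a new instance with the given attributes replaced,
# leaving the original untouched.
assert attr.evolve(Point(1, 2), y=3) == Point(1, 3)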
Example #6
def on_post(self, req: Request, resp: Response, number: str) -> None:
    try:
        user = req.context.user
        updated_fragment, has_photo = self._updater.update_transliteration(
            parse_museum_number(number),
            self._create_transliteration(req.media),
            user,
        )
        resp.media = create_response_dto(updated_fragment, user, has_photo)
    except TransliterationError as error:
        resp.status = falcon.HTTP_UNPROCESSABLE_ENTITY
        resp.media = {
            "title": resp.status,
            "description": str(error),
            "errors": error.errors,
        }
    except NotLowestJoinError as error:
        raise DataError(error) from error
Example #7
def test_get(client, fragmentarium, parallel_line_injector, user):
    transliterated_fragment = TransliteratedFragmentFactory.build()
    fragmentarium.create(transliterated_fragment)
    result = client.simulate_get(
        f"/fragments/{transliterated_fragment.number}")

    expected_fragment = attr.evolve(
        transliterated_fragment,
        text=attr.evolve(
            transliterated_fragment.text,
            lines=parallel_line_injector.inject(
                transliterated_fragment.text.lines),
        ),
    )
    expected = create_response_dto(
        expected_fragment,
        user,
        transliterated_fragment.number == MuseumNumber("K", "1"),
    )

    assert result.json == expected
    assert result.status == falcon.HTTP_OK
Example #8
def test_update_transliteration(client, fragmentarium, user, database):
    fragment = FragmentFactory.build()
    fragmentarium.create(fragment)
    updates = {
        "transliteration": "$ (the transliteration)",
        "notes": "some notes"
    }
    body = json.dumps(updates)
    url = f"/fragments/{fragment.number}/transliteration"
    post_result = client.simulate_post(url, body=body)

    expected_json = {
        **create_response_dto(
            fragment.update_transliteration(
                TransliterationUpdate(
                    parse_atf_lark(updates["transliteration"]), updates["notes"]),
                user,
            ),
            user,
            fragment.number == MuseumNumber("K", "1"),
        ),
        "signs": "",
    }

    assert post_result.status == falcon.HTTP_OK
    assert post_result.json == expected_json

    get_result = client.simulate_get(f"/fragments/{fragment.number}")
    assert get_result.json == expected_json

    assert database["changelog"].find_one(
        {
            "resource_id": str(fragment.number),
            "resource_type": "fragments",
            "user_profile.name": user.profile["name"],
        }
    )
Example #9
def on_get(self, req: Request, resp: Response, number: str):
    user: User = req.context.user
    fragment, has_photo = self._finder.find(parse_museum_number(number))
    resp.media = create_response_dto(fragment, user, has_photo)
Example #10
def on_post(self, req, resp, number):
    user = req.context.user
    updated_fragment, has_photo = self._updater.update_lemmatization(
        parse_museum_number(number), LemmatizationSchema().load(req.media), user
    )
    resp.media = create_response_dto(updated_fragment, user, has_photo)
Example #11
def on_post(self, req, resp, number) -> None:
    user = req.context.user
    updated_fragment, has_photo = self._updater.update_references(
        parse_museum_number(number), ReferencesDtoSchema().load(req.media), user
    )
    resp.media = create_response_dto(updated_fragment, user, has_photo)
Example #12
def test_create_response_dto(user):
    lemmatized_fragment = LemmatizedFragmentFactory.build(
        joins=Joins(((JoinFactory.build(),),))
    )
    has_photo = True
    assert create_response_dto(lemmatized_fragment, user, has_photo) == pydash.omit_by(
        {
            "museumNumber": attr.asdict(lemmatized_fragment.number),
            "accession": lemmatized_fragment.accession,
            "cdliNumber": lemmatized_fragment.cdli_number,
            "bmIdNumber": lemmatized_fragment.bm_id_number,
            "publication": lemmatized_fragment.publication,
            "description": lemmatized_fragment.description,
            "joins": JoinsSchema().dump(lemmatized_fragment.joins)["fragments"],
            "length": attr.asdict(
                lemmatized_fragment.length, filter=lambda _, value: value is not None
            ),
            "width": attr.asdict(
                lemmatized_fragment.width, filter=lambda _, value: value is not None
            ),
            "thickness": attr.asdict(
                lemmatized_fragment.thickness,
                filter=lambda _, value: value is not None,
            ),
            "collection": lemmatized_fragment.collection,
            "script": lemmatized_fragment.script,
            "notes": lemmatized_fragment.notes,
            "museum": lemmatized_fragment.museum,
            "signs": lemmatized_fragment.signs,
            "record": [
                {"user": entry.user, "type": entry.type.value, "date": entry.date}
                for entry in lemmatized_fragment.record.entries
            ],
            "folios": [
                attr.asdict(folio)
                for folio in lemmatized_fragment.folios.filter(user).entries
            ],
            "text": TextSchema().dump(lemmatized_fragment.text),
            "references": [
                {
                    "id": reference.id,
                    "type": reference.type.name,
                    "pages": reference.pages,
                    "notes": reference.notes,
                    "linesCited": list(reference.lines_cited),
                }
                for reference in lemmatized_fragment.references
            ],
            "uncuratedReferences": (
                [
                    attr.asdict(reference)
                    for reference in lemmatized_fragment.uncurated_references
                ]
                if lemmatized_fragment.uncurated_references is not None
                else None
            ),
            "atf": lemmatized_fragment.text.atf,
            "hasPhoto": has_photo,
            "genres": [
                {"category": genre.category, "uncertain": genre.uncertain}
                for genre in lemmatized_fragment.genres
            ],
            "lineToVec": [
                [
                    line_to_vec_encoding.value
                    for line_to_vec_encoding in line_to_vec_encodings
                ]
                for line_to_vec_encodings in lemmatized_fragment.line_to_vec
            ],
        },
        pydash.is_none,
    )
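
The expected dictionary is wrapped in pydash.omit_by with pydash.is_none, so every key whose value is None is dropped; optional fields such as "uncuratedReferences" are therefore expected to be absent from the DTO rather than serialized as null. A minimal sketch of that behaviour, with an illustrative dictionary that is not taken from the project:

import pydash

# omit_by drops keys whose values match the predicate; is_none matches None.
dto = pydash.omit_by(
    {"accession": "A 123", "uncuratedReferences": None}, pydash.is_none
)
assert dto == {"accession": "A 123"}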