Example #1
def test_import_documents(app, db):
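    """Test that importing a record creates a document and its eitem."""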
    document_cls = current_app_ils.document_record_cls
    eitem_search_cls = current_app_ils.eitem_search_cls
    eitem_cls = current_app_ils.eitem_record_cls

    json_data = load_json_from_datadir("create_documents_data.json",
                                       relpath="importer")
    importer = Importer(json_data[0], "springer")
    report = importer.import_record()
    assert report["document_json"]
    assert report["action"] == "create"

    document = document_cls.get_record_by_pid(report["document_json"]["pid"])
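    # wait for indexing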
    time.sleep(1)
    search = eitem_search_cls().search_by_document_pid(
        document_pid=document["pid"])
    results = search.execute()
    assert results.hits.total.value == 1

    eitem_pid = results.hits[0].pid
    eitem = eitem_cls.get_record_by_pid(eitem_pid)

    assert eitem["document_pid"] == document["pid"]

    assert "_eitem" not in document
    assert "agency_code" not in document

    assert eitem["created_by"] == {"type": "import", "value": "springer"}
    assert document["created_by"] == {"type": "import", "value": "springer"}
Example #2
def test_add_document_to_serial(app, db):
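    """Test that importing a document also creates and links its serial."""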
    document_cls = current_app_ils.document_record_cls
    series_cls = current_app_ils.series_record_cls

    json_data = load_json_from_datadir("new_document_with_serial.json",
                                       relpath="importer")

    importer = Importer(json_data[0], "springer")

    report = importer.import_record()
    assert report["document_json"]
    assert report["action"] == "create"
    assert report["series"]

    created_document = document_cls.get_record_by_pid(
        report["document_json"]["pid"])

    series_list = []
    for series in report["series"]:
        series_list.append(
            series_cls.get_record_by_pid(series["series_record"]["pid"]))

    assert series_list[0]["title"] == "Advances in Nuclear Physics ;"
    assert series_list[0]["identifiers"] == [{
        "scheme": "ISSN",
        "value": "123455"
    }]
    # check that the serial relation was created
    assert (created_document["relations_extra_metadata"]["serial"][0]
            ["pid_value"] == series_list[0]["pid"])
    assert (created_document["relations_extra_metadata"]["serial"][0]["volume"]
            == "26")
Example #3
def test_replace_eitems_by_provider_priority(importer_test_data):
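    """Test that existing eitems are replaced based on provider priority."""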
    document_cls = current_app_ils.document_record_cls
    eitem_search_cls = current_app_ils.eitem_search_cls
    eitem_cls = current_app_ils.eitem_record_cls

    json_data = load_json_from_datadir("modify_document_data.json",
                                       relpath="importer")

    document_before_update = document_cls.get_record_by_pid("docid-1")
    search = eitem_search_cls().search_by_document_pid(
        document_pid=document_before_update["pid"])
    results = search.execute()
    assert results.hits.total.value == 1
    eitem_before_update = eitem_cls.get_record_by_pid(results.hits[0].pid)
    assert eitem_before_update["created_by"] == {
        "type": "import",
        "value": "ebl",
    }

    # enable provider-priority handling (class-level flag on the importer)
    ProviderImporter = Importer
    ProviderImporter.IS_PROVIDER_PRIORITY_SENSITIVE = True
    importer = ProviderImporter(json_data[1], "springer")
    report = importer.import_record()
    assert report["document_json"]
    assert report["action"] == "update"

    updated_document = document_cls.get_record_by_pid(
        report["document_json"]["pid"])
    # wait for indexing
    time.sleep(1)

    search = eitem_search_cls().search_by_document_pid(
        document_pid=updated_document["pid"])
    results = search.execute()

    # check that previous eitems were deleted and only the one from this provider was added
    assert results.hits.total.value == 1
    eitem_pid = results.hits[0].pid
    updated_eitem = eitem_cls.get_record_by_pid(eitem_pid)
    assert updated_eitem["created_by"] == {
        "type": "import",
        "value": "springer",
    }
    assert updated_eitem["description"] == "EITEM TO OVERWRITE"
Example #4
def testdata_most_loaned(db, testdata):
    """Create, index and return test data for most loans tests."""
    most_loaned = load_json_from_datadir("loans_most_loaned.json")
    recs = _create_records(db, most_loaned, Loan, CIRCULATION_LOAN_PID_TYPE)

    ri = RecordIndexer()
    for rec in recs:
        ri.index(rec)

    current_search.flush_and_refresh(index="loans")

    return {
        "locations": testdata["locations"],
        "internal_locations": testdata["internal_locations"],
        "documents": testdata["documents"],
        "items": testdata["items"],
        "loans": most_loaned,
        "series": testdata["series"],
    }
Example #5
def test_modify_documents(importer_test_data):
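    """Test that re-importing an existing document updates it and its eitem."""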
    document_cls = current_app_ils.document_record_cls
    eitem_search_cls = current_app_ils.eitem_search_cls
    eitem_cls = current_app_ils.eitem_record_cls

    json_data = load_json_from_datadir("modify_document_data.json",
                                       relpath="importer")

    importer = Importer(json_data[0], "springer")
    report = importer.import_record()
    assert report["document_json"]
    assert report["action"] == "update"

    updated_document = document_cls.get_record_by_pid(
        report["document_json"]["pid"])
    # wait for indexing
    current_search.flush_and_refresh(index="*")

    search = eitem_search_cls().search_by_document_pid(
        document_pid=updated_document["pid"])
    results = search.execute()

    assert results.hits.total.value == 1

    eitem_pid = results.hits[0].pid
    updated_eitem = eitem_cls.get_record_by_pid(eitem_pid)

    # check that the new identifier was added
    assert updated_document["identifiers"] == [
        {
            "scheme": "DOI",
            "value": "0123456789"
        },
        {
            "scheme": "ISBN",
            "value": "0987654321"
        },
    ]

    assert updated_eitem["description"] == "Modified description"
Example #6
def test_data_migration(app, db, es_clear, patrons):
    """Prepare minimal data for migration tests."""
    data = load_json_from_datadir("locations.json")
    locations = _create_records(db, data, Location, LOCATION_PID_TYPE)

    data = load_json_from_datadir("internal_locations.json")
    int_locs = _create_records(
        db, data, InternalLocation, INTERNAL_LOCATION_PID_TYPE
    )

    data = load_json_from_datadir("documents.json")
    documents = _create_records(db, data, Document, DOCUMENT_PID_TYPE)

    data = load_json_from_datadir("items.json")
    items = _create_records(db, data, Item, ITEM_PID_TYPE)

    data = load_json_from_datadir("ill_libraries.json")
    ill_libraries = _create_records(db, data, Provider, PROVIDER_PID_TYPE)

    data = load_json_from_datadir("vendors.json")
    vendors = _create_records(db, data, Provider, PROVIDER_PID_TYPE)

    # index
    ri = RecordIndexer()
    for rec in (
        locations + int_locs + documents + items + ill_libraries + vendors
    ):
        ri.index(rec)

    # wait for indexing
    time.sleep(1)
    create_default_records()
    patron = Patron(patrons[0].id)
    PatronIndexer().index(patron)

    current_search.flush_and_refresh(index="*")
Example #7
def testdata(app, db, es_clear, users):
    """Create, index and return test data."""
    data = load_json_from_datadir("locations.json")
    locations = _create_records(db, data, Location, LOCATION_PID_TYPE)

    data = load_json_from_datadir("internal_locations.json")
    int_locs = _create_records(db, data, InternalLocation,
                               INTERNAL_LOCATION_PID_TYPE)

    data = load_json_from_datadir("series.json")
    series = _create_records(db, data, Series, SERIES_PID_TYPE)

    data = load_json_from_datadir("documents.json")
    documents = _create_records(db, data, Document, DOCUMENT_PID_TYPE)

    data = load_json_from_datadir("items.json")
    items = _create_records(db, data, Item, ITEM_PID_TYPE)

    data = load_json_from_datadir("eitems.json")
    eitems = _create_records(db, data, EItem, EITEM_PID_TYPE)

    data = load_json_from_datadir("loans.json")
    loans = _create_records(db, data, Loan, CIRCULATION_LOAN_PID_TYPE)

    data = load_json_from_datadir("document_requests.json")
    doc_reqs = _create_records(db, data, DocumentRequest,
                               DOCUMENT_REQUEST_PID_TYPE)

    data = load_json_from_datadir("acq_vendors.json")
    acq_vendors = _create_records(db, data, Vendor, VENDOR_PID_TYPE)

    data = load_json_from_datadir("acq_orders.json")
    acq_orders = _create_records(db, data, Order, ORDER_PID_TYPE)

    data = load_json_from_datadir("ill_libraries.json")
    ill_libraries = _create_records(db, data, Library, LIBRARY_PID_TYPE)

    data = load_json_from_datadir("ill_borrowing_requests.json")
    ill_brw_reqs = _create_records(db, data, BorrowingRequest,
                                   BORROWING_REQUEST_PID_TYPE)

    # index
    ri = RecordIndexer()
    for rec in (locations + int_locs + series + documents + items + eitems +
                loans + doc_reqs + acq_vendors + acq_orders + ill_libraries +
                ill_brw_reqs):
        ri.index(rec)

    current_search.flush_and_refresh(index="*")

    return {
        "document_requests": doc_reqs,
        "documents": documents,
        "internal_locations": int_locs,
        "items": items,
        "eitems": eitems,
        "loans": loans,
        "locations": locations,
        "series": series,
        "acq_vendors": acq_vendors,
        "acq_orders": acq_orders,
        "ill_libraries": ill_libraries,
        "ill_brw_reqs": ill_brw_reqs,
    }
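
Note: examples #4, #6 and #7 build their records through a _create_records helper that is also defined elsewhere in the test suite. The sketch below only illustrates what it plausibly does, under the assumption that each record class exposes a create() classmethod that persists the record and mints its PID; the real helper may instead mint PIDs explicitly with pid_type or validate records against their JSON schemas.

def _create_records(db, objs, cls, pid_type):
    """Create records of cls from a list of dicts (illustrative sketch only).

    Assumption: cls.create() persists the record and mints its PID, so
    pid_type is kept here only for symmetry with the call sites above.
    """
    recs = [cls.create(obj) for obj in objs]
    db.session.commit()
    return recs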