Example #1
    def create_record(cls, dump):
        """Create a new record from dump."""
        record_uuid = uuid.uuid4()
        try:
            # with db.session.begin_nested():
            provider = DocumentIdProvider.create(
                object_type="rec",
                object_uuid=record_uuid,
            )
            timestamp, json_data = dump.revisions[-1]
            json_data["pid"] = provider.pid.pid_value
            json_data = clean_created_by_field(json_data)

            document = Document.create(json_data, record_uuid)
            document.model.created = dump.created.replace(tzinfo=None)
            document.model.updated = timestamp.replace(tzinfo=None)
            document.commit()
            db.session.commit()

            return document
        except IlsValidationError as e:
            click.secho("Field: {}".format(e.errors[0].res["field"]), fg="red")
            click.secho(e.original_exception.message, fg="red")
            db.session.rollback()
            raise e
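This loader expects a dump object exposing created and revisions; a minimal sketch of how it might be exercised, where FakeDump and the commented call site are assumptions for illustration only:

from datetime import datetime


class FakeDump:
    """Stub standing in for a migration dump (assumed shape)."""

    def __init__(self, json_data):
        self.created = datetime(2020, 1, 1)
        # create_record only reads the latest revision: (timestamp, json)
        self.revisions = [(datetime(2020, 1, 2), json_data)]


# Hypothetical call site; the owning loader class name is an assumption.
# document = SomeDocumentLoader.create_record(FakeDump({"title": "A title"}))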
Example #2
def test_document_creation_refs(app):
    """Test creation of a document."""
    d = dict(pid="a1bc",
             title="Test title",
             authors=[dict(full_name="John Doe")],
             publication_year="2010")
    doc = Document.create(d)

    _assert_extra_fields(doc)
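_assert_extra_fields is defined elsewhere in the test module; a plausible sketch, assuming it only checks that Document.create injected the computed reference fields that the update test further below deletes and expects back:

def _assert_extra_fields(doc):
    """Assumed helper: verify the reference fields added on creation."""
    for field in ("circulation", "relations", "eitems", "items", "stock"):
        assert field in doc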
Example #3
def test_journal_relation_from_publication_info(app):
    """Test journal-document relation from publication info field."""

    document_data = {
        "$schema": "https://127.0.0.1:5000/schemas/documents/document-v1.0.0.json",
        "created_by": {"type": "script", "value": "test"},
        "pid": "4321",
        "legacy_recid": "1111",
        "title": "Book: A Book",
        "document_type": "BOOK",
        "authors": [{"full_name": "Author Author"}],
        "abstracts": [{"value": "This is an abstract"}],
        "language": ["it"],
        "publication_year": "2020",
        "identifiers": [{"scheme": "ISBN", "value": "0123456789"}],
        "cover_metadata": {"ISBN": "0123456789"},
        "publication_info": [{"journal_issue": "issue"}],
        "_migration": {
            "has_journal": True,
            "journal_record_legacy_recids": [
                {
                    "recid": "1234",
                    "volume": None,
                }
            ],
        },
    }

    journal_data = {
        "$schema": "https://127.0.0.1:5000/schemas/series/series-v1.0.0.json",
        "pid": "serid-4",
        "title": "Dispersion Forces",
        "authors": ["Buhmann, Stefan Yoshi"],
        "abstract": "This is a multipart monograph",
        "mode_of_issuance": "SERIAL",
        "legacy_recid": "1234",
    }

    record_uuid = mint_record_pid(
        DOCUMENT_PID_TYPE, "pid", {"pid": document_data["pid"]}
    )
    document = Document.create(document_data, record_uuid)
    record_uuid = mint_record_pid(
        SERIES_PID_TYPE, "pid", {"pid": journal_data["pid"]}
    )
    journal = Series.create(journal_data, record_uuid)
    legacy_recid_minter(journal["legacy_recid"], record_uuid)
    db.session.commit()
    ri = RecordIndexer()
    ri.index(document)
    ri.index(journal)
    current_search.flush_and_refresh(index="*")

    link_documents_and_serials()

    document_rec = Document.get_record_by_pid(document["pid"])
    assert "serial" in document_rec.relations
Example #4
def create_multipart_volumes(pid, multipart_legacy_recid, migration_volumes):
    """Create multipart volume documents."""
    volumes = {}
    # Combine all volume data by volume number
    click.echo('Creating volume for {}...'.format(multipart_legacy_recid))
    for obj in migration_volumes:
        volume_number = obj['volume']
        if volume_number not in volumes:
            volumes[volume_number] = {}
        volume = volumes[volume_number]
        for key in obj:
            if key != 'volume':
                if key in volume:
                    raise KeyError(
                        'Duplicate key "{}" for multipart {}'.format(
                            key, multipart_legacy_recid))
                volume[key] = obj[key]

    volume_numbers = iter(sorted(volumes.keys()))

    # Re-use the current record for the first volume
    # TODO review this - there are more cases of multiparts
    first_volume = next(volume_numbers)
    first = Document.get_record_by_pid(pid)
    if 'title' in volumes[first_volume]:
        first['title'] = volumes[first_volume]['title']
    # the volume number applies even when no title was provided
    first['volume'] = first_volume
    first['_migration']['multipart_legacy_recid'] = multipart_legacy_recid
    # to be tested
    if 'legacy_recid' in first:
        del first['legacy_recid']
    first.commit()
    yield first

    # Create new records for the rest
    for number in volume_numbers:
        temp = first.copy()
        temp['title'] = volumes[number]['title']
        temp['volume'] = number
        record_uuid = uuid.uuid4()
        provider = DocumentIdProvider.create(
            object_type='rec',
            object_uuid=record_uuid,
        )
        temp['pid'] = provider.pid.pid_value
        record = Document.create(temp, record_uuid)
        record.commit()
        yield record
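Because create_multipart_volumes is a generator, nothing is persisted until it is consumed; a minimal call-site sketch, where the pid, legacy recid and volume payload are made-up values and the indexing step is an assumption:

from invenio_db import db
from invenio_indexer.api import RecordIndexer

volumes = [
    {"volume": "1", "title": "Volume one title"},
    {"volume": "2", "title": "Volume two title"},
]
for record in create_multipart_volumes("docid-1", "90001", volumes):
    db.session.commit()  # the generator only calls record.commit()
    RecordIndexer().index(record)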
Example #5
def test_document_creation_refs(app):
    """Test creation of a document."""
    d = dict(
        pid="a1bc",
        created_by={
            "type": "script",
            "value": "demo"
        },
        title="Test title",
        authors=[dict(full_name="John Doe")],
        publication_year="2010",
        document_type="BOOK",
    )
    doc = Document.create(d)

    _assert_extra_fields(doc)
Example #6
def test_document_update_refs(app):
    """Test update of a document."""
    d = dict(pid="a1bc",
             title="Test title",
             authors=[dict(full_name="John Doe")],
             publication_year="2010")
    doc = Document.create(d)
    del doc["circulation"]
    del doc["relations"]
    del doc["eitems"]
    del doc["items"]
    del doc["stock"]
    doc.update(dict(title="Test title 2"))

    _assert_extra_fields(doc)
    assert doc["title"] == "Test title 2"
Example #7
        def create_record(cls, dump):
            """Create a new record from dump."""
            # Reserve record identifier, create record and recid pid in one
            # operation.
            record_uuid = uuid.uuid4()
            provider = DocumentIdProvider.create(
                object_type='rec',
                object_uuid=record_uuid,
            )
            timestamp, json_data = dump.revisions[-1]
            json_data['pid'] = provider.pid.pid_value
            try:
                document = Document.create(json_data, record_uuid)
                document.model.created = dump.created.replace(tzinfo=None)
                document.model.updated = timestamp.replace(tzinfo=None)
                document.commit()
                db.session.commit()

                return document
            except IlsValidationError as e:
                click.secho(e.original_exception.message, fg='red')
Example #8
    def create_record(cls, dump):
        """Create a new record from dump."""
        # Reserve record identifier, create record and recid pid in one
        # operation.
        timestamp, data = dump.latest
        record = Record.create(data)
        record_uuid = uuid.uuid4()
        provider = DocumentIdProvider.create(
            object_type='rec',
            object_uuid=record_uuid,
        )
        timestamp, json_data = dump.rest[-1]
        json_data['pid'] = provider.pid.pid_value
        record.model.json = json_data
        record.model.created = dump.created.replace(tzinfo=None)
        record.model.updated = timestamp.replace(tzinfo=None)
        document = Document.create(record.model.json, record_uuid)
        document.commit()
        db.session.commit()

        return document
Example #9
def testdata(app, db, es_clear, system_user):
    """Create, index and return test data."""
    indexer = RecordIndexer()

    locations = load_json_from_datadir("locations.json")
    for location in locations:
        record = Location.create(location)
        mint_record_pid(LOCATION_PID_TYPE, "pid", record)
        record.commit()
        db.session.commit()
        indexer.index(record)

    internal_locations = load_json_from_datadir("internal_locations.json")
    for internal_location in internal_locations:
        record = InternalLocation.create(internal_location)
        mint_record_pid(
            INTERNAL_LOCATION_PID_TYPE, "pid", record
        )
        record.commit()
        db.session.commit()
        indexer.index(record)

    documents = load_json_from_datadir("documents.json")
    for doc in documents:
        record = Document.create(doc)
        mint_record_pid(DOCUMENT_PID_TYPE, "pid", record)
        record.commit()
        db.session.commit()
        indexer.index(record)

    items = load_json_from_datadir("items.json")
    for item in items:
        record = Item.create(item)
        mint_record_pid(ITEM_PID_TYPE, "pid", record)
        record.commit()
        db.session.commit()
        indexer.index(record)

    loans = load_json_from_datadir("loans.json")
    for loan in loans:
        record = Loan.create(loan)
        mint_record_pid(CIRCULATION_LOAN_PID_TYPE, "pid", record)
        record.commit()
        db.session.commit()
        indexer.index(record)

    series = load_json_from_datadir("series.json")
    for serie in series:
        record = Series.create(serie)
        mint_record_pid(SERIES_PID_TYPE, "pid", record)
        record.commit()
        db.session.commit()
        indexer.index(record)

    # flush all indices after indexing, otherwise ES won't be ready for tests
    current_search.flush_and_refresh(index='*')
    return {
        "documents": documents,
        "items": items,
        "loans": loans,
        "locations": locations,
        "series": series,
    }
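load_json_from_datadir is a small helper not shown on this page; a hedged sketch, assuming it simply reads a JSON file from a test data directory (the tests/data default path is made up):

import json
import os


def load_json_from_datadir(filename, datadir="tests/data"):
    """Assumed helper: load fixture records from a JSON file."""
    with open(os.path.join(datadir, filename)) as fp:
        return json.load(fp)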
Example #10
def create_multipart_volumes(
    pid, multipart_legacy_recid, migration_volumes, document_base_metadata
):
    """Create multipart volume documents."""
    volumes = {}
    # Combine all volume data by volume number
    click.echo("Creating volume for {}...".format(multipart_legacy_recid))
    for obj in migration_volumes:
        volume_number = obj["volume"]
        if volume_number not in volumes:
            volumes[volume_number] = {}
        volume = volumes[volume_number]
        if "isbn" in obj:
            # the isbn can represent both a document and an eitem
            if "isbns" not in volume:
                volume["isbns"] = []
            volume["isbns"].append({
                "value": obj["isbn"],
                "is_electronic": bool(obj["is_electronic"])
            })
            # TODO physical description
        elif "barcode" in obj:
            # the barcode represents an item
            if "items" not in volume:
                volume["items"] = []
            volume["items"].append({
                "barcode": obj["barcode"]
            })
        else:
            # all other fields should be treated as
            # additional metadata for the document
            for key in obj:
                if key != "volume":
                    if key in volume:
                        # abort in case of conflict
                        raise KeyError(
                            'Duplicate key "{}" for multipart {}'.format(
                                key, multipart_legacy_recid
                            )
                        )
                    volume[key] = obj[key]

    volume_numbers = iter(sorted(volumes.keys()))

    inherited_metadata = deepcopy(document_base_metadata)
    inherited_metadata["_migration"]["multipart_legacy_recid"] = \
        multipart_legacy_recid
    inherited_metadata["authors"] = \
        inherited_metadata["_migration"]["authors"] \
        if "authors" in inherited_metadata["_migration"] else []
    inherited_metadata["serial_title"] = inherited_metadata.get("title")

    # to be tested
    if "legacy_recid" in inherited_metadata:
        del inherited_metadata["legacy_recid"]

    # Create new records for the rest
    for number in volume_numbers:
        volume = volumes[number]
        temp = inherited_metadata.copy()
        if "title" in volume and volume["title"]:
            temp["title"] = volume["title"]
        temp["volume"] = number
        # TODO possibly more fields to merge

        record_uuid = uuid.uuid4()
        try:
            with db.session.begin_nested():
                provider = DocumentIdProvider.create(
                    object_type="rec", object_uuid=record_uuid
                )
                temp["pid"] = provider.pid.pid_value
                record = Document.create(temp, record_uuid)
                record.commit()
            db.session.commit()
            yield record
        except IlsValidationError as e:
            print("Validation error: {}"
                  .format(str(e.original_exception.message)))
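The branches above define the row shape this generator expects from migration_volumes; a hypothetical payload with made-up ISBN and barcode values:

migration_volumes = [
    {"volume": "1", "title": "Volume one"},
    {"volume": "1", "isbn": "9780000000001", "is_electronic": "1"},
    {"volume": "1", "barcode": "CM-0001"},
    {"volume": "2", "title": "Volume two"},
]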