def create_record(cls, dump, rectype):
    """Create a new record from dump."""
    series_cls = current_app_ils.series_record_cls
    record_uuid = uuid.uuid4()
    try:
        with db.session.begin_nested():
            provider = SeriesIdProvider.create(
                object_type="rec",
                object_uuid=record_uuid,
            )
            timestamp, json_data = dump.revisions[-1]
            json_data["pid"] = provider.pid.pid_value
            json_data = clean_created_by_field(json_data)
            if rectype == "journal":
                legacy_recid_minter(json_data["legacy_recid"], record_uuid)
            add_cover_metadata(json_data)

            series = series_cls.create(json_data, record_uuid)
            series.model.created = dump.created.replace(tzinfo=None)
            series.model.updated = timestamp.replace(tzinfo=None)
            series.commit()
        db.session.commit()
        return series
    except IlsValidationError as e:
        click.secho("Field: {}".format(e.errors[0].res["field"]), fg="red")
        click.secho(e.original_exception.message, fg="red")
        raise e
def test_journal_relation_from_publication_info(app):
    """Test journal-document relation from publication info field."""
    document_data = {
        "$schema": "https://127.0.0.1:5000/schemas/documents/document-v1.0.0.json",
        "created_by": {"type": "script", "value": "test"},
        "pid": "4321",
        "legacy_recid": "1111",
        "title": "Book: A Book",
        "document_type": "BOOK",
        "authors": [{"full_name": "Author Author"}],
        "abstracts": [{"value": "This is an abstract"}],
        "language": ["it"],
        "publication_year": "2020",
        "identifiers": [{"scheme": "ISBN", "value": "0123456789"}],
        "cover_metadata": {"ISBN": "0123456789"},
        "publication_info": [{"journal_issue": "issue"}],
        "_migration": {
            "has_journal": True,
            "journal_record_legacy_recids": [
                {
                    "recid": "1234",
                    "volume": None,
                }
            ],
        },
    }

    journal_data = {
        "$schema": "https://127.0.0.1:5000/schemas/series/series-v1.0.0.json",
        "pid": "serid-4",
        "title": "Dispersion Forces",
        "authors": ["Buhmann, Stefan Yoshi"],
        "abstract": "This is a multipart monograph",
        "mode_of_issuance": "SERIAL",
        "legacy_recid": "1234",
    }

    record_uuid = mint_record_pid(
        DOCUMENT_PID_TYPE, "pid", {"pid": document_data["pid"]}
    )
    document = Document.create(document_data, record_uuid)
    record_uuid = mint_record_pid(
        SERIES_PID_TYPE, "pid", {"pid": journal_data["pid"]}
    )
    journal = Series.create(journal_data, record_uuid)
    legacy_recid_minter(journal["legacy_recid"], record_uuid)
    db.session.commit()

    ri = RecordIndexer()
    ri.index(document)
    ri.index(journal)
    current_search.flush_and_refresh(index="*")

    link_documents_and_serials()

    document_rec = Document.get_record_by_pid(document["pid"])
    assert "serial" in document_rec.relations
def import_record(self):
    """Import CDS record with legacy recid."""
    document_class = current_app_ils.document_record_cls
    summary = super().import_record()

    if summary["action"] == "create" and summary["output_pid"]:
        legacy_pid_type = current_app.config[
            "CDS_ILS_RECORD_LEGACY_PID_TYPE"
        ]
        document = document_class.get_record_by_pid(summary["output_pid"])
        record_uuid = document.pid.object_uuid
        legacy_recid_minter(
            document["legacy_recid"], legacy_pid_type, record_uuid
        )
    return summary
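# The snippets in this section rely on a `legacy_recid_minter` helper whose
# implementation is not shown. The sketch below is an assumption, not the
# project's actual code: it uses the invenio-pidstore
# `PersistentIdentifier.create` API and the three-argument signature used
# above. `PersistentIdentifier.create` raises `PIDAlreadyExists` when the
# (pid_type, pid_value) pair is already registered, which is what the
# `create_record` variants below catch to switch to update mode.
from invenio_pidstore.models import PersistentIdentifier, PIDStatus


def legacy_recid_minter(legacy_recid, legacy_pid_type, record_uuid):
    """Mint the legacy recid as an extra PID for an existing record (sketch)."""
    PersistentIdentifier.create(
        pid_type=legacy_pid_type,
        pid_value=legacy_recid,
        object_type="rec",
        object_uuid=record_uuid,
        status=PIDStatus.REGISTERED,
    )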
def create_record(cls, dump):
    """Create a new record from dump."""
    document_cls = current_app_ils.document_record_cls
    record_uuid = uuid.uuid4()
    timestamp, json_data = dump.revisions[-1]
    json_data = clean_created_by_field(json_data)
    add_cover_metadata(json_data)
    try:
        with db.session.begin_nested():
            # checks if the document with this legacy_recid already exists
            legacy_recid_minter(json_data["legacy_recid"], record_uuid)
            provider = DocumentIdProvider.create(
                object_type="rec",
                object_uuid=record_uuid,
            )
            json_data["pid"] = provider.pid.pid_value
            document = document_cls.create(json_data, record_uuid)
            document.model.created = dump.created.replace(tzinfo=None)
            document.model.updated = timestamp.replace(tzinfo=None)
            document.commit()
        db.session.commit()
        return document
    except IlsValidationError as e:
        click.secho("Field: {}".format(e.errors[0].res["field"]), fg="red")
        click.secho(e.original_exception.message, fg="red")
        raise e
    except PIDAlreadyExists as e:
        allow_updates = current_app.config.get(
            "CDS_ILS_MIGRATION_ALLOW_UPDATES"
        )
        if not allow_updates:
            raise e
        # update document if already exists with legacy_recid
        document = get_record_by_legacy_recid(
            document_cls, json_data["legacy_recid"]
        )
        document.update(json_data)
        document.model.updated = timestamp.replace(tzinfo=None)
        document.commit()
        db.session.commit()
        return document
def create_record(cls, dump):
    """Create a new record from dump."""
    document_cls = current_app_ils.document_record_cls
    record_uuid = uuid.uuid4()
    timestamp, json_data = dump.revisions[-1]
    json_data = clean_created_by_field(json_data)

    vocabulary_validator.validate(VOCABULARIES_FIELDS, json_data)

    add_cover_metadata(json_data)
    add_title_from_conference_info(json_data)
    add_cds_url(json_data)
    try:
        with db.session.begin_nested():
            # checks if the document with this legacy_recid already exists
            legacy_pid_type = current_app.config[
                "CDS_ILS_RECORD_LEGACY_PID_TYPE"
            ]
            # Mint the legacy_recid before assigning the new pid. If minting
            # fails because the record already exists, we fall back to
            # updating it and must not keep the new pid, since the record
            # already has one.
            legacy_recid_minter(
                json_data["legacy_recid"], legacy_pid_type, record_uuid
            )
            provider = DocumentIdProvider.create(
                object_type="rec",
                object_uuid=record_uuid,
            )
            # requirement from the library
            if (
                json_data["_migration"]["has_journal"]
                and json_data["document_type"] != "PROCEEDINGS"
            ):
                json_data["document_type"] = "SERIAL_ISSUE"
            json_data["pid"] = provider.pid.pid_value
            document = document_cls.create(json_data, record_uuid)
            created_date = json_data.get(
                "_created", CDS_ILS_FALLBACK_CREATION_DATE
            )
            document.model.created = parser.parse(created_date)
            document.model.updated = timestamp.replace(tzinfo=None)
            document.commit()
        db.session.commit()
        documents_logger.info(
            "CREATED",
            extra=dict(
                legacy_id=json_data["legacy_recid"],
                new_pid=document["pid"],
                status="SUCCESS",
            ),
        )
        return document
    except IlsValidationError as e:
        click.secho("Field: {}".format(e.errors[0].res["field"]), fg="red")
        click.secho(e.original_exception.message, fg="red")
        raise e
    except PIDAlreadyExists as e:
        allow_updates = current_app.config.get(
            "CDS_ILS_MIGRATION_ALLOW_UPDATES"
        )
        if not allow_updates:
            raise e
        # update document if already exists with legacy_recid
        legacy_pid_type = current_app.config[
            "CDS_ILS_RECORD_LEGACY_PID_TYPE"
        ]
        # When updating we don't want to change the pid
        if "pid" in json_data:
            del json_data["pid"]
        document = get_record_by_legacy_recid(
            document_cls, legacy_pid_type, json_data["legacy_recid"]
        )
        document.update(json_data)
        document.model.updated = timestamp.replace(tzinfo=None)
        document.commit()
        db.session.commit()
        documents_logger.info(
            "UPDATED",
            extra=dict(
                legacy_id=json_data["legacy_recid"],
                new_pid=document["pid"],
                status="SUCCESS",
            ),
        )
        return document
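# `get_record_by_legacy_recid` resolves a record through its legacy PID
# rather than its regular ILS pid. The helper itself is not part of this
# section; this is a minimal sketch, assuming the invenio-pidstore `Resolver`
# API and the (record class, legacy pid type, legacy recid) call order used
# above. The real helper may add error handling around unresolvable PIDs.
from invenio_pidstore.resolver import Resolver


def get_record_by_legacy_recid(cls, legacy_pid_type, legacy_recid):
    """Return the record attached to the given legacy recid (sketch)."""
    resolver = Resolver(
        pid_type=legacy_pid_type, object_type="rec", getter=cls.get_record
    )
    _, record = resolver.resolve(legacy_recid)
    return record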
def create_record(
    cls, dump, rectype, legacy_id, mint_legacy_pid=True, log_extra={}
):
    """Create a new record from dump."""
    records_logger = logging.getLogger(f"{rectype}s_logger")
    model, pid_provider = model_provider_by_rectype(rectype)
    document_class = current_app_ils.document_record_cls
    series_class = current_app_ils.series_record_cls
    # only set when the legacy pid actually gets minted below
    legacy_pid_type = None
    try:
        with db.session.begin_nested():
            record_uuid = uuid.uuid4()
            provider = pid_provider.create(
                object_type="rec",
                object_uuid=record_uuid,
            )
            dump["pid"] = provider.pid.pid_value
            if mint_legacy_pid:
                legacy_pid_type = get_legacy_pid_type_by_provider(provider)
                legacy_recid_minter(legacy_id, legacy_pid_type, record_uuid)
            record = model.create(dump, record_uuid)
            if isinstance(record, (document_class, series_class)):
                created_date = dump.get(
                    "_created", CDS_ILS_FALLBACK_CREATION_DATE
                )
                record.model.created = parser.parse(created_date)
            record.commit()
        db.session.commit()
        records_logger.info(
            "CREATED",
            extra=dict(
                new_pid=record["pid"],
                status="SUCCESS",
                legacy_id=legacy_id,
                **log_extra,
            ),
        )
        return record
    except IlsValidationError as e:
        db.session.rollback()
        raise e
    except PIDAlreadyExists as e:
        allow_updates = current_app.config.get(
            "CDS_ILS_MIGRATION_ALLOW_UPDATES"
        )
        if not allow_updates:
            raise e
        if legacy_pid_type:
            # update record if already exists with legacy_recid
            record = get_record_by_legacy_recid(
                model, legacy_pid_type, legacy_id
            )
            # When updating we don't want to change the pid
            if "pid" in dump:
                del dump["pid"]
            record.update(dump)
            record.commit()
            db.session.commit()
            records_logger.info(
                "UPDATED",
                extra=dict(
                    new_pid=record["pid"],
                    status="SUCCESS",
                    legacy_id=legacy_id,
                    **log_extra,
                ),
            )
            return record
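# `get_legacy_pid_type_by_provider` maps the minted provider back to the
# legacy PID type for that record family. Its implementation is not shown
# here; the mapping below is only an assumption, consistent with the config
# keys used in the other snippets (CDS_ILS_RECORD_LEGACY_PID_TYPE for
# documents, CDS_ILS_SERIES_LEGACY_PID_TYPE for series), and reuses the
# SeriesIdProvider class referenced above.
def get_legacy_pid_type_by_provider(provider):
    """Return the legacy PID type matching the given PID provider (sketch)."""
    config_key = (
        "CDS_ILS_SERIES_LEGACY_PID_TYPE"
        if isinstance(provider, SeriesIdProvider)
        else "CDS_ILS_RECORD_LEGACY_PID_TYPE"
    )
    return current_app.config[config_key]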
def create_record(cls, dump, rectype):
    """Create a new record from dump."""
    records_logger = logging.getLogger(f"{rectype}s_logger")
    series_cls = current_app_ils.series_record_cls
    record_uuid = uuid.uuid4()
    try:
        with db.session.begin_nested():
            timestamp, json_data = dump.revisions[-1]
            if rectype == "serial" and serial_already_exists(
                json_data["title"]
            ):
                return
            json_data = clean_created_by_field(json_data)

            vocabulary_validator.validate(VOCABULARIES_FIELDS, json_data)

            provider = SeriesIdProvider.create(
                object_type="rec",
                object_uuid=record_uuid,
            )
            add_cds_url(json_data)
            json_data["pid"] = provider.pid.pid_value

            if rectype == "journal":
                legacy_pid_type = current_app.config[
                    "CDS_ILS_SERIES_LEGACY_PID_TYPE"
                ]
                legacy_recid_minter(
                    json_data["legacy_recid"], legacy_pid_type, record_uuid
                )
            add_cover_metadata(json_data)

            series = series_cls.create(json_data, record_uuid)
            created_date = json_data.get(
                "_created", CDS_ILS_FALLBACK_CREATION_DATE
            )
            series.model.created = parser.parse(created_date)
            series.model.updated = timestamp.replace(tzinfo=None)
            series.commit()
        db.session.commit()
        records_logger.info(
            "CREATED",
            extra=dict(
                new_pid=series["pid"],
                status="SUCCESS",
                legacy_id=json_data["legacy_recid"],
            ),
        )
        return series
    except PIDAlreadyExists as e:
        allow_updates = current_app.config.get(
            "CDS_ILS_MIGRATION_ALLOW_UPDATES"
        )
        if not allow_updates:
            raise e
        # update the series if it already exists with this legacy_recid
        legacy_pid_type = current_app.config[
            "CDS_ILS_SERIES_LEGACY_PID_TYPE"
        ]
        # When updating we don't want to change the pid
        if "pid" in json_data:
            del json_data["pid"]
        series = get_record_by_legacy_recid(
            series_cls, legacy_pid_type, json_data["legacy_recid"]
        )
        series.update(json_data)
        series.model.updated = timestamp.replace(tzinfo=None)
        series.commit()
        db.session.commit()
        records_logger.info(
            "UPDATED",
            extra=dict(
                legacy_id=json_data["legacy_recid"],
                new_pid=series["pid"],
                status="SUCCESS",
            ),
        )
        return series