def test_record_draft(app, db):
    """Test RecordDraft API."""
    # Start from a clean PID / relation table.
    assert PersistentIdentifier.query.count() == 0
    assert PIDRelation.query.count() == 0
    d1 = PersistentIdentifier.create('depid', '1', object_type='rec')
    r1 = PersistentIdentifier.create('recid', '1', object_type='rec')
    assert PersistentIdentifier.query.count() == 2

    # Linking recid -> depid creates exactly one un-indexed relation
    # with the recid as parent and the depid as child.
    RecordDraft.link(recid=r1, depid=d1)
    assert PIDRelation.query.count() == 1
    pr = PIDRelation.query.one()
    RECORD_DRAFT = resolve_relation_type_config('record_draft').id
    assert pr.relation_type == RECORD_DRAFT
    assert pr.index is None
    assert pr.parent == r1
    assert pr.child == d1

    d2 = PersistentIdentifier.create('depid', '2', object_type='rec')
    r2 = PersistentIdentifier.create('recid', '2', object_type='rec')

    # A recid may only ever have a single draft depid.
    with pytest.raises(Exception) as excinfo:
        RecordDraft.link(recid=r1, depid=d2)
    assert 'already has a depid as a draft' in str(excinfo.value)

    # And a depid may only ever be the draft of a single recid.
    with pytest.raises(Exception) as excinfo:
        RecordDraft.link(recid=r2, depid=d1)
    assert 'already is a draft of a recid' in str(excinfo.value)
def test_register(logger, app):
    """Test pid register."""
    with app.app_context():
        i = 1
        # NEW and RESERVED PIDs can be registered; success is logged.
        for s in [PIDStatus.NEW, PIDStatus.RESERVED]:
            pid = PersistentIdentifier.create('rec', str(i), status=s)
            i += 1
            assert pid.register()
            assert logger.info.call_args[0][0].startswith("Registered PID")
        # Registering is an invalid action for these terminal states.
        for s in [
            PIDStatus.REGISTERED, PIDStatus.DELETED, PIDStatus.REDIRECTED
        ]:
            pid = PersistentIdentifier.create('rec', str(i), status=s)
            i += 1
            pytest.raises(PIDInvalidAction, pid.register)
        # Test logging of bad errors.
        pid = PersistentIdentifier.create('rec', str(i),
                                          status=PIDStatus.RESERVED)
        with patch('invenio_pidstore.models.db.session.begin_nested') as mock:
            mock.side_effect = SQLAlchemyError()
            pytest.raises(SQLAlchemyError, pid.register)
            # Failure must be logged with the pid in `extra`.
            assert logger.exception.call_args[0][0].startswith(
                "Failed to register")
            assert 'pid' in logger.exception.call_args[1]['extra']
def without_doi(cls, **kwargs):
    """Create and index an article record without a pre-existing DOI.

    Reads ``changes``, ``authors`` and ``datasetUrl`` from the JSON request
    body, builds the article metadata, mints an article PID, creates and
    indexes the record, and registers a ``dpsart`` PID for it.

    :returns: A 302 ``Response`` whose ``Location`` header points at the
        record's canonical URL.
    """
    changes = request.json['changes']
    authors = request.json['authors']
    # snake_case local (was `datasetUrl`); the JSON key is unchanged.
    dataset_url = request.json['datasetUrl']
    article = {}
    article['title'] = {changes['title_lang']: changes['title_val']}
    article['abstract'] = {
        changes['abstract_lang']: changes['abstract_val']
    }
    article['authors'] = authors
    article['document_type'] = changes['document_type']
    always_merger.merge(article, {
        "_primary_community": 'cesnet',
        "access_right_category": "success"
    })
    article['datasets'] = [dataset_url]
    # NOTE: removed a leftover debug `print(article)` call.
    record_uuid = uuid.uuid4()
    pid = article_minter(record_uuid, article)
    record = cls.create(data=article, id_=record_uuid)
    indexer = cls.DOCUMENT_INDEXER()
    indexer.index(record)
    PersistentIdentifier.create('dpsart', pid.pid_value,
                                object_type='rec',
                                object_uuid=record_uuid,
                                status=PIDStatus.REGISTERED)
    db.session.commit()
    return Response(status=302, headers={"Location": record.canonical_url})
def test_record_page(app, db, es, event_queues, full_record):
    """Test record page views."""
    full_record['conceptdoi'] = '10.1234/foo.concept'
    full_record['conceptrecid'] = 'foo.concept'
    r = Record.create(full_record)
    PersistentIdentifier.create(
        'recid', '12345', object_type='rec', object_uuid=r.id,
        status=PIDStatus.REGISTERED)
    db.session.commit()

    # Visiting the record page emits a record-view event.
    with app.test_client() as client:
        record_url = url_for('invenio_records_ui.recid', pid_value='12345')
        assert client.get(record_url).status_code == 200

    # Process the queued event and verify the indexed stats document.
    process_events(['record-view'])
    current_search.flush_and_refresh(index='events-stats-record-view')
    search = Search(using=es, index='events-stats-record-view')
    assert search.count() == 1
    doc = search.execute()[0]
    assert doc['doi'] == '10.1234/foo.bar'
    assert doc['conceptdoi'] == '10.1234/foo.concept'
    assert doc['recid'] == '12345'
    assert doc['conceptrecid'] == 'foo.concept'
    assert doc['resource_type'] == {'type': 'publication', 'subtype': 'book'}
    assert doc['access_right'] == 'open'
    assert doc['communities'] == ['zenodo']
    assert doc['owners'] == [1]
def external_minters(record_uuid, data, pid_key='pid'):
    """External minters. RERO DOC and ARK.

    :param record_uuid: Record UUID.
    :param data: Record data.
    :param pid_key: PID key.
    :returns: Created PID object.
    """
    # Register a 'rerod' PID for each RERO DOC identifier and redirect it
    # to the record's 'doc' PID; existing PIDs are left untouched.
    for identifier in data.get('identifiedBy', []):
        if identifier.get('source') == 'RERO DOC':
            try:
                pid = PersistentIdentifier.create(
                    'rerod', identifier['value'], object_type='rec',
                    object_uuid=record_uuid, status=PIDStatus.REGISTERED)
                pid.redirect(PersistentIdentifier.get('doc', data[pid_key]))
            except PIDAlreadyExists:
                pass
    # Reserve an ARK PID for non-harvested records when ARK is enabled.
    if not data.get('harvested') and current_ark:
        ark_id = current_ark.ark_from_id(data[pid_key])
        try:
            pid = PersistentIdentifier.create(
                'ark', ark_id, object_type='rec', object_uuid=record_uuid,
                status=PIDStatus.RESERVED)
            # TODO: this minter is called twice why?
        except PIDAlreadyExists:
            pass
        data['ark'] = ark_id
def test_funder_ep_resolving(app, db):
    """Test funder resolving through entry point-registered JSON resolver."""
    parent_json = {
        'internal_id': '10.13039/001',
        'parent': '',
        'name': 'Foo',
    }
    child_json = {
        'internal_id': '10.13039/002',
        'parent': {
            '$ref': 'http://dx.doi.org/10.13039/001'
        },
        'name': 'Bar',
    }
    # Create both funder records, each with a registered 'frdoi' PID.
    parent_rec = R.create(parent_json)
    PID.create('frdoi', parent_json['internal_id'], object_type='rec',
               object_uuid=parent_rec.id, status=PIDStatus.REGISTERED)
    child_rec = R.create(child_json)
    PID.create('frdoi', child_json['internal_id'], object_type='rec',
               object_uuid=child_rec.id, status=PIDStatus.REGISTERED)
    # Resolving the $ref in the child must yield the parent's JSON.
    assert child_rec.replace_refs()['parent'] == parent_json
def create(cls, dump):
    """Create record based on dump.

    If the dump carries no record data, only ensure a reserved ``docid``
    PID exists for ``dump.recid`` and return ``None``.  Otherwise prepare
    the dump's revisions, PIDs and files and create the record from the
    final revision.

    :param dump: Dump object describing the record to migrate.
    :returns: The created record, or ``None`` when there is nothing to
        create (no record data, or no revisions).
    """
    # If 'record' is not present, just create the PID
    if not dump.data.get('record'):
        try:
            PersistentIdentifier.get(pid_type='docid',
                                     pid_value=dump.recid)
        except PIDDoesNotExistError:
            PersistentIdentifier.create(
                'docid', dump.recid, status=PIDStatus.RESERVED
            )
            db.session.commit()
        return None
    dump.prepare_revisions()
    dump.prepare_pids()
    dump.prepare_files()
    # if we have a final revision - to remove when data cleaned.
    # (Removed a commented-out ipdb debugging import that was left here.)
    try:
        if dump.revisions[-1]:
            record = cls.create_record(dump)
            return record
    except IndexError:
        # No revisions at all: report and fall through returning None.
        click.secho("Revision problem", fg='red')
def test_deposit_index(db, es):
    """Test update embargoed records."""
    deposit_index_name = 'deposits-records-record-v1.0.0'
    # Published record that the deposit references via recid '1'.
    rec1 = Record.create({
        'title': 'One',
        '_deposit': {
            'status': 'published',
            'pid': {
                'type': 'recid',
                'value': '1'
            }
        }
    })
    PersistentIdentifier.create(pid_type='recid', pid_value='1',
                                status=PIDStatus.REGISTERED,
                                object_uuid=rec1.id, object_type='rec')
    # Deposit carries no 'title' of its own; it must be picked up from
    # the published record during indexing.
    Deposit.create({
        '_deposit': {
            'status': 'published',
            'pid': {
                'type': 'recid',
                'value': '1'
            }
        }
    })
    db.session.commit()
    current_search.flush_and_refresh(deposit_index_name)
    res = current_search.client.search(index=deposit_index_name)
    # Make sure the 'title' was indexed from record
    assert res['hits']['hits'][0]['_source']['title'] == 'One'
def test_deposit_index(db, es):
    """Test update embargoed records."""
    deposit_index_name = 'deposits-records-record-v1.0.0'
    # Published record that the deposit references via recid '1'.
    rec1 = Record.create({
        'title': 'One',
        '_deposit': {
            'status': 'published',
            'pid': {
                'type': 'recid',
                'value': '1'
            }
        }
    })
    PersistentIdentifier.create(pid_type='recid', pid_value='1',
                                status=PIDStatus.REGISTERED,
                                object_uuid=rec1.id, object_type='rec')
    # Deposit carries no 'title' of its own; it must be picked up from
    # the published record during indexing.
    Deposit.create({
        '_deposit': {
            'status': 'published',
            'pid': {
                'type': 'recid',
                'value': '1'
            }
        }
    })
    db.session.commit()
    current_search.flush_and_refresh(deposit_index_name)
    res = current_search.client.search(index=deposit_index_name)
    # Make sure the 'title' was indexed from record
    assert res['hits']['hits'][0]['_source']['title'] == 'One'
def create_pids(cls, dump, deposit): """Create a persistent identifiers.""" # Mark deposit deleted if recid is deleted. recid = dump.recid_pid # Create depid depid = PersistentIdentifier.create( pid_type='depid', pid_value=str(dump.depid), object_type='rec', object_uuid=deposit.id, status=PIDStatus.REGISTERED ) if recid and recid.status == PIDStatus.DELETED: depid.delete() if RecordIdentifier.query.get(dump.depid) is None: RecordIdentifier.insert(dump.depid) # Pre-reserved recid. if not recid and dump.recid: if dump.has_pid: # Published deposit without a recid (this is an upload which # never got ingested so we set it back to draft status and # reserves the reid). pass recid = PersistentIdentifier.create( pid_type='recid', pid_value=str(dump.recid), status=PIDStatus.RESERVED ) if RecordIdentifier.query.get(dump.recid) is None: RecordIdentifier.insert(dump.recid) return depid, recid
def article_minter(record_uuid, data):
    """Similar to Dataset minter, but also creates DOI PIDs for articles."""
    # The record must not already carry a minted id.
    assert 'id' not in data
    doi = get_doi(data)
    # DOIs already listed in the record's identifiers metadata.
    doi_ids = [
        d.value for d in data.get('identifiers', []) if d.scheme == 'doi'
    ]
    if doi and doi not in doi_ids:
        # Append DOI to additional article identifiers metadata field
        data.setdefault('identifiers', []).append({
            'scheme': 'doi',
            'value': doi
        })
        # And persist DOI PID reference to record in database
        PersistentIdentifier.create('doi', doi, object_type='rec',
                                    object_uuid=record_uuid,
                                    status=PIDStatus.REGISTERED)
    # Mint the article PID itself and store its value on the record.
    provider = ArticleProvider.create(
        object_type='rec',
        object_uuid=record_uuid,
    )
    data['id'] = provider.pid.pid_value
    return provider.pid
def grant_records(db, funder_record):
    """Create grant records."""
    # Two EC grants (FP7 / FP6) referencing the same funder via $ref.
    grants = [
        Record.create(
            dict(
                internal_id='10.13039/501100000780::282896',
                funder={'$ref': 'https://dx.doi.org/10.13039/501100000780'},
                identifiers=dict(
                    eurepo='info:eu-repo/grantAgreement/EC/FP7/282896',
                ),
                code='282896',
                title='Open Access Research Infrastructure in Europe',
                acronym='OpenAIREplus',
                program='FP7',
            )),
        Record.create(
            dict(
                internal_id='10.13039/501100000780::027819',
                funder={'$ref': 'https://dx.doi.org/10.13039/501100000780'},
                identifiers=dict(
                    eurepo='info:eu-repo/grantAgreement/EC/FP6/027819',
                ),
                code='027819',
                title='Integrating cognition, emotion and autonomy',
                acronym='ICEA',
                program='FP6',
            )),
    ]
    # Register a 'grant' PID for each record keyed on its internal id.
    for g in grants:
        PersistentIdentifier.create(pid_type='grant',
                                    pid_value=g['internal_id'],
                                    object_type='rec', object_uuid=g.id,
                                    status='R')
    db.session.commit()
    return grants
def test_register(logger, app):
    """Test pid register."""
    with app.app_context():
        i = 1
        # NEW and RESERVED PIDs can be registered; success is logged.
        for s in [PIDStatus.NEW, PIDStatus.RESERVED]:
            pid = PersistentIdentifier.create('rec', str(i), status=s)
            i += 1
            assert pid.register()
            assert logger.info.call_args[0][0].startswith(
                "Registered PID")
        # Registering is an invalid action for these terminal states.
        for s in [PIDStatus.REGISTERED, PIDStatus.DELETED,
                  PIDStatus.REDIRECTED]:
            pid = PersistentIdentifier.create('rec', str(i), status=s)
            i += 1
            pytest.raises(PIDInvalidAction, pid.register)
        # Test logging of bad errors.
        pid = PersistentIdentifier.create('rec', str(i),
                                          status=PIDStatus.RESERVED)
        with patch('invenio_pidstore.models.db.session.begin_nested') as mock:
            mock.side_effect = SQLAlchemyError()
            pytest.raises(SQLAlchemyError, pid.register)
            # Failure must be logged with the pid in `extra`.
            assert logger.exception.call_args[0][0].startswith(
                "Failed to register")
            assert 'pid' in logger.exception.call_args[1]['extra']
def create_deposit_and_record(pid_value, owner):
    """Utility function for creating records and deposits."""
    # NOTE(review): relies on `conceptrecid` and `minimal_record` from the
    # enclosing fixture scope — confirm when moving this helper.
    # Reserve a recid and attach it as a draft child of the concept recid.
    recid = PersistentIdentifier.create(
        'recid', pid_value, status=PIDStatus.RESERVED)
    pv = PIDVersioning(parent=conceptrecid)
    pv.insert_draft_child(recid)
    depid = PersistentIdentifier.create(
        'depid', pid_value, status=PIDStatus.REGISTERED)
    deposit = ZenodoRecord.create({'_deposit': {'id': depid.pid_value},
                                   'conceptrecid': conceptrecid.pid_value,
                                   'recid': recid.pid_value})
    deposit.commit()
    depid.assign('rec', deposit.id)
    # Build record metadata from the minimal fixture, tied to the same pids.
    record_metadata = deepcopy(minimal_record)
    record_metadata['_deposit'] = {'id': depid.pid_value}
    record_metadata['conceptrecid'] = conceptrecid.pid_value
    record_metadata['recid'] = int(recid.pid_value)
    record_metadata['owners'] = [owner.id]
    record = ZenodoRecord.create(record_metadata)
    zenodo_record_minter(record.id, record)
    record.commit()
    return (depid, deposit, recid, record)
def weko_deposit_minter(record_uuid, data):
    """Weko deposit."""
    # Mint a registered recid from the next sequential record identifier,
    # then a depid sharing the same pid_value.
    next_id = RecordIdentifier.next()
    recid = PersistentIdentifier.create(
        'recid',
        str(next_id),
        object_type='rec',
        object_uuid=record_uuid,
        status=PIDStatus.REGISTERED,
    )
    depid = PersistentIdentifier.create(
        'depid',
        str(recid.pid_value),
        object_type='rec',
        object_uuid=record_uuid,
        status=PIDStatus.REGISTERED,
    )
    # Record the deposit bookkeeping on the data payload.
    deposit_info = {'id': depid.pid_value, 'status': 'draft'}
    data.update({'_deposit': deposit_info})
    return depid
def test_delete(logger, app):
    """Test pid delete."""
    with app.app_context():
        i = 1
        # Non-NEW PIDs are soft-deleted and the deletion is logged.
        for s in [
            PIDStatus.RESERVED, PIDStatus.RESERVED, PIDStatus.REDIRECTED,
            PIDStatus.DELETED
        ]:
            pid = PersistentIdentifier.create('rec', str(i), status=s)
            i += 1
            assert pid.delete()
            assert logger.info.call_args[0][0] == "Deleted PID."
        # New persistent identifiers are removed completely
        count = PersistentIdentifier.query.count()
        pid = PersistentIdentifier.create('rec', str(i),
                                          status=PIDStatus.NEW)
        db.session.commit()
        assert PersistentIdentifier.query.count() == count + 1
        pid.delete()
        assert PersistentIdentifier.query.count() == count
        assert logger.info.call_args[0][0] == "Deleted PID (removed)."
        # Deletion failure must be logged with the pid in `extra`.
        pid = PersistentIdentifier.create('rec', str(i + 1))
        with patch('invenio_pidstore.models.db.session.begin_nested') as mock:
            mock.side_effect = SQLAlchemyError()
            pytest.raises(SQLAlchemyError, pid.delete)
            assert logger.exception.call_args[0][0].startswith(
                "Failed to delete")
            assert 'pid' in logger.exception.call_args[1]['extra']
def create_pids(cls, dump, deposit): """Create a persistent identifiers.""" # Mark deposit deleted if recid is deleted. recid = dump.recid_pid # Create depid depid = PersistentIdentifier.create(pid_type='depid', pid_value=str(dump.depid), object_type='rec', object_uuid=deposit.id, status=PIDStatus.REGISTERED) if recid and recid.status == PIDStatus.DELETED: depid.delete() if RecordIdentifier.query.get(dump.depid) is None: RecordIdentifier.insert(dump.depid) # Pre-reserved recid. if not recid and dump.recid: if dump.has_pid: # Published deposit without a recid (this is an upload which # never got ingested so we set it back to draft status and # reserves the reid). pass recid = PersistentIdentifier.create(pid_type='recid', pid_value=str(dump.recid), status=PIDStatus.RESERVED) if RecordIdentifier.query.get(dump.recid) is None: RecordIdentifier.insert(dump.recid) return depid, recid
def test_record_page(app, db, es, event_queues, full_record):
    """Test record page views."""
    full_record['conceptdoi'] = '10.1234/foo.concept'
    full_record['conceptrecid'] = 'foo.concept'
    r = Record.create(full_record)
    PersistentIdentifier.create('recid', '12345', object_type='rec',
                                object_uuid=r.id,
                                status=PIDStatus.REGISTERED)
    db.session.commit()

    # Visiting the record page emits a record-view event.
    with app.test_client() as client:
        record_url = url_for('invenio_records_ui.recid', pid_value='12345')
        assert client.get(record_url).status_code == 200

    # Process the queued event and verify the indexed stats document.
    process_events(['record-view'])
    current_search.flush_and_refresh(index='events-stats-record-view')
    search = Search(using=es, index='events-stats-record-view')
    assert search.count() == 1
    doc = search.execute()[0]
    assert doc['doi'] == '10.1234/foo.bar'
    assert doc['conceptdoi'] == '10.1234/foo.concept'
    assert doc['recid'] == '12345'
    assert doc['conceptrecid'] == 'foo.concept'
    assert doc['resource_type'] == {'type': 'publication', 'subtype': 'book'}
    assert doc['access_right'] == 'open'
    assert doc['communities'] == ['zenodo']
    assert doc['owners'] == [1]
def grant_records(db, funder_record):
    """Create grant records."""
    # Two EC grants (FP7 / FP6) referencing the same funder via $ref.
    grants = [
        Record.create(dict(
            internal_id='10.13039/501100000780::282896',
            funder={'$ref': 'https://dx.doi.org/10.13039/501100000780'},
            identifiers=dict(
                eurepo='info:eu-repo/grantAgreement/EC/FP7/282896',
            ),
            code='282896',
            title='Open Access Research Infrastructure in Europe',
            acronym='OpenAIREplus',
            program='FP7',
        )),
        Record.create(dict(
            internal_id='10.13039/501100000780::027819',
            funder={'$ref': 'https://dx.doi.org/10.13039/501100000780'},
            identifiers=dict(
                eurepo='info:eu-repo/grantAgreement/EC/FP6/027819',
            ),
            code='027819',
            title='Integrating cognition, emotion and autonomy',
            acronym='ICEA',
            program='FP6',
        )),
    ]
    # Register a 'grant' PID for each record keyed on its internal id.
    for g in grants:
        PersistentIdentifier.create(
            pid_type='grant', pid_value=g['internal_id'],
            object_type='rec', object_uuid=g.id, status='R')
    db.session.commit()
    return grants
def test_redirect_cleanup(app):
    """Test proper clean up from redirects."""
    with app.app_context():
        pid1 = PersistentIdentifier.create('recid', '1',
                                           status=PIDStatus.REGISTERED,
                                           object_type='rec',
                                           object_uuid=uuid.uuid4())
        pid2 = PersistentIdentifier.create('recid', '2',
                                           status=PIDStatus.REGISTERED,
                                           object_type='rec',
                                           object_uuid=uuid.uuid4())
        pid3 = PersistentIdentifier.create('recid', '3',
                                           status=PIDStatus.REGISTERED)
        db.session.commit()
        assert Redirect.query.count() == 0
        pid3.redirect(pid1)
        assert Redirect.query.count() == 1
        # Re-redirecting keeps a single Redirect row.
        pid3.redirect(pid2)
        assert Redirect.query.count() == 1
        # A redirected PID cannot be assigned a new object...
        pytest.raises(PIDObjectAlreadyAssigned, pid3.assign, 'rec',
                      uuid.uuid4())
        # ...but unassigning it removes the redirect row.
        pid3.unassign()
        assert Redirect.query.count() == 0
def test_many_redirections_to_the_same_pid(inspire_app):
    """Several PIDs can all redirect to the same target PID."""
    pid_1 = PersistentIdentifier.create(
        pid_type="a",
        pid_value="1",
        status=PIDStatus.REGISTERED,
        object_uuid=uuid.uuid4(),
    )
    pid_2 = PersistentIdentifier.create(
        pid_type="a",
        pid_value="2",
        status=PIDStatus.REGISTERED,
        object_uuid=uuid.uuid4(),
    )
    pid_3 = PersistentIdentifier.create(
        pid_type="a",
        pid_value="3",
        status=PIDStatus.REGISTERED,
        object_uuid=uuid.uuid4(),
    )
    # Redirect both pid_1 and pid_3 to pid_2.
    InspireRedirect.redirect(pid_1, pid_2)
    InspireRedirect.redirect(pid_3, pid_2)
    assert pid_1.status == PIDStatus.REDIRECTED
    assert pid_3.status == PIDStatus.REDIRECTED
    # Both redirects are tracked on the target.
    assert len(pid_2.redirected_pids) == 2
def weko_deposit_minter(record_uuid, data, recid=None):
    """Weko deposit."""
    # Either take the next sequential id or reuse/insert the caller's recid.
    if not recid:
        id_ = RecordIdentifier.next()
    else:
        if isinstance(recid, int):
            RecordIdentifier.insert(recid)
        id_ = recid
    recid = PersistentIdentifier.create('recid', str(id_),
                                        object_type='rec',
                                        object_uuid=record_uuid,
                                        status=PIDStatus.REGISTERED)
    data['recid'] = str(recid.pid_value)
    # Create depid with same pid_value of the recid
    depid = PersistentIdentifier.create(
        'depid',
        str(recid.pid_value),
        object_type='rec',
        object_uuid=record_uuid,
        status=PIDStatus.REGISTERED,
    )
    data.update({
        '_deposit': {
            'id': depid.pid_value,
            'status': 'draft',
        },
    })
    return depid
def test_delete(logger, app):
    """Test pid delete."""
    with app.app_context():
        i = 1
        # Non-NEW PIDs are soft-deleted and the deletion is logged.
        for s in [PIDStatus.RESERVED, PIDStatus.RESERVED,
                  PIDStatus.REDIRECTED, PIDStatus.DELETED]:
            pid = PersistentIdentifier.create('rec', str(i), status=s)
            i += 1
            assert pid.delete()
            assert logger.info.call_args[0][0] == "Deleted PID."
        # New persistent identifiers are removed completely
        count = PersistentIdentifier.query.count()
        pid = PersistentIdentifier.create('rec', str(i),
                                          status=PIDStatus.NEW)
        db.session.commit()
        assert PersistentIdentifier.query.count() == count + 1
        pid.delete()
        assert PersistentIdentifier.query.count() == count
        assert logger.info.call_args[0][0] == "Deleted PID (removed)."
        # Deletion failure must be logged with the pid in `extra`.
        pid = PersistentIdentifier.create('rec', str(i+1))
        with patch('invenio_pidstore.models.db.session.begin_nested') as mock:
            mock.side_effect = SQLAlchemyError()
            pytest.raises(SQLAlchemyError, pid.delete)
            assert logger.exception.call_args[0][0].startswith(
                "Failed to delete")
            assert 'pid' in logger.exception.call_args[1]['extra']
def test_transfer_cp(db):
    """Test factories.transfer_cp function."""
    # first we create a record
    recid = uuid.uuid4()
    PersistentIdentifier.create(
        'recid', '1337', object_type='rec', object_uuid=recid,
        status=PIDStatus.REGISTERED)
    record = Record.create({'title': 'record test'}, recid)
    # we setup a file storage
    tmppath = tempfile.mkdtemp()
    db.session.add(Location(name='default', uri=tmppath, default=True))
    db.session.commit()
    # we add a file to the record
    bucket = Bucket.create()
    content = b'Aaah! A headcrab!!!\n'
    record_buckets = RecordsBuckets.create(record=record.model, bucket=bucket)
    record.files['crab.txt'] = BytesIO(content)
    # test!
    rec_dir = join(tmppath, create_accessioned_id('1337', 'recid'))
    factories.transfer_cp(record.id, tmppath)
    assert isdir(rec_dir)
    assert isfile(join(rec_dir, 'crab.txt'))
    # BUGFIX: read in binary mode — `content` is bytes, so the previous
    # text-mode read returned str and `str == bytes` is always False.
    with open(join(rec_dir, 'crab.txt'), "rb") as f:
        assert f.read() == content
    # finalization
    rmtree(tmppath)
def mint(cls, object_uuid, data):
    """Mint a CNUM identifier for a conference record

    This method calculates the next CNUM identifier according to the
    conference schema. In case a CNUM is already present in the metadata,
    the minter registers a new pid for it. This can happen when conference
    records are migrated from legacy.

    This method doesn't handle the case where conference records are
    created with the flag `deleted` set to True.

    Args:
        object_uuid (str): the record uuid
        data (dict): the record's metadata
    Returns:
        Minter: an instance of this class used for minting the CNUM.
    """
    minter = cls(object_uuid, data)
    minter.validate()
    if "cnum" not in data:
        # No CNUM yet: derive one from the provider and store it.
        cnum_provider = minter.create(data)
        if cnum_provider:
            cnum = cnum_provider.pid.pid_value
            data["cnum"] = cnum
    else:
        # migrated record already have a CNUM identifier in metadata
        PersistentIdentifier.create(
            pid_type="cnum",
            pid_value=data["cnum"],
            object_uuid=object_uuid,
            object_type="rec",
            status=PIDStatus.REGISTERED,
        )
    return minter
def test_file_download_ui(app, objects):
    """Test get buckets."""
    # Configure permissive file permissions and the records-UI endpoints.
    app.config.update(dict(
        FILES_REST_PERMISSION_FACTORY=lambda *a, **kw: type(
            'Allow', (object, ), {'can': lambda self: True}
        )(),
        RECORDS_UI_DEFAULT_PERMISSION_FACTORY=None,  # No permission checking
        RECORDS_UI_ENDPOINTS=dict(
            recid=dict(
                pid_type='recid',
                route='/records/<pid_value>',
            ),
            recid_files=dict(
                pid_type='recid',
                route='/records/<pid_value>/files/<filename>',
                view_imp='invenio_records_files.utils:file_download_ui',
                record_class='invenio_records_files.api:Record',
            ),
        )
    ))
    InvenioRecordsUI(app)
    obj1 = objects[0]
    with app.test_request_context():
        # Record 1 - Live record
        rec_uuid = uuid.uuid4()
        PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec_uuid,
            status=PIDStatus.REGISTERED)
        record = Record.create({
            'title': 'Registered',
            'recid': 1,
            '_files': [
                {'key': obj1.key, 'bucket': str(obj1.bucket_id),
                 'checksum': 'invalid'},
            ]
        }, id_=rec_uuid)
        RecordsBuckets.create(record=record.model, bucket=obj1.bucket)
        db.session.commit()
        main_url = url_for('invenio_records_ui.recid', pid_value='1')
        file_url = url_for(
            'invenio_records_ui.recid_files',
            pid_value='1',
            filename=obj1.key)
        no_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='')
        invalid_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='no')
    with app.test_client() as client:
        # Record page and the existing file are served.
        res = client.get(main_url)
        assert res.status_code == 200
        res = client.get(file_url)
        assert res.status_code == 200
        # Empty or unknown filenames yield 404.
        res = client.get(no_file_url)
        assert res.status_code == 404
        res = client.get(invalid_file_url)
        assert res.status_code == 404
def test_publish_deleted_published(app, db, schemas):
    """Publishing a draft after the published record was deleted."""
    TestDraftRecord.schema = schemas['draft']
    TestPublishedRecord.schema = schemas['published']
    with db.session.begin_nested():
        # Create the published record with its recid PID.
        published_uuid = uuid.uuid4()
        published_record = TestPublishedRecord.create(
            {
                'id': '1',
                'title': '11',
                '$schema': 'records/record-v1.0.0.json'
            }, id_=published_uuid)
        published_pid = PersistentIdentifier.create(
            pid_type='recid', pid_value='1', status=PIDStatus.REGISTERED,
            object_type='rec', object_uuid=published_uuid)
        assert published_record.revision_id == 0
        # Create a draft of the same record with a different title.
        draft_uuid = uuid.uuid4()
        rec = TestDraftRecord.create({
            'id': '1',
            'title': '22'
        }, id_=draft_uuid)
        draft_pid = PersistentIdentifier.create(pid_type='drecid',
                                                pid_value='1',
                                                status=PIDStatus.REGISTERED,
                                                object_type='rec',
                                                object_uuid=draft_uuid)
    with db.session.begin_nested():
        # Delete the published record and mark its PID deleted.
        published_record.delete()
        published_pid.status = PIDStatus.DELETED
        db.session.add(published_pid)
    with db.session.begin_nested():
        rec = TestDraftRecord.get_record(draft_uuid)
        draft_pid = PersistentIdentifier.get(pid_type='drecid',
                                             pid_value='1')
        with disable_test_authenticated():
            current_drafts.publish(
                RecordContext(record=rec, record_pid=draft_pid))
    with db.session.begin_nested():
        # draft should be gone
        draft_pid = PersistentIdentifier.get(pid_type='drecid',
                                             pid_value='1')
        assert draft_pid.status == PIDStatus.DELETED
        rec = TestDraftRecord.get_record(draft_uuid, with_deleted=True)
        assert rec.model.json is None
        published_pid = PersistentIdentifier.get(pid_type='recid',
                                                 pid_value='1')
        assert published_pid.status == PIDStatus.REGISTERED
        rec = TestPublishedRecord.get_record(published_pid.object_uuid)
        assert rec['title'] == '22'
        # revision 0 original, 1 deleted, 2 temporarily reverted to orig,
        # 3 published
        assert rec.revision_id == 3
def process_item(record, resync, counter):
    """Process item."""
    event_counter('processed_items', counter)
    event = ItemEvents.INIT
    xml = etree.tostring(record, encoding='utf-8').decode()
    mapper = JPCOARMapper(xml)
    # Look up a previous sync of this item via its 'syncid' PID.
    resyncid = PersistentIdentifier.query.filter_by(
        pid_type='syncid',
        pid_value=gen_resync_pid_value(resync, mapper.identifier())).first()
    if resyncid:
        # Already synced once: reuse the existing deposit and indexes.
        r = RecordMetadata.query.filter_by(id=resyncid.object_uuid).first()
        recid = PersistentIdentifier.query.filter_by(
            pid_type='recid', object_uuid=resyncid.object_uuid).first()
        recid.status = PIDStatus.REGISTERED
        pubdate = dateutil.parser.parse(
            r.json['pubdate']['attribute_value']).date()
        dep = WekoDeposit(r.json, r)
        indexes = dep['path'].copy()
        event = ItemEvents.UPDATE
    elif mapper.is_deleted():
        # Never seen before and already deleted upstream: nothing to do.
        return
    else:
        # First sync: create a deposit and register its syncid PID.
        dep = WekoDeposit.create({})
        PersistentIdentifier.create(pid_type='syncid',
                                    pid_value=gen_resync_pid_value(
                                        resync, mapper.identifier()),
                                    status=PIDStatus.REGISTERED,
                                    object_type=dep.pid.object_type,
                                    object_uuid=dep.pid.object_uuid)
        indexes = []
        event = ItemEvents.CREATE
    # Ensure the resync target index is listed exactly once.
    indexes.append(str(resync.index_id)) if str(
        resync.index_id) not in indexes else None
    if mapper.is_deleted():
        # Deleted upstream after a previous sync: soft-delete locally.
        soft_delete(recid.pid_value)
        event = ItemEvents.DELETE
    else:
        json = mapper.map()
        json['$schema'] = '/items/jsonschema/' + str(mapper.itemtype.id)
        dep['_deposit']['status'] = 'draft'
        dep.update({'actions': 'publish', 'index': indexes}, json)
        dep.commit()
        dep.publish()
        # add item versioning
        pid = PersistentIdentifier.query.filter_by(
            pid_type='recid', pid_value=dep.pid.pid_value).first()
        with current_app.test_request_context() as ctx:
            first_ver = dep.newversion(pid)
            first_ver.publish()
    db.session.commit()
    # Count the outcome for reporting.
    if event == ItemEvents.CREATE:
        event_counter('created_items', counter)
    elif event == ItemEvents.UPDATE:
        event_counter('updated_items', counter)
    elif event == ItemEvents.DELETE:
        event_counter('deleted_items', counter)
def test_draft_record_deleted_draft(app, db, schemas):
    """Editing a published record when its draft was previously deleted."""
    TestDraftRecord.schema = schemas['draft']
    TestPublishedRecord.schema = schemas['published']
    with db.session.begin_nested():
        # Create the published record with its recid PID.
        published_uuid = uuid.uuid4()
        published_record = TestPublishedRecord.create(
            {
                'id': '1',
                'title': '11'
            }, id_=published_uuid)
        published_pid = PersistentIdentifier.create(
            pid_type='recid', pid_value='1', status=PIDStatus.REGISTERED,
            object_type='rec', object_uuid=published_uuid)
        assert published_record.revision_id == 0
        # Create a draft of the same record with a different title.
        draft_uuid = uuid.uuid4()
        draft_record = TestDraftRecord.create({
            'id': '1',
            'title': '22'
        }, id_=draft_uuid)
        draft_pid = PersistentIdentifier.create(pid_type='drecid',
                                                pid_value='1',
                                                status=PIDStatus.REGISTERED,
                                                object_type='rec',
                                                object_uuid=draft_uuid)
        assert draft_record.revision_id == 0
    with db.session.begin_nested():
        # Delete the draft and mark its PID deleted.
        draft_record.delete()
        draft_pid.status = PIDStatus.DELETED
        db.session.add(draft_pid)
    with db.session.begin_nested():
        with disable_test_authenticated():
            current_drafts.edit(
                RecordContext(record=published_record,
                              record_pid=published_pid))
        # published version should be there unchanged
        published_pid = PersistentIdentifier.get(pid_type='recid',
                                                 pid_value='1')
        assert published_pid.status == PIDStatus.REGISTERED
        rec = TestDraftRecord.get_record(published_uuid, with_deleted=True)
        assert rec['title'] == '11'
        assert rec.revision_id == 0
        # draft version should be there unchanged
        draft_pid = PersistentIdentifier.get(pid_type='drecid',
                                             pid_value='1')
        assert draft_pid.status == PIDStatus.REGISTERED
        rec = TestDraftRecord.get_record(draft_pid.object_uuid)
        assert rec.model.json is not None
        assert rec['title'] == '11'
        assert rec.revision_id == 4
def test_funder_ep_resolving(app, db):
    """Test funder resolving through entry point-registered JSON resolver."""
    json1 = {"internal_id": "10.13039/001", "parent": "", "name": "Foo"}
    # json2's parent is a $ref pointing at json1's DOI.
    json2 = {"internal_id": "10.13039/002",
             "parent": {"$ref": "http://dx.doi.org/10.13039/001"},
             "name": "Bar"}
    r1 = R.create(json1)
    PID.create("frdoi", json1["internal_id"], object_type="rec",
               object_uuid=r1.id, status=PIDStatus.REGISTERED)
    r2 = R.create(json2)
    PID.create("frdoi", json2["internal_id"], object_type="rec",
               object_uuid=r2.id, status=PIDStatus.REGISTERED)
    # Resolving the $ref in r2 must yield json1.
    assert r2.replace_refs()["parent"] == json1
def test_isolated_app_fixture_rollback(isolated_app):
    """A session rollback leaves the PID table exactly as it was."""
    count_before = PersistentIdentifier.query.count()
    PersistentIdentifier.create(pid_type='type1', pid_value='value1')
    db.session.rollback()
    # The uncommitted PID must have vanished.
    assert PersistentIdentifier.query.count() == count_before
def test_app_fixture_lacks_db_isolation_step1(pids_count, app):
    """PIDs created under the plain `app` fixture persist in the DB."""
    assert PersistentIdentifier.query.count() == pids_count
    PersistentIdentifier.create(
        pid_type='type1',
        pid_value='value1',
    )
    # The #PIDs must have incremented.
    assert PersistentIdentifier.query.count() == pids_count + 1
def legacy_recid_minter(legacy_recid, legacy_pid_type, uuid):
    """Legacy_recid minter.

    Registers a PID of ``legacy_pid_type`` with value ``legacy_recid``
    pointing at the record object ``uuid``.
    """
    # NOTE(review): the `uuid` parameter shadows the stdlib `uuid` module;
    # renaming it would break keyword callers, so it is kept as-is.
    PersistentIdentifier.create(
        pid_type=legacy_pid_type,
        pid_value=legacy_recid,
        object_type="rec",
        object_uuid=uuid,
        status=PIDStatus.REGISTERED,
    )
def test_tombstone(app):
    """Test tombstones."""
    with app.app_context():
        # OK PID
        pid_ok, record = create_record({'title': 'test'})
        # Deleted PID
        pid_del, record = create_record({'title': 'deleted'})
        pid_del.delete()
        # Missing object PID
        pid_noobj = PersistentIdentifier.create(
            'recid', '100', status=PIDStatus.REGISTERED)
        db.session.commit()
        # Redirected PID
        pid_red = PersistentIdentifier.create(
            'recid', '101', status=PIDStatus.REGISTERED)
        pid_red.redirect(pid_ok)
        # Redirect PID - different endpoint
        pid_doi = PersistentIdentifier.create(
            'doi', '10.1234/foo', status=PIDStatus.REGISTERED)
        pid_red_doi = PersistentIdentifier.create(
            'recid', '102', status=PIDStatus.REGISTERED)
        pid_red_doi.redirect(pid_doi)
        db.session.commit()
    with app.test_client() as client:
        # PID deleted
        headers = [('Accept', 'application/json')]
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_del.pid_value),
            headers=headers)
        assert res.status_code == 410
        # PID missing object
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_noobj.pid_value),
            headers=headers)
        assert res.status_code == 500
        # Redirected invalid endpoint
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_red_doi.pid_value),
            headers=headers)
        assert res.status_code == 500
        # Redirected
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_red.pid_value),
            headers=headers)
        assert res.status_code == 301
def test_tombstone(app):
    """Test tombstones."""
    with app.app_context():
        # OK PID
        pid_ok, record = create_record({'title': 'test'})
        # Deleted PID
        pid_del, record = create_record({'title': 'deleted'})
        pid_del.delete()
        # Missing object PID
        pid_noobj = PersistentIdentifier.create(
            'recid', '100', status=PIDStatus.REGISTERED)
        db.session.commit()
        # Redirected PID
        pid_red = PersistentIdentifier.create(
            'recid', '101', status=PIDStatus.REGISTERED)
        pid_red.redirect(pid_ok)
        # Redirect PID - different endpoint
        pid_doi = PersistentIdentifier.create(
            'doi', '10.1234/foo', status=PIDStatus.REGISTERED)
        pid_red_doi = PersistentIdentifier.create(
            'recid', '102', status=PIDStatus.REGISTERED)
        pid_red_doi.redirect(pid_doi)
        db.session.commit()
    with app.test_client() as client:
        # PID deleted
        headers = [('Accept', 'application/json')]
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_del.pid_value),
            headers=headers)
        assert res.status_code == 410
        # PID missing object
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_noobj.pid_value),
            headers=headers)
        assert res.status_code == 500
        # Redirected invalid endpoint
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_red_doi.pid_value),
            headers=headers)
        assert res.status_code == 500
        # Redirected
        res = client.get(
            url_for('invenio_records_rest.recid_item',
                    pid_value=pid_red.pid_value),
            headers=headers)
        assert res.status_code == 301
def license_record(db, es, sip_metadata_types):
    """Create a license record."""
    cc_by = Record.create({
        "$schema":
            "https://zenodo.org/schemas/licenses/license-v1.0.0.json",
        "domain_content": True,
        "domain_data": True,
        "domain_software": True,
        "family": "",
        "id": "CC-BY-4.0",
        "maintainer": "Creative Commons",
        "od_conformance": "approved",
        "osd_conformance": "not reviewed",
        "status": "active",
        "title": "Creative Commons Attribution International 4.0",
        "url": "https://creativecommons.org/licenses/by/4.0/"
    })
    cc0 = Record.create({
        "$schema":
            "https://zenodo.org/schemas/licenses/license-v1.0.0.json",
        "domain_content": True,
        "domain_data": True,
        "domain_software": True,
        "family": "",
        "id": "CC0-1.0",
        "maintainer": "Creative Commons",
        "od_conformance": "approved",
        "osd_conformance": "not reviewed",
        "status": "active",
        "title": "CC0 1.0",
        "url": "https://creativecommons.org/publicdomain/zero/1.0/"
    })
    licenses = [cc_by, cc0]

    # Register a PID for every license, commit, then index for search.
    for lic in licenses:
        PersistentIdentifier.create(pid_type='od_lic',
                                    pid_value=lic['id'],
                                    object_type='rec',
                                    object_uuid=lic.id,
                                    status='R')
    db.session.commit()
    for lic in licenses:
        RecordIndexer().index_by_id(lic.id)
    current_search.flush_and_refresh(index='licenses')
    # Hand back the CC0 record (second in the list).
    return licenses[1]
def test_app_fixture_lacks_db_isolation_step1(pids_count, app):
    """Create one PID and verify the table count grows by exactly one."""
    before = PersistentIdentifier.query.count()
    assert before == pids_count

    PersistentIdentifier.create(pid_type='type1', pid_value='value1')

    # The #PIDs must have incremented.
    assert PersistentIdentifier.query.count() == before + 1
def test_redirect(logger, app):
    """Test redirection."""
    with app.app_context():
        # Two registered target PIDs on different pid types.
        pid1 = PersistentIdentifier.create('rec', '1',
                                           status=PIDStatus.REGISTERED,
                                           object_type='rec',
                                           object_uuid=uuid.uuid4())
        pid2 = PersistentIdentifier.create('doi', '2',
                                           status=PIDStatus.REGISTERED,
                                           object_type='rec',
                                           object_uuid=uuid.uuid4())

        # Can't redirect these statuses.
        i = 10
        for s in [PIDStatus.NEW, PIDStatus.RESERVED, PIDStatus.DELETED, ]:
            pid = PersistentIdentifier.create('rec', str(i), status=s)
            i += 1
            pytest.raises(PIDInvalidAction, pid.redirect, pid1)

        pid = PersistentIdentifier.create('rec', str(i),
                                          status=PIDStatus.REGISTERED)
        # Can't redirect to a non-existing pid.
        pytest.raises(PIDDoesNotExistError, pid.redirect,
                      PersistentIdentifier())

        pid.redirect(pid1)
        # The redirect is logged with the pid in the 'extra' dict.
        assert logger.info.call_args[0][0].startswith("Redirected")
        assert 'pid' in logger.info.call_args[1]['extra']
        assert pid.status == PIDStatus.REDIRECTED
        # Redirect clears the object type but keeps an object uuid.
        assert pid.object_type is None
        assert pid.object_uuid is not None
        new_pid = pid.get_redirect()
        assert new_pid.pid_type == 'rec'
        assert new_pid.pid_value == '1'

        # You can redirect an already redirected pid.
        pid.redirect(pid2)
        new_pid = pid.get_redirect()
        assert new_pid.pid_type == 'doi'
        assert new_pid.pid_value == '2'

        # Assign with SQLError: the failure is logged and re-raised.
        with patch('invenio_pidstore.models.db.session.begin_nested') as mock:
            mock.side_effect = SQLAlchemyError()
            pytest.raises(SQLAlchemyError, pid.redirect, '1')
            assert logger.exception.call_args[0][0].startswith(
                "Failed to redirect")
            assert 'pid' in logger.exception.call_args[1]['extra']
def test_file_download_ui(base_app, objects, db):
    """Test get buckets."""
    app = base_app
    # Wire up the records-UI endpoints, including a file-download view.
    app.config.update(dict(
        RECORDS_UI_DEFAULT_PERMISSION_FACTORY=None,  # No permission checking
        RECORDS_UI_ENDPOINTS=dict(
            recid=dict(
                pid_type='recid',
                route='/records/<pid_value>',
            ),
            recid_files=dict(
                pid_type='recid',
                route='/records/<pid_value>/files/<filename>',
                view_imp='invenio_files_rest.views.file_download_ui',
            ),
        )
    ))
    InvenioRecords(app)
    InvenioPIDStore(app)
    InvenioRecordsUI(app)

    obj1 = objects[0]

    with app.app_context():
        # Record 1 - Live record with one attached file.
        rec_uuid = uuid.uuid4()
        PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec_uuid,
            status=PIDStatus.REGISTERED)
        Record.create({
            'title': 'Registered',
            'recid': 1,
            'files': [
                {'filename': obj1.key, 'bucket': str(obj1.bucket_id),
                 'checksum': 'invalid'},
            ]
        }, id_=rec_uuid)
        db.session.commit()

        main_url = url_for('invenio_records_ui.recid', pid_value='1')
        file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1',
            filename=obj1.key)
        # Empty and unknown filenames must both 404 below.
        no_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='')
        invalid_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='no')

    with app.test_client() as client:
        res = client.get(main_url)
        assert res.status_code == 200
        res = client.get(file_url)
        assert res.status_code == 200
        res = client.get(no_file_url)
        assert res.status_code == 404
        res = client.get(invalid_file_url)
        assert res.status_code == 404
def legacy_recid_minter(legacy_recid, uuid):
    """Mint a legacy recid PID attached to the given record uuid."""
    # NOTE: the ``uuid`` parameter shadows the stdlib ``uuid`` module name;
    # kept as-is since callers may pass it by keyword.
    pid_type = current_app.config["CDS_ILS_RECORD_LEGACY_PID_TYPE"]
    PersistentIdentifier.create(
        pid_type=pid_type,
        pid_value=legacy_recid,
        object_type="rec",
        object_uuid=uuid,
        status=PIDStatus.REGISTERED,
    )
def test_filter_uuid(app):
    """Test FilterUUID."""
    with app.app_context():
        target_uuid = uuid.uuid4()
        PersistentIdentifier.create(
            'doi', '10.1234/a', object_type='tst', object_uuid=target_uuid)

        # Filtering the PID table by the object uuid must match exactly one.
        uuid_filter = FilterUUID(PersistentIdentifier.object_uuid, 'Test')
        filtered = uuid_filter.apply(
            PersistentIdentifier.query, str(target_uuid), None)
        assert filtered.count() == 1
def mint_record_pid(pid_type, pid_field, record):
    """Mint the given PID for the given record."""
    value = record[pid_field]
    PersistentIdentifier.create(
        pid_type=pid_type,
        pid_value=value,
        object_type="rec",
        object_uuid=record.id,
        status=PIDStatus.REGISTERED,
    )
    # Persist immediately.
    db.session.commit()
def test_filter_uuid(app, db):
    """Test FilterUUID."""
    with app.app_context():
        target_uuid = uuid.uuid4()
        PersistentIdentifier.create(
            'doi', '10.1234/a', object_type='tst', object_uuid=target_uuid)

        # Exactly one PID carries this object uuid.
        uuid_filter = FilterUUID(PersistentIdentifier.object_uuid, 'Test')
        filtered = uuid_filter.apply(
            PersistentIdentifier.query, str(target_uuid), None)
        assert filtered.count() == 1
def minter(pid_type, pid_field, record):
    """Mint the given PID for the given record."""
    value = record[pid_field]
    PersistentIdentifier.create(
        pid_type=pid_type,
        pid_value=value,
        object_type="rec",
        object_uuid=record.id,
        status=PIDStatus.REGISTERED,
    )
    # Advance the record-identifier sequence.
    RecordIdentifier.next()
def test_file_permissions(app, db, test_object,  # fixtures
                          user, access_right, expected):
    """Test file permissions."""
    # Create test users (credentials are placeholders from fixtures).
    admin = User(email='*****@*****.**', password='******')
    owner = User(email='*****@*****.**', password='******')
    auth = User(email='*****@*****.**', password='******')
    db.session.add_all([admin, owner, auth])
    # Grant the admin user the 'admin-access' action.
    db.session.add(
        ActionUsers.allow(ActionNeed('admin-access'), user=admin)
    )

    # Create test record owned by user id 2 with one attached file.
    rec_uuid = uuid.uuid4()
    PersistentIdentifier.create(
        'recid', '1', object_type='rec', object_uuid=rec_uuid,
        status=PIDStatus.REGISTERED
    )
    Record.create({
        'recid': 1,
        'owners': [2],
        'access_right': access_right,
        '_files': [
            {
                'key': test_object.key,
                'bucket': str(test_object.bucket_id),
                'checksum': 'invalid'
            },
        ]
    }, id_=rec_uuid)
    db.session.add(
        RecordsBuckets(record_id=rec_uuid, bucket_id=test_object.bucket_id)
    )
    file_url = url_for(
        'invenio_records_ui.recid_files',
        pid_value='1',
        filename=test_object.key
    )
    db.session.commit()

    with app.test_client() as client:
        if user:
            # Login as user by writing the session directly.
            with client.session_transaction() as sess:
                sess['user_id'] = User.query.filter_by(
                    email='{}@zenodo.org'.format(user)).one().id
                sess['_fresh'] = True

        # The parametrized 'expected' status encodes the permission outcome.
        res = client.get(file_url)
        assert res.status_code == expected
def test_template_filters(app):
    """Test the template filters."""
    with app.app_context():
        # Exercise the 'pid_exists' template filter.
        exists = app.jinja_env.filters['pid_exists']

        # Unknown before creation...
        assert not exists('pid_val0', pidtype='mock_t')

        PersistentIdentifier.create('mock_t', 'pid_val0')
        db.session.commit()

        # ...known afterwards, but only for the exact value/type pair.
        assert exists('pid_val0', pidtype='mock_t')
        assert not exists('foo', pidtype='mock_t')
        assert not exists('pid_val0', pidtype='foo')
def test_build_recid_to_uuid_map_numeric_pid_allowed_for_lit_and_con(isolated_app):
    """Numeric 'lit' and 'con' PIDs both appear in the recid->uuid map."""
    pid1 = PersistentIdentifier.create(pid_type='lit', pid_value='123',
                                       object_type='rec',
                                       object_uuid=uuid.uuid4())
    pid2 = PersistentIdentifier.create(pid_type='con', pid_value='1234',
                                       object_type='rec',
                                       object_uuid=uuid.uuid4())
    citations_lookup = {
        pid1.pid_value: 5,
        pid2.pid_value: 6,
    }

    result = _build_recid_to_uuid_map(citations_lookup)

    # BUG FIX: the original asserted ``result.keys().sort() == [...].sort()``;
    # ``list.sort()`` returns None, so that comparison was always
    # ``None == None`` and the test could never fail. Use sorted() to
    # actually compare the two key sets.
    assert sorted(result.keys()) == sorted([pid1.object_uuid,
                                            pid2.object_uuid])
def minter(record_uuid, data, pid_type, key):
    """Mint PIDs for a record."""
    # Primary PID taken from the given key of the record data.
    pid = PersistentIdentifier.create(
        pid_type,
        data[key],
        object_type="rec",
        object_uuid=record_uuid,
        status=PIDStatus.REGISTERED,
    )
    # Register every non-empty alternate identifier under its scheme.
    for scheme, value in data["identifiers"].items():
        if not value:
            continue
        PersistentIdentifier.create(
            scheme,
            value,
            object_type="rec",
            object_uuid=record_uuid,
            status=PIDStatus.REGISTERED,
        )
    return pid
def test_template_filters(app):
    """Test the template filters."""
    with app.app_context():
        # Exercise the "pid_exists" template filter.
        exists = app.jinja_env.filters["pid_exists"]

        # Not present before creation.
        assert not exists("pid_val0", pidtype="mock_t")

        PersistentIdentifier.create("mock_t", "pid_val0")
        db.session.commit()

        # Present afterwards, only for the exact value/type combination.
        assert exists("pid_val0", pidtype="mock_t")
        assert not exists("foo", pidtype="mock_t")
        assert not exists("pid_val0", pidtype="foo")
def test_repr(app):
    """Test representation."""
    with app.app_context():
        # PID with an attached object: repr shows type, value, object, status.
        pid_with_obj = PersistentIdentifier.create(
            'recid', '1', status=PIDStatus.REGISTERED,
            object_type='rec',
            object_uuid='de3bb351-bc1a-4e51-8605-c6cd9589a560')
        expected = (
            "<PersistentIdentifier recid:1 / "
            "rec:de3bb351-bc1a-4e51-8605-c6cd9589a560 (R)>"
        )
        assert str(pid_with_obj) == expected

        # PID without an object: object part is omitted.
        pid_plain = PersistentIdentifier.create(
            'recid', '2', status=PIDStatus.REGISTERED)
        assert str(pid_plain) == "<PersistentIdentifier recid:2 (R)>"
def create_pids(record_uuid, pids):
    """Create persistent identifiers."""
    for pid in pids:
        # A provider is optional on the incoming PID descriptors.
        provider_name = pid.provider.pid_provider if pid.provider else None
        PersistentIdentifier.create(
            pid_type=pid.pid_type,
            pid_value=pid.pid_value,
            pid_provider=provider_name,
            object_type='rec',
            object_uuid=record_uuid,
            status=PIDStatus.REGISTERED,
        )
    db.session.commit()
def funder_record(db):
    """Create a funder record."""
    data = dict(
        doi='10.13039/501100000780',
        name='European Commission',
        acronyms=['EC'],
    )
    funder = Record.create(data)
    # Register the funder DOI as a PID pointing at the record.
    PersistentIdentifier.create(
        pid_type='frdoi',
        pid_value=funder['doi'],
        object_type='rec',
        object_uuid=funder.id,
        status='R',
    )
    db.session.commit()
    return funder
def migrate_record(record_uuid, logger=None):
    """Migrate a record.

    Transforms the record to the new schema, files provisional community
    inclusion requests, and registers its DOI. On any unexpected failure
    the transaction is rolled back, the record's recid PID is set back to
    RESERVED, and the error is re-raised.
    """
    try:
        # Migrate record
        record = Record.get_record(record_uuid)
        if '$schema' in record:
            # Presence of '$schema' marks an already-migrated record.
            if logger:
                logger.info("Record already migrated.")
            return
        record = transform_record(record)
        provisional_communities = record.pop('provisional_communities', None)
        record.commit()
        # Create provisional communities.
        if provisional_communities:
            for c_id in provisional_communities:
                try:
                    c = Community.get(c_id)
                    if c:
                        InclusionRequest.create(c, record, notify=False)
                    else:
                        if logger:
                            logger.warning(
                                "Community {0} does not exists "
                                "(record {1}).".format(
                                    c_id, str(record.id)))
                except InclusionRequestExistsError:
                    # Already requested — fine, just log it.
                    if logger:
                        logger.warning("Inclusion request exists.")
        # Register DOI
        doi = record.get('doi')
        if doi:
            # 10.5281 is the local (Zenodo/DataCite) DOI prefix; external
            # DOIs are only RESERVED, not REGISTERED.
            is_internal = doi.startswith('10.5281')
            PersistentIdentifier.create(
                pid_type='doi',
                pid_value=doi,
                pid_provider='datacite' if is_internal else None,
                object_type='rec',
                object_uuid=record_uuid,
                status=(
                    PIDStatus.REGISTERED
                    if is_internal
                    else PIDStatus.RESERVED),
            )
        db.session.commit()
    except NoResultFound:
        # Record row is gone — nothing to migrate.
        if logger:
            logger.info("Deleted record - no migration required.")
    except Exception:
        # Roll back and park the recid PID in RESERVED so the failed
        # migration can be retried, then surface the original error.
        db.session.rollback()
        pid = PersistentIdentifier.get_by_object('recid', 'rec', record_uuid)
        pid.status = PIDStatus.RESERVED
        db.session.commit()
        raise
def test_remote_openaire_loader(app, db):
    """Test the remote OAI-PMH OpenAIRE loader."""
    loader = RemoteOAIRELoader()
    # Without a matching funder record the loader must fail.
    pytest.raises(OAIRELoadingError, list, loader.iter_grants())

    # Register the EC funder DOI so grants can be resolved.
    rec_uuid = uuid.uuid4()
    PersistentIdentifier.create(
        'frdoi', '10.13039/501100000925',
        object_type='rec', object_uuid=rec_uuid, status='R')
    Record.create({'acronyms': ['EC']}, id_=rec_uuid)

    grants = list(loader.iter_grants())
    assert len(grants) == 5
def minimal_record_model(db, minimal_record, sip_metadata_types):
    """Minimal record."""
    created = datetime.utcnow() - timedelta(days=1)
    model = RecordMetadata()
    # Backdate creation by a day; update one day after creation.
    model.created = created
    model.updated = created + timedelta(days=1)
    model.version_id = 0

    record = ZenodoRecord(minimal_record, model=model)
    PersistentIdentifier.create(
        'recid', '123', status=PIDStatus.REGISTERED,
        object_type='rec', object_uuid=record.id)
    db.session.commit()
    return record
def test_records_ui_export(app, db, full_record):
    """Test export pages."""
    record = Record.create(full_record)
    PersistentIdentifier.create(
        'recid', '1', object_type='rec', object_uuid=record.id,
        status=PIDStatus.REGISTERED)
    db.session.commit()

    formats = app.config['ZENODO_RECORDS_EXPORTFORMATS']
    with app.test_client() as client:
        for fmt, serializer in formats.items():
            res = client.get(url_for(
                'invenio_records_ui.recid_export', pid_value='1',
                format=fmt))
            # BUG FIX: the original wrote
            #   assert res.status_code == 410 if val is None else 200
            # which parses as ``(res.status_code == 410) if ... else 200``,
            # so for configured formats it asserted the truthy literal 200
            # and could never fail. Parenthesize the expected status.
            assert res.status_code == (410 if serializer is None else 200)
def test_object_formatter(app):
    """Test FilterUUID."""
    @app.route('/<id>')
    def test_detail(id=None):
        return str(id)

    with app.test_request_context():
        # Map the 'tst' object type to the view registered above.
        app.config['PIDSTORE_OBJECT_ENDPOINTS']['tst'] = 'test_detail'

        # A PID with a resolvable object renders a 'View' link.
        pid_with_object = PersistentIdentifier.create(
            'doi', '10.1234/a', object_type='tst', object_uuid=uuid.uuid4())
        assert 'View' in object_formatter(None, None, pid_with_object, None)

        # A PID without an object renders nothing.
        pid_plain = PersistentIdentifier.create(
            'doi', '10.1234/b',
        )
        assert object_formatter(None, None, pid_plain, None) == ''
def test_oaire_dumper(db, sqlite_tmpdb):
    """Dump grants to sqlite and load them back."""
    rec_uuid = uuid.uuid4()
    PersistentIdentifier.create(
        'frdoi', '10.13039/501100000925',
        object_type='rec', object_uuid=rec_uuid, status='R')
    Record.create({'acronyms': ['EC']}, id_=rec_uuid)

    # We expect to harvest 5 record from the MockSickle.
    # with 'commit_batch_size=2', we will make 3 commits to sqlite db
    dumper = OAIREDumper(destination=sqlite_tmpdb)
    dumper.dump(commit_batch_size=2)

    records = list(LocalOAIRELoader(source=sqlite_tmpdb).iter_grants())
    assert len(records) == 5
def records():
    """Load test data fixture."""
    import uuid
    from invenio_records.api import Record
    from invenio_pidstore.models import PersistentIdentifier, PIDStatus

    create_test_user()
    indexer = RecordIndexer()

    # Record 1 - Live record (restricted access, owned by users 1 and 2).
    with db.session.begin_nested():
        rec_uuid = uuid.uuid4()
        pid1 = PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec_uuid,
            status=PIDStatus.REGISTERED)
        Record.create({
            'title': 'Registered',
            'description': 'This is an awesome description',
            'control_number': '1',
            'access_right': 'restricted',
            'access_conditions': 'fuu',
            'owners': [1, 2],
            'recid': 1
        }, id_=rec_uuid)
        indexer.index_by_id(pid1.object_uuid)

    db.session.commit()

    # Give the search index time to refresh before tests query it.
    sleep(3)
def zenodo_doi_updater(record_uuid, data):
    """Update the DOI (only external DOIs)."""
    assert 'recid' in data
    doi = data.get('doi')
    assert doi
    assert idutils.is_doi(doi)

    # If the DOI is the same as an already generated one, do nothing.
    if doi == doi_generator(data['recid']):
        return

    if is_local_doi(doi):
        # Zenodo DOI, but different than recid
        # ERROR, user provided a custom ZENODO DOI!
        raise PIDValueError('doi', doi)

    existing = PersistentIdentifier.get_by_object(
        pid_type='doi', object_type='rec', object_uuid=record_uuid)
    if existing.pid_value == doi:
        # Already pointing at this DOI — nothing to update.
        return

    # Swap the DOI PID: drop the old one, reserve the new one.
    with db.session.begin_nested():
        db.session.delete(existing)
    return PersistentIdentifier.create(
        'doi',
        doi,
        object_type='rec',
        object_uuid=record_uuid,
        status=PIDStatus.RESERVED,
    )