def test_minter_saves_texkey_on_hidden_collection(inspire_app, override_config):
    """Texkeys are minted for hidden-collection records once the feature flag is on."""
    base_data = {
        "authors": [{"full_name": "Janeway, K."}],
        "publication_info": [{"year": 2000}],
        "_collections": ["HAL Hidden"],
    }
    record_data = faker.record("lit", data=base_data)

    # With the minter disabled, creating the record must not mint any texkey.
    with override_config(FEATURE_FLAG_ENABLE_TEXKEY_MINTER=False):
        record = LiteratureRecord.create(data=record_data)
        db.session.commit()
        stored = LiteratureRecord.get_record_by_pid_value(record["control_number"])
        assert "texkeys" not in stored

    # Re-saving the same record with the minter enabled mints exactly one texkey.
    with override_config(FEATURE_FLAG_ENABLE_TEXKEY_MINTER=True):
        record = LiteratureRecord.get_record_by_pid_value(record["control_number"])
        record.update(dict(record))
        assert len(record["texkeys"]) == 1
        db.session.commit()
        stored = LiteratureRecord.get_record_by_pid_value(record["control_number"])
        assert len(stored["texkeys"]) == 1
def test_literature_json_put_redirected_record(inspire_app):
    """PUT on a redirected pid updates the original record, not the redirect target."""
    token = create_user_and_token()
    headers = {
        "Authorization": "BEARER " + token.access_token,
        "If-Match": '"2"',
    }
    record_redirected = create_record("lit")
    record = create_record(
        "lit", data={"deleted_records": [record_redirected["self"]]}
    )
    payload = dict(record_redirected)
    payload["deleted"] = True

    with inspire_app.test_client() as client:
        response = client.put(
            "/literature/{}".format(record_redirected.control_number),
            headers=headers,
            json=payload,
        )
    assert response.status_code == 200

    redirected_record_from_db = LiteratureRecord.get_record_by_pid_value(
        record_redirected.control_number, original_record=True
    )
    record_from_db = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # The original (redirected-away) record got the update...
    assert dict(redirected_record_from_db) == payload
    # ...while the redirect target is untouched.
    assert dict(record_from_db) == dict(record)
def test_literature_export_to_cds_view(inspire_app):
    """The export-to-CDS endpoint sets the CDS flag and keeps other export flags."""
    cataloger = create_user(role=Roles.cataloger.value)
    literature1 = create_record("lit")
    literature2 = create_record(
        "lit", data={"_export_to": {"CDS": False, "HAL": False}}
    )

    with inspire_app.test_client() as client:
        login_user_via_session(client, email=cataloger.email)
        response = client.post(
            "/assign/export-to-cds",
            data=orjson.dumps(
                {
                    "literature_recids": [
                        literature1.control_number,
                        literature2.control_number,
                    ]
                }
            ),
            content_type="application/json",
        )
    assert response.status_code == 200

    literature1 = LiteratureRecord.get_record_by_pid_value(literature1.control_number)
    literature2 = LiteratureRecord.get_record_by_pid_value(literature2.control_number)
    assert literature1["_export_to"] == {"CDS": True}
    assert literature2["_export_to"] == {"CDS": True, "HAL": False}
def test_authors_signature_blocks_and_uuids_added_after_create_and_update(
    app, clear_environment
):
    """Author signature blocks and UUIDs are computed on create and recomputed on update."""
    data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "Test a valid record"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
        "authors": [{"full_name": "Doe, John"}],
    }
    record = LiteratureRecord.create(data)
    db.session.commit()
    control_number = record["control_number"]

    db_record = LiteratureRecord.get_record_by_pid_value(control_number)
    assert db_record["authors"][0]["signature_block"] == "Dj"
    assert "uuid" in db_record["authors"][0]

    # Replacing the author list recomputes the signature block.
    data.update({"authors": [{"full_name": "Ellis, Jane"}]})
    record.update(data)
    db.session.commit()
    record_updated = LiteratureRecord.get_record_by_pid_value(control_number)
    assert record_updated["authors"][0]["signature_block"] == "ELj"
def test_redirect_and_delete_many_records_from_deleted_records_field(inspire_app):
    """Every record listed in ``deleted_records`` gets redirected to the updating record."""
    records_to_delete = [create_record("lit") for _ in range(2)]
    record = create_record("lit")

    data = dict(record)
    data["deleted_records"] = [rec["self"] for rec in records_to_delete]
    record.update(data)
    assert len(record["deleted_records"]) == 2

    # Both old pids are now redirected...
    for deleted in records_to_delete:
        assert deleted.control_number_pid.is_redirected()

    # ...and resolving either of them yields the surviving record.
    for deleted in records_to_delete:
        redirected = LiteratureRecord.get_record_by_pid_value(deleted.control_number)
        assert redirected.id == record.id
def test_migrator_deleted_deleted_records_correctly_when_pid_redirection_is_turned_off(
    inspire_app, clean_celery_session, override_config):
    """With pid redirection disabled, migrating a merge deletes the old record.

    Migrates record 98765, then migrates record 31415 which declares 98765 as
    merged into it (MARC field 981). With
    ``FEATURE_FLAG_ENABLE_REDIRECTION_OF_PIDS`` off, the old pid must end up
    DELETED (not redirected) and the old record must carry the ``deleted`` flag.
    """
    raw_record = (b"<record>"
                  b' <controlfield tag="001">98765</controlfield>'
                  b' <datafield tag="024" ind1="7" ind2=" ">'
                  b' <subfield code="9">DOI</subfield>'
                  b' <subfield code="a">10.1000/a_doi</subfield>'
                  b" </datafield>"
                  b' <datafield tag="245" ind1=" " ind2=" ">'
                  b' <subfield code="a">A record to be merged</subfield>'
                  b" </datafield>"
                  b' <datafield tag="980" ind1=" " ind2=" ">'
                  b' <subfield code="a">HEP</subfield>'
                  b" </datafield>"
                  b"</record>")
    migrate_and_insert_record(raw_record)
    db.session.commit()
    record = LiteratureRecord.get_record_by_pid_value("98765")
    # The DOI pid minted for the first record must point at the migrated record.
    assert PersistentIdentifier.get("doi", "10.1000/a_doi").object_uuid == record.id
    # Second record: merges 98765 via MARC field 981.
    # NOTE(review): its DOI reads "101000/a_doi" (no dot) — presumably
    # intentional so it does not clash with the first record's DOI; confirm.
    raw_record = (b"<record>"
                  b' <controlfield tag="001">31415</controlfield>'
                  b' <datafield tag="024" ind1="7" ind2=" ">'
                  b' <subfield code="9">DOI</subfield>'
                  b' <subfield code="a">101000/a_doi</subfield>'
                  b" </datafield>"
                  b' <datafield tag="245" ind1=" " ind2=" ">'
                  b' <subfield code="a">A record that was merged</subfield>'
                  b" </datafield>"
                  b' <datafield tag="980" ind1=" " ind2=" ">'
                  b' <subfield code="a">HEP</subfield>'
                  b" </datafield>"
                  b' <datafield tag="981" ind1=" " ind2=" ">'
                  b' <subfield code="a">98765</subfield>'
                  b" </datafield>"
                  b"</record>")
    new_config = {"FEATURE_FLAG_ENABLE_REDIRECTION_OF_PIDS": False}
    with override_config(**new_config):
        migrate_and_insert_record(raw_record)
        db.session.commit()
    new_pid = PersistentIdentifier.query.filter_by(
        pid_type="lit", pid_value="31415").one_or_none()
    assert new_pid
    old_pid = PersistentIdentifier.query.filter_by(
        pid_type="lit", pid_value="98765").one_or_none()
    old_record = LiteratureRecord.get_record_by_pid_value(98765)
    # With redirection off, the old pid is deleted outright instead of redirected.
    assert old_pid.status == PIDStatus.DELETED
    assert old_record["deleted"] is True
def test_redirect_ignores_not_existing_pids(inspire_app):
    """``deleted_records`` entries pointing at unknown pids are silently skipped."""
    record = create_record("lit")
    data = dict(record)
    data["deleted_records"] = [
        {"$ref": "http://localhost:8080/api/literature/987654321"}
    ]
    record.update(data)

    # No pid and no redirect were created for the missing recid.
    missing_pids = PersistentIdentifier.query.filter_by(
        pid_type="lit", pid_value="987654321"
    )
    assert missing_pids.count() == 0
    assert InspireRedirect.query.count() == 0
    with pytest.raises(PIDDoesNotExistError):
        LiteratureRecord.get_record_by_pid_value("987654321")
def test_authors_signature_blocks_and_uuids_added_after_create_and_update(
    inspire_app, clean_celery_session
):
    """Signature blocks/UUIDs are set on create and update, with one version per save."""
    data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "Test a valid record"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
        "authors": [{"full_name": "Doe, John"}],
    }
    record = LiteratureRecord.create(data)
    db.session.commit()
    control_number = record["control_number"]

    record = LiteratureRecord.get_record_by_pid_value(control_number)
    assert record["authors"][0]["signature_block"] == "Dj"
    assert "uuid" in record["authors"][0]
    versions = record.model.versions.all()
    assert len(versions) == 1
    assert versions[-1].json == record

    # Swap the author: the signature block is recomputed and a version is added.
    data.update(
        {
            "authors": [{"full_name": "Ellis, Jane"}],
            "control_number": record["control_number"],
        }
    )
    record.update(data)
    db.session.commit()

    record = LiteratureRecord.get_record_by_pid_value(control_number)
    assert record["authors"][0]["signature_block"] == "ELj"
    versions = record.model.versions.all()
    assert len(versions) == 2
    assert versions[-1].json == record
def test_gracefully_handle_records_updating_in_wrong_order(
    inspire_app, clean_celery_session):
    """Re-indexing a stale record version must not clobber newer indexed data.

    Flow: index version 1 (which cites another record), update twice (drop
    the reference, then change the title), index the latest version, then
    replay the stale middle version. Per the final asserts, the stale run
    recomputes the citation count (down to 0, since that version has no
    references) while the newer title stays in the index.
    """
    # We want to run indexing in weird order, so disable auto indexing
    models_committed.disconnect(index_after_commit)
    cited_record = LiteratureRecord.create(data=faker.record("lit"))
    record_data = faker.record(
        "lit", literature_citations=[cited_record.control_number])
    record = LiteratureRecord.create(data=record_data)
    db.session.commit()
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # Index version 1: the citation is counted.
    index_record(record.id, record.model.versions[-1].version_id)
    assert LiteratureSearch().get_source(
        cited_record.id)["citation_count"] == 1
    # Version 2: drop the reference to the cited record (not indexed yet).
    data = dict(record)
    del data["references"]
    record.update(data)
    db.session.commit()
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # Version 3: change the title (still not indexed).
    data = dict(record)
    data["titles"][0] = {"title": "New Title"}
    record.update(data)
    db.session.commit()
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # Index the latest version: the new title appears in the index.
    index_record(record.id, record.model.versions[-1].version_id)
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    assert LiteratureSearch().get_source(
        cited_record.id)["citation_count"] == 1
    assert LiteratureSearch().get_source(record.id)["titles"] == [{
        "title": "New Title"
    }]
    # Replay the stale middle version: citation count drops to 0, but the
    # newer title must remain untouched in the index.
    index_record(record.id, record.model.versions[-2].version_id)
    assert LiteratureSearch().get_source(
        cited_record.id)["citation_count"] == 0
    assert LiteratureSearch().get_source(record.id)["titles"] == [{
        "title": "New Title"
    }]
    # Restore auto indexing for subsequent tests.
    models_committed.connect(index_after_commit)
def test_get_record_by_pid_value_returns_original_record_when_requested(inspire_app):
    """``original_record=True`` bypasses pid redirection and returns the old record."""
    redirected_record = create_record("lit")
    record = create_record(
        "lit", data={"deleted_records": [dict(redirected_record["self"])]}
    )

    original_record = LiteratureRecord.get_record_by_pid_value(
        redirected_record.control_number, original_record=True
    )
    new_record = LiteratureRecord.get_record_by_pid_value(
        redirected_record.control_number
    )

    # Without the flag we follow the redirect; with it we get the old record back.
    assert original_record.id != new_record.id
    assert original_record.id == redirected_record.id
    assert new_record.id == record.id
def test_conference_paper_get_updated_reference_conference_when_updates_one_conference(
    inspire_app, clean_celery_session
):
    """Only the conference added by the update is reported as newly linked."""
    conference_1 = ConferencesRecord.create(faker.record("con"))
    ref_1 = f"http://localhost:8000/api/conferences/{conference_1['control_number']}"
    conference_2 = ConferencesRecord.create(faker.record("con"))
    ref_2 = f"http://localhost:8000/api/conferences/{conference_2['control_number']}"

    lit_data = {
        "publication_info": [{"conference_record": {"$ref": ref_1}}],
        "document_type": ["conference paper"],
    }
    record = LiteratureRecord.create(faker.record("lit", lit_data))
    control_number = record["control_number"]
    db.session.commit()

    record = LiteratureRecord.get_record_by_pid_value(control_number)
    versions = record.model.versions.all()
    assert len(versions) == 1
    assert versions[-1].json == record

    # Link the second conference on top of the first.
    updated_data = copy.deepcopy(dict(record))
    updated_data["publication_info"].append({"conference_record": {"$ref": ref_2}})
    record.update(updated_data)
    db.session.commit()

    record = LiteratureRecord.get_record_by_pid_value(control_number)
    # Only conference_2 is newly linked by this update.
    assert sorted(record.get_newest_linked_conferences_uuid()) == [conference_2.id]
    versions = record.model.versions.all()
    assert len(versions) == 2
    assert versions[-1].json == record
def test_cds_sync_continues_when_some_records_fails(inspire_app_for_cds_sync):
    """A failure on one record does not stop the CDS identifier sync for the rest."""
    expected_identifiers = {
        "1203988": [{"value": "1273173", "schema": "CDS"}],
        "1314109": [{"value": "1742265", "schema": "CDS"}],
        "1314110": [{"value": "2003162", "schema": "CDS"}],
    }
    LiteratureRecord.create(faker.record("lit", data={"control_number": 1203988}))
    # This one already carries its CDS identifier before the sync runs.
    LiteratureRecord.create(
        faker.record(
            "lit",
            data={
                "control_number": 1314109,
                "external_system_identifiers": [
                    {"schema": "CDS", "value": "1742265"}
                ],
            },
        )
    )
    LiteratureRecord.create(faker.record("lit", data={"control_number": 1314110}))
    db.session.commit()

    sync_identifiers("2020-07-01")

    runs = CDSRun.query.all()
    assert len(runs) == 1
    assert runs[0].status == CDSRunStatus.FINISHED
    assert runs[0].message == ""
    for recid, expected in expected_identifiers.items():
        synced = LiteratureRecord.get_record_by_pid_value(recid)
        assert synced["external_system_identifiers"] == expected
def test_record_versioning(app, clear_environment):
    """Each commit bumps the record's DB version id and adds one version row."""
    data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "Test a valid record"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
    }
    record = LiteratureRecord.create(data)
    control_number = record["control_number"]
    db.session.commit()

    assert record.model.version_id == 1
    assert record.model.versions.count() == 1
    # Right after creation there is no previous version to look at.
    with pytest.raises(AttributeError):
        record._previous_version

    record_updated = LiteratureRecord.get_record_by_pid_value(control_number)
    record_updated.update(dict(record_updated))
    db.session.commit()

    assert record_updated.model.version_id == 2
    assert record_updated.model.versions.count() == 2
    assert record._previous_version
def hal_push(self, recid, record_version_id):
    """Celery task to push a record to HAL.

    Args:
        self (celery.Task): the task
        recid (Int): inspire record to push to HAL.
        record_version_id (Int): db version for record that we're trying to push
    """
    LOGGER.info("New hal_push task", recid=recid)
    try:
        record = LiteratureRecord.get_record_by_pid_value(recid)
        # The DB record is older than the version this task was queued for:
        # the read is stale, so abort (surfaced via the re-raise below).
        if record.model.version_id < record_version_id:
            raise StaleDataError
        _hal_push(record)
        LOGGER.info("hal_push task successfully completed.", recid=recid)
        # Release the editor soft lock only after a successful push, so a
        # failed push keeps the record locked in the editor.
        editor_soft_lock = EditorSoftLock(
            recid=record["control_number"],
            record_version=record.model.version_id,
            task_name=self.name,
        )
        editor_soft_lock.remove_lock()
    except Exception as exc:
        # Translate to a HAL-specific message for the log, then re-raise so
        # celery sees the failure (and can retry according to task config).
        error_message = _get_error_message_from_hal_exception(exc)
        LOGGER.error("hal_push task failed", recid=recid, message=error_message)
        raise
def _get_inspire_record(self):
    """Fetch the literature record for ``self.recid``, guarding against stale reads.

    Returns:
        The record currently stored for ``self.recid``.

    Raises:
        exceptions.RecordNotFoundException: no record exists for the recid.
        exceptions.StaleRecordDBVersionException: ``self.record_db_version``
            is set and the record in the DB is older than that version.
    """
    try:
        inspire_record = LiteratureRecord.get_record_by_pid_value(self.recid)
    except PIDDoesNotExistError as exc:
        raise exceptions.RecordNotFoundException(
            "recid={} not found for pid_type=lit".format(self.recid), from_exc=exc
        )
    # If the record_db_version was given, then ensure we are about to push
    # the right record version.
    # This check is related to the fact the orcid push at this moment is
    # triggered by the signal after_record_update (which happens after a
    # InspireRecord.commit()). This is not the actual commit to the db which
    # might happen at a later stage or not at all.
    # Note that connecting to the proper SQLAlchemy signal would also
    # have issues: https://github.com/mitsuhiko/flask-sqlalchemy/issues/645
    if (
        self.record_db_version
        and inspire_record.model.version_id < self.record_db_version
    ):
        raise exceptions.StaleRecordDBVersionException(
            "Requested push for db version={}, but actual record db"
            " version={}".format(
                self.record_db_version, inspire_record.model.version_id
            )
        )
    return inspire_record
def test_redirected_records_are_not_counted_into_citations(inspire_app):
    """Citations coming from records that were redirected away are not counted."""
    cited = create_record("lit")
    # Cites ``cited`` but will itself be redirected away.
    redirected_citer = create_record(
        "lit", literature_citations=[cited["control_number"]]
    )
    # Cites ``cited`` and redirects ``redirected_citer``.
    surviving_citer = create_record(
        "lit",
        literature_citations=[cited["control_number"]],
        data={"deleted_records": [redirected_citer["self"]]},
    )
    # Cites ``cited`` but gets redirected by a record that cites nothing.
    redirected_citer_2 = create_record(
        "lit", literature_citations=[cited["control_number"]]
    )
    create_record("lit", data={"deleted_records": [redirected_citer_2["self"]]})

    # After the redirections, only ``surviving_citer`` still cites ``cited``.
    cited_from_db = LiteratureRecord.get_record_by_pid_value(cited["control_number"])
    assert cited_from_db.citation_count == 1
    assert cited_from_db.model.citations[0].citer_id == surviving_citer.id
def test_record_versioning(inspire_app, celery_app_with_context, celery_session_worker):
    """Check version ids and version-row counts on create and update.

    NOTE(review): the expected version ids (3 after create, 5 after update)
    presumably account for extra saves performed by background tasks while
    the celery worker is running — confirm against the indexing tasks.
    """
    data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "Test a valid record"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
    }
    record = LiteratureRecord.create(data)
    control_number = record["control_number"]
    db.session.commit()

    assert record.model.version_id == 3
    assert record.model.versions.count() == 1
    assert LiteratureRecord({}) == record._previous_version

    record_updated = LiteratureRecord.get_record_by_pid_value(control_number)
    record_updated.update(dict(record_updated))
    db.session.commit()

    assert record_updated.model.version_id == 5
    assert record_updated.model.versions.count() == 2
    assert record._previous_version
def test_cds_sync_record_when_there_is_already_correct_cds_identifier(
    inspire_app_for_cds_sync,
):
    """Syncing a record that already has the right CDS identifier leaves it unchanged."""
    expected_identifiers = [{"schema": "CDS", "value": "1273173"}]
    LiteratureRecord.create(
        faker.record(
            "lit",
            data={
                "control_number": 1203988,
                "external_system_identifiers": [
                    {"schema": "CDS", "value": "1273173"}
                ],
            },
        )
    )
    db.session.commit()

    sync_identifiers("2020-07-01")

    runs = CDSRun.query.all()
    assert len(runs) == 1
    assert runs[0].status == CDSRunStatus.FINISHED
    assert runs[0].message == ""
    synced = LiteratureRecord.get_record_by_pid_value("1203988")
    assert synced["external_system_identifiers"] == expected_identifiers
def test_add_keywords_updated_desy_bookkeeping(inspire_app):
    """PUT on the curation keywords endpoint appends a ``_desy_bookkeeping`` entry."""
    user = create_user(role=Roles.cataloger.value)
    existing_entry = {"date": "2017-10-16", "expert": "3", "status": "printed"}
    record = create_record("lit", data={"_desy_bookkeeping": [existing_entry]})
    expected_desy_bookkeeping = [
        {"date": "2017-10-16", "expert": "3", "status": "printed"},
        {"identifier": "DA17-kp43aa"},
    ]

    with inspire_app.test_client() as client:
        login_user_via_session(client, email=user.email)
        response = client.put(
            f"/curation/literature/{record['control_number']}/keywords",
            content_type="application/json",
            data=orjson.dumps(
                {"_desy_bookkeeping": {"identifier": "DA17-kp43aa"}}
            ),
        )

    updated_record = LiteratureRecord.get_record_by_pid_value(
        record["control_number"]
    )
    assert response.status_code == 200
    assert updated_record["_desy_bookkeeping"] == expected_desy_bookkeeping
def test_migrate_record_from_mirror_invalidates_local_file_cache_if_no_local_file(
    inspire_app, s3, redis, datadir
):
    """A stale AFS file-location cache entry does not break migration."""
    expected_key = "f43f40833edfd8227c4deb9ad05b321e"
    create_s3_bucket(expected_key)
    afs_url = "http://inspire-afs-web.cern.ch/var/data/files/g97/1940001/content.pdf%3B2"

    with patch.dict(
        current_app.config,
        {"LABS_AFS_HTTP_SERVICE": "http://inspire-afs-web.cern.ch/"},
    ):
        # Seed the cache with a local file path that no longer exists.
        redis.delete("afs_file_locations")
        redis.hset(
            "afs_file_locations",
            afs_url,
            "/api/files/ddb1a354-1d2a-40b6-9cc4-2e823b6bef81/0000000000000000000000000000000000000000",
        )
        raw_record_path = (datadir / "1313624.xml").as_posix()
        migrate_from_file(raw_record_path)

        record = LiteratureRecord.get_record_by_pid_value("1313624")
        assert redis.hlen("afs_file_locations") > 0
        assert record["documents"][0]["original_url"] == afs_url
def test_cds_sync_determines_last_run_date_correctly(inspire_app_for_cds_sync, cli):
    """``cds sync`` picks its starting date correctly from the stored CDSRun history."""
    expected_identifiers = [{"schema": "CDS", "value": "123123"}]
    LiteratureRecord.create(faker.record("lit", data={"control_number": 321321}))
    # Mixed run history: finished, errored, and still-running entries.
    previous_runs = [
        (datetime.date(2020, 12, 24), CDSRunStatus.FINISHED),
        (datetime.date(2020, 12, 25), CDSRunStatus.ERROR),
        (datetime.date(2020, 12, 23), CDSRunStatus.RUNNING),
    ]
    for run_date, run_status in previous_runs:
        db.session.add(
            CDSRun(task_id=uuid.uuid4(), date=run_date, status=run_status)
        )
    db.session.commit()

    cli.invoke(["cds", "sync"])

    synced = LiteratureRecord.get_record_by_pid_value("321321")
    assert synced.get("external_system_identifiers") == expected_identifiers
def test_get_linked_records_in_field_with_different_pid_types(inspire_app):
    """``get_linked_records_from_field`` resolves references across pid types."""
    lit_target = create_record_factory("lit")
    lit_uri = "http://localhost:5000/api/literature/{}".format(
        lit_target.json["control_number"]
    )
    aut_target = create_record_factory("aut")
    aut_uri = "http://localhost:5000/api/authors/{}".format(
        aut_target.json["control_number"]
    )
    # One literature and one authors reference in the same field.
    citing = create_record_factory(
        "lit",
        data={
            "references": [
                {"record": {"$ref": lit_uri}},
                {"record": {"$ref": aut_uri}},
            ]
        },
    )

    expected_records = [lit_target.json, aut_target.json]
    linked = list(
        LiteratureRecord.get_record_by_pid_value(
            citing.json["control_number"]
        ).get_linked_records_from_field("references.record")
    )
    assert len(linked) == 2
    for resolved in linked:
        assert resolved in expected_records
def test_create_record_with_directory(base_app, db, script_info):
    """``importer records -d`` loads every JSON record file found in a directory."""
    runner = CliRunner()
    data_literature = faker.record("lit", with_control_number=True)
    data_author = faker.record("aut", with_control_number=True)
    cn_literature = data_literature["control_number"]
    cn_author = data_author["control_number"]

    with runner.isolated_filesystem():
        os.mkdir("test_directory/")
        with open(f"test_directory/{cn_literature}.json", "w") as f:
            f.write(json.dumps(data_literature))
        with open(f"test_directory/{cn_author}.json", "w") as f:
            f.write(json.dumps(data_author))

        result = runner.invoke(
            importer, ["records", "-d", "test_directory"], obj=script_info
        )
        imported_literature = LiteratureRecord.get_record_by_pid_value(cn_literature)
        imported_author = AuthorsRecord.get_record_by_pid_value(cn_author)

        assert result.exit_code == 0
        assert imported_literature["control_number"] == cn_literature
        assert imported_author["control_number"] == cn_author
def test_create_record_with_multiple_files(inspire_app, cli):
    """``importer records -f`` accepts several files and imports each record."""
    data_literature = faker.record("lit", with_control_number=True)
    data_author = faker.record("aut", with_control_number=True)
    cn_literature = data_literature["control_number"]
    cn_author = data_author["control_number"]

    with cli.isolated_filesystem():
        for payload in (data_literature, data_author):
            with open(f"{payload['control_number']}.json", "wb") as f:
                f.write(orjson.dumps(payload))

        result = cli.invoke(
            [
                "importer",
                "records",
                "-f",
                f"{cn_literature}.json",
                "-f",
                f"{cn_author}.json",
            ]
        )
        imported_literature = LiteratureRecord.get_record_by_pid_value(cn_literature)
        imported_author = AuthorsRecord.get_record_by_pid_value(cn_author)

        assert result.exit_code == 0
        assert imported_literature["control_number"] == cn_literature
        assert imported_author["control_number"] == cn_author
def test_after_redirection_old_record_is_aware_where_it_is_redirected(inspire_app):
    """A redirected record exposes the ``$ref`` of the record it now points to."""
    old_record = create_record("lit")
    new_record = create_record("lit", data={"deleted_records": [old_record["self"]]})

    old_record_from_db = LiteratureRecord.get_record_by_pid_value(
        old_record["control_number"], original_record=True
    )
    assert old_record_from_db.redirected_record_ref == new_record["self"]
def test_hepdata_harvest_happy_flow_with_date_passed(inspire_app, cli):
    """Harvesting with ``--since`` attaches the HEPData identifier to the record."""
    record = create_record("lit", data={"control_number": 1882568})

    cli.invoke(["hepdata", "harvest", "--since", "2021-08-16"])

    harvested = LiteratureRecord.get_record_by_pid_value(record["control_number"])
    assert harvested["external_system_identifiers"] == [
        {"schema": "HEPDATA", "value": "ins1882568"}
    ]
def test_forced_undeleting_record_is_not_blocked(inspire_app):
    """``force_undelete=True`` allows removing the ``deleted`` flag from a record."""
    record = create_record("lit")
    record.delete()

    data = dict(record)
    del data["deleted"]
    record.update(data, force_undelete=True)

    restored = LiteratureRecord.get_record_by_pid_value(data["control_number"])
    assert "deleted" not in restored
def test_migrate_and_insert_record_blacklisted_pid(inspire_app):
    """Migrating a blacklisted pid type mints no pid and marks the mirror row invalid."""
    raw_record = (
        b"<record>"
        b' <controlfield tag="001">12345</controlfield>'
        b' <datafield tag="980" ind1=" " ind2=" ">'
        b' <subfield code="a">HEP</subfield>'
        b" </datafield>"
        b"</record>"
    )

    with patch.dict(current_app.config, {"MIGRATION_PID_TYPE_BLACKLIST": ["lit"]}):
        migrate_and_insert_record(raw_record)
        # No literature pid was created for the blacklisted record...
        with pytest.raises(PIDDoesNotExistError):
            LiteratureRecord.get_record_by_pid_value("12345")
        # ...and its mirror entry is flagged invalid.
        mirror_entry = LegacyRecordsMirror.query.filter(
            LegacyRecordsMirror.recid == 12345
        ).one()
        assert mirror_entry.valid is False
def test_hepdata_harvest_happy_flow(inspire_app, cli):
    """Harvesting attaches the HEPData ``ins<recid>`` identifier to every record."""
    control_numbers = [1882568, 1866118, 1833997]
    records = [
        create_record("lit", data={"control_number": cn}) for cn in control_numbers
    ]

    cli.invoke(["hepdata", "harvest"])

    for created, cn in zip(records, control_numbers):
        harvested = LiteratureRecord.get_record_by_pid_value(
            created["control_number"]
        )
        assert harvested["external_system_identifiers"] == [
            {"schema": "HEPDATA", "value": f"ins{cn}"}
        ]
def test_undeleting_record_is_correctly_blocked(inspire_app):
    """Dropping the ``deleted`` flag without ``force_undelete`` raises ValidationError."""
    record = create_record("lit")
    record.delete()

    data = dict(record)
    del data["deleted"]
    with pytest.raises(ValidationError):
        record.update(data)

    stored = LiteratureRecord.get_record_by_pid_value(data["control_number"])
    assert stored["deleted"] is True