def test_cds_sync_determines_last_run_date_correctly(inspire_app_for_cds_sync, cli):
    """The sync should start from the latest FINISHED run, ignoring other statuses."""
    expected_external_identifiers = [{"schema": "CDS", "value": "123123"}]
    LiteratureRecord.create(faker.record("lit", data={"control_number": 321321}))
    # Seed three earlier runs with mixed statuses; only the FINISHED one
    # (2020-12-24) is a valid "last run" candidate.
    for run_date, run_status in (
        (datetime.date(2020, 12, 24), CDSRunStatus.FINISHED),
        (datetime.date(2020, 12, 25), CDSRunStatus.ERROR),
        (datetime.date(2020, 12, 23), CDSRunStatus.RUNNING),
    ):
        db.session.add(CDSRun(task_id=uuid.uuid4(), date=run_date, status=run_status))
    db.session.commit()

    cli.invoke(["cds", "sync"])

    record = LiteratureRecord.get_record_by_pid_value("321321")
    assert record.get("external_system_identifiers") == expected_external_identifiers
def test_institutions_record_updates_in_es_when_lit_rec_refers_to_it(
    inspire_app, clean_celery_session
):
    """number_of_papers on an institution tracks literature records affiliated to it."""
    institution_1 = InstitutionsRecord.create(faker.record("ins"))
    institution_1_control_number = institution_1["control_number"]
    ref_1 = f"http://localhost:8000/api/institutions/{institution_1_control_number}"
    db.session.commit()

    def assert_number_of_papers(expected):
        # Re-read the institution from ES after a refresh and compare the count.
        current_search.flush_and_refresh("records-institutions")
        record_from_es = InstitutionsSearch().get_record_data_from_es(institution_1)
        assert expected == record_from_es["number_of_papers"]

    retry_until_pass(lambda: assert_number_of_papers(0))

    literature_data = {
        "authors": [
            {
                "full_name": "John Doe",
                "affiliations": [{"value": "Institution", "record": {"$ref": ref_1}}],
            }
        ]
    }
    LiteratureRecord.create(faker.record("lit", literature_data))
    db.session.commit()

    retry_until_pass(lambda: assert_number_of_papers(1))
def test_cds_sync_record_when_there_is_already_correct_cds_identifier(
    inspire_app_for_cds_sync,
):
    """Syncing a record that already carries the right CDS id leaves it unchanged."""
    expected_external_identifiers = [{"schema": "CDS", "value": "1273173"}]
    record_data = faker.record(
        "lit",
        data={
            "control_number": 1203988,
            "external_system_identifiers": [{"schema": "CDS", "value": "1273173"}],
        },
    )
    LiteratureRecord.create(record_data)
    db.session.commit()

    sync_identifiers("2020-07-01")

    runs = CDSRun.query.all()
    # Exactly one run, finished cleanly with no error message.
    assert len(runs) == 1
    assert runs[0].status == CDSRunStatus.FINISHED
    assert runs[0].message == ""
    record = LiteratureRecord.get_record_by_pid_value("1203988")
    assert record["external_system_identifiers"] == expected_external_identifiers
def test_literature_create_with_invalid_data_and_mutliple_pids(
    base_app, db, create_pidstore
):
    """A failed create must not mint any of the lit/arxiv/doi PIDs."""
    doi_value = faker.doi()
    arxiv_value = faker.arxiv()
    record_data = faker.record(
        "lit",
        with_control_number=True,
        data={
            "arxiv_eprints": [{"value": arxiv_value}],
            "dois": [{"value": doi_value}],
        },
    )
    record_data["invalid_key"] = "should throw an error"
    control_number = str(record_data["control_number"])

    with pytest.raises(ValidationError):
        LiteratureRecord.create(record_data)

    # None of the three identifiers may survive the rollback.
    for pid_value in (control_number, arxiv_value, doi_value):
        leftover_pid = PersistentIdentifier.query.filter_by(
            pid_value=pid_value
        ).one_or_none()
        assert leftover_pid is None
def test_lit_record_updates_references_when_reference_is_added(
    inspire_app, clean_celery_session
):
    """Adding a reference bumps the cited record's citation count from 0 to 1."""
    cited_record = LiteratureRecord.create(faker.record("lit"))
    db.session.commit()
    citing_record_data = faker.record("lit")
    citing_record = LiteratureRecord.create(citing_record_data)
    db.session.commit()

    assert_citation_count(cited_record, 0)

    citing_record_data["references"] = [
        {
            "record": {
                "$ref": f"http://localhost:5000/api/literature/{cited_record['control_number']}"
            }
        }
    ]
    citing_record_data["control_number"] = citing_record["control_number"]
    citing_record.update(citing_record_data)
    db.session.commit()

    assert_citation_count(cited_record, 1)
def test_experiment_record_updates_in_es_when_lit_rec_refers_to_it(
    inspire_app, celery_app_with_context, celery_session_worker
):
    """number_of_papers on an experiment tracks literature records linked to it."""
    experiment_1 = ExperimentsRecord.create(faker.record("exp"))
    experiment_1_control_number = experiment_1["control_number"]
    ref_1 = f"http://localhost:8000/api/experiments/{experiment_1_control_number}"
    db.session.commit()

    refresh_step = {
        "step": current_search.flush_and_refresh,
        "args": ["records-experiments"],
    }

    def papers_count_step(expected):
        # Expectation on the single experiment document's number_of_papers.
        return {
            "step": es_search,
            "args": ["records-experiments"],
            "expected_result": {
                "expected_key": "hits.hits[0]._source.number_of_papers",
                "expected_result": expected,
            },
        }

    retry_until_matched(
        [
            refresh_step,
            {
                "step": es_search,
                "args": ["records-experiments"],
                "expected_result": {
                    "expected_key": "hits.total.value",
                    "expected_result": 1,
                },
            },
            papers_count_step(0),
        ]
    )

    literature_data = {
        "accelerator_experiments": [{"legacy_name": "LIGO", "record": {"$ref": ref_1}}]
    }
    LiteratureRecord.create(faker.record("lit", literature_data))
    db.session.commit()

    retry_until_matched([refresh_step, papers_count_step(1)])
def test_import_article_doi_already_in_inspire(base_app, db, es):
    """Importing by DOI fails when a record with that DOI already exists."""
    doi_value = faker.doi()
    record_data = faker.record(
        "lit", with_control_number=True, data={"dois": [{"value": doi_value}]}
    )
    LiteratureRecord.create(record_data)

    with pytest.raises(ExistingArticleError):
        import_article(doi_value)
def test_import_article_arxiv_id_already_in_inspire(base_app, db, es):
    """Importing by arXiv id fails when a record with that eprint already exists."""
    arxiv_value = faker.arxiv()
    record_data = faker.record(
        "lit", with_control_number=True, data={"arxiv_eprints": [{"value": arxiv_value}]}
    )
    LiteratureRecord.create(record_data)

    with pytest.raises(ExistingArticleError):
        import_article(f"arXiv:{arxiv_value}")
def test_many_records_in_one_commit(inspire_app, clean_celery_session):
    """All records created before one commit end up indexed in ES."""
    for _ in range(10):
        LiteratureRecord.create(faker.record("lit"))
    db.session.commit()

    current_search.flush_and_refresh("records-hep")
    assert_es_hits_count(10)
def test_lit_record_reindexes_references_when_earliest_date_changed(
    app, celery_app_with_context, celery_session_worker, retry_until_matched
):
    """Changing the citer's preprint date moves the citation to the new year bucket."""
    cited_record = LiteratureRecord.create(faker.record("lit"))
    db.session.commit()
    citing_record_data = faker.record(
        "lit",
        literature_citations=[cited_record["control_number"]],
        data={"preprint_date": "2018-06-28"},
    )
    citing_record = LiteratureRecord.create(citing_record_data)
    db.session.commit()

    def citations_by_year_steps(year):
        # Refresh the index, then expect a single citation counted in `year`.
        return [
            {"step": es.indices.refresh, "args": ["records-hep"]},
            {
                "step": LiteratureSearch.get_record_data_from_es,
                "args": [cited_record],
                "expected_result": {
                    "expected_key": "citations_by_year",
                    "expected_result": [{"count": 1, "year": year}],
                },
            },
        ]

    retry_until_matched(citations_by_year_steps(2018))

    citing_record_data["preprint_date"] = "2019-06-28"
    citing_record.update(citing_record_data)
    db.session.commit()
    es.indices.refresh("records-hep")

    retry_until_matched(citations_by_year_steps(2019))
def test_revert_revision_works_correctly_and_runs_update(inspire_app):
    """Reverting a record restores its references and the citation links.

    Also pins the revision bookkeeping: a revert is stored as a *new* revision
    on top of the history, not as a rollback of the version counter.
    """
    cited_data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "Test a valid record"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
    }
    cited_record = LiteratureRecord.create(cited_data)
    citing_data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "My title"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
        "preprint_date": "2019-06-28",
        "references": [
            {
                "record": {
                    "$ref": f"http://localhost:5000/api/literature/{cited_record['control_number']}"
                }
            }
        ],
    }
    citing_record = LiteratureRecord.create(citing_data)
    db.session.commit()
    assert len(citing_record.model.references) == 1
    assert len(cited_record.model.citations) == 1
    # NOTE(review): revision_id is 2 right after a single create, which implies
    # create itself writes intermediate revisions — confirm against the record
    # creation internals if this starts failing.
    assert citing_record.revision_id == 2
    citing_record = LiteratureRecord.get_record(citing_record.id)
    data = dict(citing_record)
    del data["references"]
    citing_record.update(data)
    db.session.commit()
    citing_record = LiteratureRecord.get_record(citing_record.id)
    # Dropping the references also clears the citation link on the cited side.
    assert len(citing_record.model.references) == 0
    assert len(cited_record.model.citations) == 0
    assert citing_record.revision_id == 4
    citing_record.revert(2)
    db.session.commit()
    citing_record = LiteratureRecord.get_record(citing_record.id)
    assert len(citing_record.model.references) == 1
    assert len(cited_record.model.citations) == 1
    # Reverted to revision 2 but added as next revision
    # so it will be revision 6
    assert citing_record.revision_id == 6
    assert dict(citing_record.revisions[2]) == dict(citing_record)
def test_conference_record_updates_in_es_when_lit_rec_refers_to_it(
    inspire_app, clean_celery_session
):
    """Conference ES doc tracks contributions/proceedings of linked literature."""
    conference_1 = ConferencesRecord.create(faker.record("con"))
    conference_1_control_number = conference_1["control_number"]
    ref_1 = f"http://localhost:8000/api/conferences/{conference_1_control_number}"
    db.session.commit()
    expected_contributions_count = 0

    def assert_record():
        # Closure reads expected_contributions_count from the enclosing scope.
        current_search.flush_and_refresh("records-conferences")
        record_from_es = ConferencesSearch().get_record_data_from_es(conference_1)
        assert expected_contributions_count == record_from_es[
            "number_of_contributions"
        ]

    retry_until_pass(assert_record)
    data = {
        "publication_info": [{"conference_record": {"$ref": ref_1}}],
        "document_type": ["conference paper"],
    }
    LiteratureRecord.create(faker.record("lit", data))
    data = {
        "publication_info": [
            {"conference_record": {"$ref": ref_1}, "journal_title": "nice title"}
        ],
        "document_type": ["proceedings"],
    }
    record2 = LiteratureRecord.create(faker.record("lit", data))
    db.session.commit()

    # NOTE(review): expected_contributions_count is still 0 here even though a
    # "conference paper" was just linked above — confirm whether this re-check
    # should expect 1 (proceedings presumably do not count as contributions).
    def assert_record():
        current_search.flush_and_refresh("records-conferences")
        record_from_es = ConferencesSearch().get_record_data_from_es(conference_1)
        assert expected_contributions_count == record_from_es[
            "number_of_contributions"
        ]

    retry_until_pass(assert_record)
    expected_proceedings = [ProceedingInfoItemSchemaV1().dump(record2).data]

    def assert_record():
        # The proceedings record must show up serialized on the conference doc.
        current_search.flush_and_refresh("records-conferences")
        record_from_es = ConferencesSearch().get_record_data_from_es(conference_1)
        assert expected_proceedings == record_from_es["proceedings"]

    retry_until_pass(assert_record)
def test_lit_record_updates_references_when_reference_is_added(
    app, celery_app_with_context, celery_session_worker, retry_until_matched
):
    """Adding a reference pushes the cited record's ES citation_count from 0 to 1."""
    cited_record = LiteratureRecord.create(faker.record("lit"))
    db.session.commit()
    data_citing_record = faker.record("lit")
    citing_record = LiteratureRecord.create(data_citing_record)
    db.session.commit()

    def citation_count_steps(expected):
        # Refresh the index, then expect the given citation_count in ES.
        return [
            {"step": es.indices.refresh, "args": ["records-hep"]},
            {
                "step": LiteratureSearch.get_record_data_from_es,
                "args": [cited_record],
                "expected_result": {
                    "expected_key": "citation_count",
                    "expected_result": expected,
                },
            },
        ]

    retry_until_matched(citation_count_steps(0))

    data_citing_record["references"] = [
        {
            "record": {
                "$ref": f"http://localhost:5000/api/literature/{cited_record['control_number']}"
            }
        }
    ]
    citing_record.update(data_citing_record)
    db.session.commit()
    es.indices.refresh("records-hep")

    retry_until_matched(citation_count_steps(1))
def test_literature_create_with_existing_control_number(base_app, db, create_pidstore):
    """Creating a record whose control-number PID is already taken must fail."""
    record_data = faker.record("lit", with_control_number=True)
    # Pre-register the same pid_value for an unrelated object.
    create_pidstore(
        object_uuid=uuid4(),
        pid_type="lit",
        pid_value=record_data["control_number"],
    )

    with pytest.raises(PIDAlreadyExists):
        LiteratureRecord.create(record_data)
def test_create_record_update_citation_table(base_app, db, es):
    """Creating a citing record fills both sides of the citation relation."""
    cited = LiteratureRecord.create(faker.record("lit"))
    citing = LiteratureRecord.create(
        faker.record("lit", literature_citations=[cited["control_number"]])
    )

    # Cited side: one incoming citation, no outgoing references.
    assert len(cited.model.citations) == 1
    assert len(cited.model.references) == 0
    # Citing side: the mirror image.
    assert len(citing.model.citations) == 0
    assert len(citing.model.references) == 1
    assert len(RecordCitations.query.all()) == 1
def test_literature_create_with_invalid_data(base_app, db, create_pidstore):
    """A failed create must not leave a control-number PID behind."""
    record_data = faker.record("lit", with_control_number=True)
    record_data["invalid_key"] = "should throw an error"
    control_number = str(record_data["control_number"])

    with pytest.raises(ValidationError):
        LiteratureRecord.create(record_data)

    leftover_pid = PersistentIdentifier.query.filter_by(
        pid_value=control_number
    ).one_or_none()
    assert leftover_pid is None
def test_lit_records_with_citations_updates(inspire_app, clean_celery_session):
    """A record's citation count grows when a new record cites it."""
    cited = LiteratureRecord.create(faker.record("lit"))
    db.session.commit()
    assert_citation_count(cited, 0)

    LiteratureRecord.create(
        faker.record("lit", literature_citations=[cited["control_number"]])
    )
    db.session.commit()
    assert_citation_count(cited, 1)
def test_cds_sync(inspire_app_for_cds_sync, cli):
    """A full CLI sync run attaches the CDS identifier to the matched record."""
    expected_external_identifiers = [{"schema": "CDS", "value": "1273173"}]
    LiteratureRecord.create(faker.record("lit", data={"control_number": 1203988}))
    db.session.commit()

    result = cli.invoke(["cds", "sync", "--since", "2020-07-01"])

    assert "Starting CDS Sync." in result.output
    assert "Task didn't finish correctly." not in result.output
    record = LiteratureRecord.get_record_by_pid_value("1203988")
    assert record.get("external_system_identifiers") == expected_external_identifiers
def test_literature_regression_changing_bai_in_record_reindex_records_which_are_citing_changed_one(
    inspire_app, celery_app_with_context, celery_session_worker,
    enable_self_citations):
    """Regression: changing an author's BAI reindexes the records citing them."""
    data = {
        "authors": [{
            "full_name": "Jean-Luc Picard",
            "ids": [{
                "schema": "INSPIRE BAI",
                "value": "Jean.L.Picard.1"
            }],
        }]
    }
    data = faker.record("lit", data=data)
    base_record = LiteratureRecord.create(data)
    citer_data = faker.record(
        "lit", literature_citations=[base_record["control_number"]])
    citer = LiteratureRecord.create(citer_data)
    db.session.commit()
    # NOTE(review): other tests in this file nest "expected_key"/"expected_result"
    # inside an "expected_result" dict; here they sit at the step's top level —
    # confirm retry_until_matched actually evaluates them in this shape.
    steps = [
        {
            "step": current_search.flush_and_refresh,
            "args": ["records-hep"]
        },
        {
            "step": LiteratureSearch.get_record_data_from_es,
            "args": [citer],
            "expected_key": "referenced_authors_bais",
            "expected_result": ["Jean.L.Picard.1"],
        },
    ]
    retry_until_matched(steps)
    # Change only the BAI on the cited record's author.
    data = dict(base_record)
    data["authors"][0]["ids"][0]["value"] = "Jean.L.Picard.2"
    base_record.update(data)
    db.session.commit()
    # The citing record's ES doc must pick up the new BAI.
    steps = [
        {
            "step": current_search.flush_and_refresh,
            "args": ["records-hep"]
        },
        {
            "step": LiteratureSearch.get_record_data_from_es,
            "args": [citer],
            "expected_key": "referenced_authors_bais",
            "expected_result": ["Jean.L.Picard.2"],
        },
    ]
    retry_until_matched(steps)
def test_gracefully_handle_records_updating_in_wrong_order(
        inspire_app, clean_celery_session):
    """Replaying an older record version into ES must not clobber newer state.

    Auto indexing is disabled so index_record can be called manually with
    specific model versions, simulating index tasks arriving out of order.
    """
    # We want to run indexing in weird order, so disable auto indexing
    models_committed.disconnect(index_after_commit)
    cited_record = LiteratureRecord.create(data=faker.record("lit"))
    record_data = faker.record(
        "lit", literature_citations=[cited_record.control_number])
    record = LiteratureRecord.create(data=record_data)
    db.session.commit()
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # Index the latest version: the citation is counted.
    index_record(record.id, record.model.versions[-1].version_id)
    assert LiteratureSearch().get_source(
        cited_record.id)["citation_count"] == 1
    # First update: drop the references.
    data = dict(record)
    del data["references"]
    record.update(data)
    db.session.commit()
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # Second update: change the title.
    data = dict(record)
    data["titles"][0] = {"title": "New Title"}
    record.update(data)
    db.session.commit()
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # Index the newest version first.
    index_record(record.id, record.model.versions[-1].version_id)
    record = LiteratureRecord.get_record_by_pid_value(record.control_number)
    # NOTE(review): citation_count is expected to still be 1 here although the
    # newest version has no references — confirm this reflects the intended
    # "graceful" semantics of versioned indexing rather than a stale assertion.
    assert LiteratureSearch().get_source(
        cited_record.id)["citation_count"] == 1
    assert LiteratureSearch().get_source(record.id)["titles"] == [{
        "title": "New Title"
    }]
    # Now replay the *previous* version out of order: the title in ES must not
    # regress, while the citation count drops to 0.
    index_record(record.id, record.model.versions[-2].version_id)
    assert LiteratureSearch().get_source(
        cited_record.id)["citation_count"] == 0
    assert LiteratureSearch().get_source(record.id)["titles"] == [{
        "title": "New Title"
    }]
    # Restore auto indexing for subsequent tests.
    models_committed.connect(index_after_commit)
def test_phonetic_blocks_keep_order_in_redis_based_on_timestamp(
    base_app, db, es, redis
):
    """Phonetic blocks pop out of the redis zset in creation-timestamp order."""
    with freeze_time(datetime.datetime(2015, 8, 18, 8, 51, 50)):
        first_record_data = faker.record(
            "lit", data={"authors": [{"full_name": "Ellis, John Richard"}]}
        )
        InspireRecord.create(first_record_data)
    # One hour later: this author's block must sort after the first one.
    with freeze_time(datetime.datetime(2015, 8, 18, 9, 51, 50)):
        second_record_data = faker.record(
            "lit", data={"authors": [{"full_name": "Jimmy"}]}
        )
        LiteratureRecord.create(second_record_data)

    assert "ELj" == redis.zpopmin("author_phonetic_blocks")[0][0]
    assert "JANY" == redis.zpopmin("author_phonetic_blocks")[0][0]
def test_process_references_in_records_with_different_type_of_records_doesnt_throw_an_exception(
    inspire_app, celery_app_with_context, celery_session_worker
):
    """process_references_in_records handles a mix of record types without failing."""
    # disconnect this signal so records don't get indexed
    models_committed.disconnect(index_after_commit)
    cited_record_1 = LiteratureRecord.create(faker.record("lit"))
    cited_record_2 = LiteratureRecord.create(faker.record("lit"))
    citing_record_1 = LiteratureRecord.create(
        faker.record("lit", literature_citations=[cited_record_1["control_number"]])
    )
    citing_record_2 = LiteratureRecord.create(
        faker.record("lit", literature_citations=[cited_record_2["control_number"]])
    )
    db.session.commit()
    other_records = [
        create_record_async(record_type)
        for record_type in ("aut", "job", "jou", "exp", "con", "dat", "ins")
    ]
    # reconnect signal before we call process_references_in_records
    models_committed.connect(index_after_commit)
    uuids = [record.id for record in other_records] + [
        citing_record_1.id,
        citing_record_2.id,
    ]

    task = process_references_in_records.delay(uuids)
    results = task.get(timeout=5)

    assert results == [str(uuid) for uuid in uuids]
    # Each cited record picked up exactly one citation.
    for cited_record in (cited_record_1, cited_record_2):
        record_from_es = InspireSearch.get_record_data_from_es(cited_record)
        assert record_from_es["citation_count"] == 1
def test_lit_record_updates_references_when_record_is_deleted(
    app, celery_app_with_context, celery_session_worker, retry_until_matched
):
    """Marking a citing record deleted drops the cited record's citation_count."""
    cited_record = LiteratureRecord.create(faker.record("lit"))
    db.session.commit()
    data_citing_record = faker.record(
        "lit", literature_citations=[cited_record["control_number"]]
    )
    citing_record = LiteratureRecord.create(data_citing_record)
    db.session.commit()

    def citation_count_steps(expected):
        # Refresh the index, then expect the given citation_count in ES.
        return [
            {"step": es.indices.refresh, "args": ["records-hep"]},
            {
                "step": LiteratureSearch.get_record_data_from_es,
                "args": [cited_record],
                "expected_result": {
                    "expected_key": "citation_count",
                    "expected_result": expected,
                },
            },
        ]

    retry_until_matched(citation_count_steps(1))

    data_citing_record.update({"deleted": True})
    citing_record.update(data_citing_record)
    db.session.commit()
    es.indices.refresh("records-hep")

    retry_until_matched(citation_count_steps(0))
def test_indexer_updates_conference_papers_when_name_changes(
    inspire_app, clean_celery_session
):
    """Renaming a conference propagates to its papers' _ui_display in ES."""
    conference = ConferencesRecord.create(
        faker.record("con", data={"titles": [{"title": "Initial Title"}]})
    )
    db.session.commit()
    current_search.flush_and_refresh("records-conferences")
    conference_id = conference["control_number"]
    paper_data = faker.record(
        "lit",
        data={
            "document_type": ["conference paper"],
            "publication_info": [
                {
                    "conference_record": {
                        "$ref": f"https://labs.inspirehep.net/api/conferences/{conference_id}"
                    }
                }
            ],
        },
    )
    LiteratureRecord.create(paper_data)
    db.session.commit()

    def assert_literature_has_correct_conference_title():
        # The single indexed paper must embed the conference's current titles.
        current_search.flush_and_refresh("*")
        result = es_search("records-hep")
        assert get_value(result, "hits.total.value") == 1
        literature = get_value(result, "hits.hits[0]._source")
        ui_display = orjson.loads(literature["_ui_display"])
        assert conference["titles"] == get_value(ui_display, "conference_info[0].titles")

    retry_until_pass(assert_literature_has_correct_conference_title, timeout=45)

    updated_conference_data = dict(conference)
    updated_conference_data["titles"] = [{"title": "Updated Title"}]
    conference.update(updated_conference_data)
    db.session.commit()

    retry_until_pass(assert_literature_has_correct_conference_title, timeout=45)
def test_process_references_in_records_process_author_records(
    mock_batch_index, inspire_app, clean_celery_session
):
    """Updating an author batch-reindexes every literature record citing them."""
    author_record = AuthorsRecord.create(faker.record("aut"))

    def literature_with_author():
        # Fresh record data referencing the author (fresh dicts each call).
        return faker.record(
            "lit",
            data={
                "authors": [
                    {
                        "full_name": author_record["name"]["value"],
                        "record": author_record["self"],
                    }
                ]
            },
        )

    lit_record = LiteratureRecord.create(literature_with_author())
    lit_record_2 = LiteratureRecord.create(literature_with_author())
    db.session.commit()

    def assert_records_in_es():
        lit_record_from_es = InspireSearch.get_record_data_from_es(lit_record)
        lit_record_from_es_2 = InspireSearch.get_record_data_from_es(lit_record_2)
        aut_record_from_es = InspireSearch.get_record_data_from_es(author_record)
        assert lit_record_from_es and aut_record_from_es and lit_record_from_es_2

    retry_until_pass(assert_records_in_es, retry_interval=5)

    # Update the author without triggering auto indexing.
    models_committed.disconnect(index_after_commit)
    author_record["name"]["value"] = "Another Name"
    author_record.update(dict(author_record))
    db.session.commit()
    # reconnect signal before we call process_references_in_records
    models_committed.connect(index_after_commit)

    task = process_references_in_records.delay([author_record.id])
    task.get(timeout=5)

    # Both literature records referring to the author were sent to batch_index.
    assert sorted(mock_batch_index.mock_calls[0][1][0]) == sorted(
        [str(lit_record.id), str(lit_record_2.id)]
    )
def test_lit_record_update_when_changed(
    app, celery_app_with_context, celery_session_worker, retry_until_matched
):
    """An updated title shows up in the indexed ES document."""
    record_data = faker.record("lit")
    record_data["titles"] = [{"title": "Original title"}]
    record = LiteratureRecord.create(record_data)
    db.session.commit()

    expected_title = "Updated title"
    record_data["titles"][0]["title"] = expected_title
    record.update(record_data)
    db.session.commit()

    steps = [
        {"step": es.indices.refresh, "args": ["records-hep"]},
        {
            "step": es.search,
            "args": ["records-hep"],
            "expected_result": {"expected_key": "hits.total", "expected_result": 1},
        },
    ]
    resp = retry_until_matched(steps)
    assert resp["hits"]["hits"][0]["_source"]["titles"][0]["title"] == expected_title
def test_index_record_manually(
    app, celery_app_with_context, celery_session_worker, retry_until_matched
):
    """A record committed without auto indexing appears in ES only after .index()."""
    record = LiteratureRecord.create(faker.record("lit"))
    # Commit with the auto-indexing signal disabled, then restore it.
    models_committed.disconnect(index_after_commit)
    db.session.commit()
    models_committed.connect(index_after_commit)

    es.indices.refresh("records-hep")
    assert es.search("records-hep")["hits"]["total"] == 0

    record.index()

    steps = [
        {"step": es.indices.refresh, "args": ["records-hep"]},
        {
            "step": es.search,
            "args": ["records-hep"],
            "expected_result": {"expected_key": "hits.total", "expected_result": 1},
        },
    ]
    retry_until_matched(steps)
def test_authors_signature_blocks_and_uuids_added_after_create_and_update(
    app, clear_environment
):
    """Signature blocks and uuids get attached to authors on create and on update."""
    data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "Test a valid record"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
        "authors": [{"full_name": "Doe, John"}],
    }
    record = LiteratureRecord.create(data)
    db.session.commit()
    record_control_number = record["control_number"]

    db_record = LiteratureRecord.get_record_by_pid_value(record_control_number)
    first_author = db_record["authors"][0]
    # "Doe, John" phonetically encodes to "Dj".
    assert first_author["signature_block"] == "Dj"
    assert "uuid" in first_author

    data.update({"authors": [{"full_name": "Ellis, Jane"}]})
    record.update(data)
    db.session.commit()

    record_updated = LiteratureRecord.get_record_by_pid_value(record_control_number)
    # "Ellis, Jane" phonetically encodes to "ELj".
    assert record_updated["authors"][0]["signature_block"] == "ELj"
def test_record_versioning(app, clear_environment):
    """Version id and stored-revision count advance across create and update."""
    data = {
        "$schema": "http://localhost:5000/schemas/records/hep.json",
        "titles": [{"title": "Test a valid record"}],
        "document_type": ["article"],
        "_collections": ["Literature"],
    }
    record = LiteratureRecord.create(data)
    record_control_number = record["control_number"]
    db.session.commit()

    # Fresh record: one version, one stored revision, no previous version.
    assert record.model.version_id == 1
    assert record.model.versions.count() == 1
    with pytest.raises(AttributeError):
        record._previous_version

    record_updated = LiteratureRecord.get_record_by_pid_value(record_control_number)
    record_updated.update(dict(record_updated))
    db.session.commit()

    # After one update: version 2, two revisions, and a previous version exists.
    assert record_updated.model.version_id == 2
    assert record_updated.model.versions.count() == 2
    assert record._previous_version
def test_oai_get_single_identifier_for_arxiv_set(
    inspire_app, celery_app_with_context, celery_session_worker
):
    """OAI GetRecord serves the CERN-arXiv-set record as MARCXML."""
    record_data = faker.record(
        "lit",
        {
            "arxiv_eprints": [{"value": "2009.01484"}],
            "report_numbers": [{"value": "CERN-TH-2020-136"}],
        },
    )
    record = LiteratureRecord.create(record_data)
    record_marcxml = record2marcxml(record)
    db.session.commit()

    set_name = inspire_app.config["OAI_SET_CERN_ARXIV"]
    db.session.add(OAISet(spec=f"{set_name}", name="Test", description="Test"))
    db.session.commit()
    # Give the OAI machinery time to pick up the new set.
    sleep(2)

    with inspire_app.test_client() as client:
        response = client.get(
            f"/api/oai2d?verb=GetRecord&metadataPrefix=marcxml&identifier=oai:inspirehep.net:{record['control_number']}"
        )
        assert record_marcxml in response.data