def test_resource_type(app, db, minimal_record):
    """Test resource type."""
    # String instead of an object
    minimal_record['resource_type'] = 'publication'
    pytest.raises(ValidationError, Record.create, minimal_record)
    minimal_record['resource_type'] = {'type': 'publication', 'subtype': 'x'}
    Record.create(minimal_record)
def test_admin(app, db):
    """Test flask-admin interface."""
    admin = Admin(app, name="Test")

    assert 'model' in record_adminview
    assert 'modelview' in record_adminview

    # Register both models in admin
    model = record_adminview.pop('model')
    view = record_adminview.pop('modelview')
    admin.add_view(view(model, db.session, **record_adminview))

    # Check if generated admin menu contains the correct items
    menu_items = {str(item.name): item for item in admin.menu()}
    assert 'Records' in menu_items
    assert menu_items['Records'].is_category()

    submenu_items = {
        str(item.name): item
        for item in menu_items['Records'].get_children()}
    assert 'Record Metadata' in submenu_items
    assert isinstance(submenu_items['Record Metadata'], menu.MenuView)

    # Create a test record.
    with app.app_context():
        rec_uuid = str(uuid.uuid4())
        Record.create({'title': 'test'}, id_=rec_uuid)
        db.session.commit()

    with app.test_request_context():
        index_view_url = url_for('recordmetadata.index_view')
        delete_view_url = url_for('recordmetadata.delete_view')
        detail_view_url = url_for(
            'recordmetadata.details_view', id=rec_uuid)

    with app.test_client() as client:
        # List index view and check the record is there.
        res = client.get(index_view_url)
        assert res.status_code == 200

        # Fake a problem with SQLAlchemy.
        with patch('invenio_records.admin.Record') as db_mock:
            db_mock.side_effect = SQLAlchemyError()
            res = client.post(
                delete_view_url, data={'id': rec_uuid},
                follow_redirects=True)
            assert res.status_code == 200

        # Delete it.
        res = client.post(
            delete_view_url, data={'id': rec_uuid}, follow_redirects=True)
        assert res.status_code == 200

        # View the deleted record.
        res = client.get(detail_view_url)
        assert res.status_code == 200
        assert '<pre>null</pre>' in res.get_data(as_text=True)

        # Delete it again.
        res = client.post(
            delete_view_url, data={'id': rec_uuid}, follow_redirects=True)
        assert res.status_code == 200
def setup_record_fixture(app): """Setup a record fixture.""" records = [] def _create_pid(record): pid = PersistentIdentifier.create( 'recid', record['recid'], pid_provider='recid') pid.assign('rec', record['recid']) pid.register() with before_record_insert.connected_to(_create_pid): with app.app_context(): records.append(Record.create( {'title': 'Test record 1', 'recid': 1}, identifier_key='recid' )) records.append(Record.create( {'title': 'Test record 2', 'recid': 2}, identifier_key='recid' )) pid = PersistentIdentifier.create('recid', 3, pid_provider='recid') db.session.add(pid) db.session.commit() pid = PersistentIdentifier.get('recid', 2, pid_provider='recid') pid.delete() db.session.commit() return records
def test_grant_schema_ep_resolving(app, db):
    """Test schema validation using entry-point registered schemas."""
    json_valid = {
        '$schema': (
            'http://inveniosoftware.org/schemas/grants/grant-v1.0.0.json'),
        'internal_id': '10.13039/001::0001',
        'identifiers': {
            'oai_id': 'oai_id00001',
            'eurepo': '/eurepo/id00001',
        },
        'code': '0001',
        'title': 'Grant Foobar',
        'acronym': 'GF',
        'startdate': 'startdate',
        'enddate': 'startdate',
        'funder': {'$ref': 'http://dx.doi.org/10.13039/001'},
    }
    # Should not raise validation errors
    R.create(json_valid)

    # Should raise a validation error because 'identifiers' is not an object
    json_invalid = dict(json_valid)
    json_invalid['identifiers'] = 'not_an_object'
    with pytest.raises(ValidationError) as exc_info:
        R.create(json_invalid)
    assert exc_info.value.instance == 'not_an_object'
def test_listmetadataformats_record(app): """Test ListMetadataFormats for a record.""" schema = { 'type': 'object', 'properties': { 'title': {'type': 'string'}, 'field': {'type': 'boolean'}, }, 'required': ['title'], } with app.test_request_context(): with db.session.begin_nested(): record_id = uuid.uuid4() data = {'title': 'Test0', '$schema': schema} recid_minter(record_id, data) pid = oaiid_minter(record_id, data) Record.create(data, id_=record_id) pid_value = pid.pid_value db.session.commit() _listmetadataformats( app=app, query='/oai2d?verb=ListMetadataFormats&identifier={0}'.format( pid_value))
def oaiserver(number): """Initialize OAI-PMH server.""" from invenio_db import db from invenio_oaiserver.models import OAISet from invenio_records.api import Record # create a OAI Set with db.session.begin_nested(): for i in range(number): db.session.add(OAISet( spec='test{0}'.format(i), name='Test{0}'.format(i), description='test desc {0}'.format(i), search_pattern='title:Test{0}'.format(i), )) # create a record schema = { 'type': 'object', 'properties': { 'title': {'type': 'string'}, 'field': {'type': 'boolean'}, }, 'required': ['title'], } with db.session.begin_nested(): for i in range(number): record_id = uuid.uuid4() data = {'title': 'Test{0}'.format(i), '$schema': schema} recid_minter(record_id, data) oaiid_minter(record_id, data) Record.create(data, id_=record_id) db.session.commit()
def test_jsonalchemy_toint_usage(self): """Test the usage of ``to_int`` function in real life example. The ``test_toint`` model contains a field which contains an integer subfield. Whenever the record is obtained from ``MARCXML``, the string in mentioned subfield has to be converted to an integer. However, JSONAlchemy fills every absent subfield with a ``None`` value. If the record is not provided with the integer subfield and the built-in ``int`` function is used, the code will crash. The ``to_int`` function used inside definition of ``test_toint`` field prevents it. Here the unprovided subfield is ``999__a``. """ xml = '<collection><record><datafield tag="999" ind1="" ind2= "">' \ '<subfield code="b">Value</subfield></datafield></record>' \ '</collection>' from invenio_records.api import Record simple_record = Record.create(xml, master_format='marc', model="test_toint", namespace='testsuite') self.assertEqual(len(simple_record.__dict__['_dict']['__meta_metadata__']['__errors__']), 0) # Check if it works when the value is provided. xml = '<collection><record><datafield tag="999" ind1="" ind2= "">' \ '<subfield code="a">9999</subfield>' \ '<subfield code="b">Value</subfield></datafield></record>' \ '</collection>' simple_record = Record.create(xml, master_format='marc', model="test_toint", namespace='testsuite') self.assertEqual(simple_record['with_integers'][0]['some_int'], 9999)
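# A minimal sketch of the ``to_int`` helper discussed in the docstring above --
# an illustrative assumption, not the actual JSONAlchemy implementation.
def to_int(value, default=None):
    """Convert a MARC subfield value to int, tolerating absent subfields."""
    # JSONAlchemy fills absent subfields with None; a bare int(None) would
    # raise a TypeError, so fall back to a default instead.
    if value is None:
        return default
    return int(value)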
def test_resource_type(app, db, minimal_record, val, passing): """Test resource type.""" minimal_record['resource_type'] = val if passing: Record.create(minimal_record) else: pytest.raises(ValidationError, Record.create, minimal_record)
def test_view_documents(app_assets_db): """Test the view invenio_previewer.document""" with app_assets_db.test_request_context(): with app_assets_db.test_client() as client: tmpdirname = tempfile.mktemp() if not os.path.exists(tmpdirname): os.makedirs(tmpdirname) content = ''.join(random.choice(string.ascii_letters) for i in range(16536)) filename = 'file.txt' filename_path = os.path.join(tmpdirname, filename) with open(filename_path, 'w') as file: file.write(content) rec_uuid = uuid.uuid4() with db.session.begin_nested(): Record.create({ "title": "TestDefault", "files": [{"uri": filename_path}] }, id_=rec_uuid) url = url_for('invenio_previewer.document', recid=rec_uuid, filename=filename) response = client.get(url) assert content == response.data.decode('ascii') shutil.rmtree(tmpdirname)
def grant_records(db, funder_record): """Create grant records.""" grants = [ Record.create(dict( internal_id='10.13039/501100000780::282896', funder={'$ref': 'https://dx.doi.org/10.13039/501100000780'}, identifiers=dict( eurepo='info:eu-repo/grantAgreement/EC/FP7/282896', ), code='282896', title='Open Access Research Infrastructure in Europe', acronym='OpenAIREplus', program='FP7', )), Record.create(dict( internal_id='10.13039/501100000780::027819', funder={'$ref': 'https://dx.doi.org/10.13039/501100000780'}, identifiers=dict( eurepo='info:eu-repo/grantAgreement/EC/FP6/027819', ), code='027819', title='Integrating cognition, emotion and autonomy', acronym='ICEA', program='FP6', )), ] for g in grants: PersistentIdentifier.create( pid_type='grant', pid_value=g['internal_id'], object_type='rec', object_uuid=g.id, status='R') db.session.commit() return grants
def records(): """Load test data fixture.""" import uuid from invenio_records.api import Record from invenio_pidstore.models import PersistentIdentifier, PIDStatus create_test_user() indexer = RecordIndexer() # Record 1 - Live record with db.session.begin_nested(): rec_uuid = uuid.uuid4() pid1 = PersistentIdentifier.create( 'recid', '1', object_type='rec', object_uuid=rec_uuid, status=PIDStatus.REGISTERED) Record.create({ 'title': 'Registered', 'description': 'This is an awesome description', 'control_number': '1', 'access_right': 'restricted', 'access_conditions': 'fuu', 'owners': [1, 2], 'recid': 1 }, id_=rec_uuid) indexer.index_by_id(pid1.object_uuid) db.session.commit() sleep(3)
def test_collection_tree_matcher(app): """Test database backend.""" # a # (None) # +------------------+--------------------+ # | | # b e # (None) (title:Test2 OR title:Test3) # +------+-----+ +------------+------------+ # | | | | | # c d f g h # (title:Test0) (title:Test1) (title:Test2) (None) (None) # | | # i j # (title:Test3) (title:Test4)) with app.test_request_context(): a = Collection(name="a") b = Collection(name="b", parent=a) e = Collection( name="e", dbquery="title:Test2 OR title:Test3", parent=a) c = Collection(name="c", dbquery="title:Test0", parent=b) d = Collection(name="d", dbquery="title:Test1", parent=b) f = Collection(name="f", dbquery="title:Test2", parent=e) g = Collection(name="g", parent=e) h = Collection(name="h", parent=e) i = Collection(name="i", dbquery="title:Test3", parent=g) j = Collection(name="j", dbquery="title:Test4", parent=h) with db.session.begin_nested(): for coll in [a, b, c, d, e, f, g, h, i, j]: db.session.add(coll) db.session.commit() # start tests schema = { 'type': 'object', 'properties': { 'title': {'type': 'string'}, 'field': {'type': 'boolean'}, 'hello': {'type': 'array'}, }, 'required': ['title'], } record0 = Record.create({'title': 'Test0', '$schema': schema}) record1 = Record.create({'title': 'Test1', '$schema': schema}) record2 = Record.create({'title': 'Test2', '$schema': schema}) record3 = Record.create({'title': 'Test3', '$schema': schema}) record4 = Record.create({'title': 'Test4', '$schema': schema}) assert set(record0['_collections']) == set(['a', 'c', 'b']) assert set(record1['_collections']) == set(['a', 'b', 'd']) assert set(record2['_collections']) == set(['a', 'e', 'f']) assert set(record3['_collections']) == set(['a', 'e', 'g', 'i']) assert set(record4['_collections']) == set(['h', 'j'])
def test_funder_ep_resolving(app, db): """Test funder resolving through entry point-registered JSON resolver.""" json1 = {"internal_id": "10.13039/001", "parent": "", "name": "Foo"} json2 = {"internal_id": "10.13039/002", "parent": {"$ref": "http://dx.doi.org/10.13039/001"}, "name": "Bar"} r1 = R.create(json1) PID.create("frdoi", json1["internal_id"], object_type="rec", object_uuid=r1.id, status=PIDStatus.REGISTERED) r2 = R.create(json2) PID.create("frdoi", json2["internal_id"], object_type="rec", object_uuid=r2.id, status=PIDStatus.REGISTERED) assert r2.replace_refs()["parent"] == json1
def test_identifier_schemes(app, db, minimal_record): """Test supported identifier schemes.""" supported_schemes = [s for s, _ in idutils.PID_SCHEMES] minimal_record['related_identifiers'] = [ {'scheme': scheme, 'relation': 'references', 'identifier': 'foobar'} for scheme in supported_schemes ] # JSONSchema validation should allow all supported schemes Record.create(minimal_record)
def test_file_download_ui(base_app, objects, db):
    """Test file download UI."""
    app = base_app
    app.config.update(dict(
        RECORDS_UI_DEFAULT_PERMISSION_FACTORY=None,  # No permission checking
        RECORDS_UI_ENDPOINTS=dict(
            recid=dict(
                pid_type='recid',
                route='/records/<pid_value>',
            ),
            recid_files=dict(
                pid_type='recid',
                route='/records/<pid_value>/files/<filename>',
                view_imp='invenio_files_rest.views.file_download_ui',
            ),
        )
    ))
    InvenioRecords(app)
    InvenioPIDStore(app)
    InvenioRecordsUI(app)

    obj1 = objects[0]

    with app.app_context():
        # Record 1 - Live record
        rec_uuid = uuid.uuid4()
        PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec_uuid,
            status=PIDStatus.REGISTERED)
        Record.create({
            'title': 'Registered',
            'recid': 1,
            'files': [
                {'filename': obj1.key, 'bucket': str(obj1.bucket_id),
                 'checksum': 'invalid'},
            ]
        }, id_=rec_uuid)
        db.session.commit()

        main_url = url_for('invenio_records_ui.recid', pid_value='1')
        file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1',
            filename=obj1.key)
        no_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='')
        invalid_file_url = url_for(
            'invenio_records_ui.recid_files', pid_value='1', filename='no')

    with app.test_client() as client:
        res = client.get(main_url)
        assert res.status_code == 200

        res = client.get(file_url)
        assert res.status_code == 200

        res = client.get(no_file_url)
        assert res.status_code == 404

        res = client.get(invalid_file_url)
        assert res.status_code == 404
def test_contributors(app, db, minimal_record): """Test contributors.""" minimal_record['contributors'] = [ {'name': 'test', 'affiliation': 'test', 'type': 'ContactPerson'} ] Record.create(minimal_record) minimal_record['contributors'] = [ {'name': 'test', 'affiliation': 'test', 'type': 'Invalid'} ] pytest.raises(ValidationError, Record.create, minimal_record)
def test_model_init(app): """Test basic model initialization and actions.""" with app.app_context(): # Init the User and the Community user1 = create_test_user() comm1 = Community(id='comm1', id_user=user1.id) db.session.add(comm1) db.session.commit() communities_key = app.config["COMMUNITIES_RECORD_KEY"] # Create a record and accept it into the community by creating an # InclusionRequest and then calling the accept action rec1 = Record.create({'title': 'Foobar'}) InclusionRequest.create(community=comm1, record=rec1) assert InclusionRequest.query.count() == 1 comm1.accept_record(rec1) assert 'comm1' in rec1[communities_key] assert InclusionRequest.query.count() == 0 # Likewise, reject a record from the community rec2 = Record.create({'title': 'Bazbar'}) InclusionRequest.create(community=comm1, record=rec2) assert InclusionRequest.query.count() == 1 comm1.reject_record(rec2) assert communities_key not in rec2 # dict key should not be created assert InclusionRequest.query.count() == 0 # Add record to another community comm2 = Community(id='comm2', id_user=user1.id) db.session.add(comm2) db.session.commit() InclusionRequest.create(community=comm2, record=rec1) comm2.accept_record(rec1) assert communities_key in rec1 assert len(rec1[communities_key]) == 2 assert comm1.id in rec1[communities_key] assert comm2.id in rec1[communities_key] # Accept/reject a record to/from a community without inclusion request rec3 = Record.create({'title': 'Spam'}) pytest.raises(InclusionRequestMissingError, comm1.accept_record, rec3) pytest.raises(InclusionRequestMissingError, comm1.reject_record, rec3) # Create two inclusion requests comm3 = Community(id='comm3', id_user=user1.id) db.session.add(comm3) db.session.commit() InclusionRequest.create(community=comm3, record=rec1) pytest.raises(InclusionRequestExistsError, InclusionRequest.create, community=comm3, record=rec1) # Try to accept a record to a community twice (should raise) # (comm1 is already in rec1) pytest.raises(InclusionRequestObsoleteError, InclusionRequest.create, community=comm1, record=rec1)
def records(): """Load test data fixture.""" with db.session.begin_nested(): for idx in range(20): # create the record Record.create({ 'title': 'LHC experiment {}'.format(idx), 'description': 'Data from experiment {}.'.format(idx), 'type': 'data', }) db.session.commit()
def test_remote_openaire_loader(app, db): """Test the remote OAI-PMH OpenAIRE loader.""" loader = RemoteOAIRELoader() pytest.raises(OAIRELoadingError, list, loader.iter_grants()) recuuid = uuid.uuid4() PersistentIdentifier.create( 'frdoi', '10.13039/501100000925', object_type='rec', object_uuid=recuuid, status='R') Record.create({'acronyms': ['EC']}, id_=recuuid) records = list(loader.iter_grants()) assert len(records) == 5
def test_view_preview_default_extension(app_assets_db): """Test view by default.""" with app_assets_db.test_request_context(): with app_assets_db.test_client() as client: rec_uuid = uuid.uuid4() with db.session.begin_nested(): Record.create({ "title": "TestDefault", "files": [{"uri": "/tmp/TestDefault.def"}] }, id_=rec_uuid) url = url_for('invenio_previewer.preview', recid=rec_uuid) response = client.get(url) assert 'we are unfortunately not' in response.data.decode('utf-8')
def records():
    """Load test data fixture."""
    import uuid
    from invenio_records.api import Record
    from invenio_pidstore.models import PersistentIdentifier, PIDStatus

    # Record 1 - Live record
    with db.session.begin_nested():
        rec1_uuid = uuid.uuid4()
        pid1 = PersistentIdentifier.create(
            'recid', '1', object_type='rec', object_uuid=rec1_uuid,
            status=PIDStatus.REGISTERED)
        Record.create({
            'title': 'Registered',
            'authors': [
                {'name': 'Ellis Jonathan'},
                {'name': 'Higgs Peter'},
            ],
            'keywords': ['CERN', 'higgs'],
        }, id_=rec1_uuid)

        # Record 2 - Deleted PID with record
        rec2_uuid = uuid.uuid4()
        pid = PersistentIdentifier.create(
            'recid', '2', object_type='rec', object_uuid=rec2_uuid,
            status=PIDStatus.REGISTERED)
        pid.delete()
        Record.create({'title': 'Live'}, id_=rec2_uuid)

        # Record 3 - Deleted PID without a record
        PersistentIdentifier.create(
            'recid', '3', status=PIDStatus.DELETED)

        # Record 4 - Registered PID without a record
        PersistentIdentifier.create(
            'recid', '4', status=PIDStatus.REGISTERED)

        # Record 5 - Redirected PID
        pid = PersistentIdentifier.create(
            'recid', '5', status=PIDStatus.REGISTERED)
        pid.redirect(pid1)

        # Record 6 - Redirected non existing endpoint
        doi = PersistentIdentifier.create(
            'doi', '10.1234/foo', status=PIDStatus.REGISTERED)
        pid = PersistentIdentifier.create(
            'recid', '6', status=PIDStatus.REGISTERED)
        pid.redirect(doi)

        # Record 7 - Unregistered PID
        PersistentIdentifier.create(
            'recid', '7', status=PIDStatus.RESERVED)
def populate(records, collections, file_input=None,
             input_type=None, force_import=None):
    """Populate records from the command line."""
    if records is None and collections is None:
        # We harvest all
        print("Migrating all records...", file=sys.stderr)
    if records:
        print("Migrating records: {0}".format(",".join(records)))
    if collections:
        print("Migrating collections: {0}".format(",".join(collections)))
    if file_input and not os.path.isfile(file_input):
        print("{0} is not a file!".format(file_input), file=sys.stderr)
        return
    if file_input:
        print("Migrating records from file: {0}".format(file_input))
        if force_import:
            # Load signal handler
            from inspire.modules.records.receivers import insert_record
        processor = current_app.config['RECORD_PROCESSORS'][input_type]
        if isinstance(processor, six.string_types):
            processor = import_string(processor)
        data = processor(open(file_input))
        if isinstance(data, dict):
            Record.create(data)
        else:
            [Record.create(item) for item in data]
        db.session.commit()
        if force_import:
            # Disable signal handler
            from inspire.modules.records.receivers import remove_handler
            remove_handler()
    else:
        legacy_base_url = current_app.config.get("CFG_INSPIRE_LEGACY_BASEURL")
        print(
            "Migrating records from {0}".format(
                legacy_base_url
            ),
            file=sys.stderr
        )
        job = migrate.delay(legacy_base_url,
                            records=records,
                            collections=collections,
                            file_input=file_input)
        print("Scheduled migration job {0}".format(job.id))
def records(): """Load records.""" import pkg_resources from invenio_records.api import Record from dojson.contrib.marc21 import marc21 from dojson.contrib.marc21.utils import create_record, split_blob # pkg resources the demodata data_path = pkg_resources.resource_filename( 'invenio_records', 'data/marc21/bibliographic.xml' ) with open(data_path) as source: with db.session.begin_nested(): for data in split_blob(source.read()): Record.create(marc21.do(create_record(data)))
def bibtex_records(app, db, full_record): """Create some records for bibtex serializer.""" test_bad_record = dict(recid='12345') r_good = Record.create( full_record, UUID("24029cb9-f0f8-4b72-94a7-bdf746f9d075")) r_bad = Record.create( test_bad_record, UUID("0281c22c-266a-499b-8446-e12eff2f79b8")) db.session.commit() record_good = Bibtex(r_good) record_bad = Bibtex(r_bad) record_empty = Bibtex({}) yield (record_good, record_bad, record_empty, r_good)
def test_signals(testapp, database, signals): """Test signals being sent.""" db = database record = Record.create({'title': 'Test'}) db.session.commit() assert 'before_record_insert' in signals assert 'after_record_insert' in signals assert len(signals.keys()) == 2 record['title'] = 'Test2' record.commit() db.session.commit() assert 'before_record_update' in signals assert 'after_record_update' in signals assert len(signals.keys()) == 4 record.revert(0) db.session.commit() assert 'before_record_revert' in signals assert 'after_record_revert' in signals assert len(signals.keys()) == 6 record.delete() db.session.commit() assert 'before_record_delete' in signals assert 'after_record_delete' in signals assert len(signals.keys()) == 8
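# A small sketch of hooking into the record signals exercised above, assuming
# the signal passes the record as a keyword argument (check the
# invenio-records version in use); illustrative only.
from invenio_records.signals import before_record_insert


def _stamp_record(sender, record=None, **kwargs):
    # e.g. stamp the record metadata before it is inserted
    record['stamped'] = True


before_record_insert.connect(_stamp_record)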
def test_stats(app, db, minimal_record, stats, expected_result): """Test record stats.""" record = Record.create(minimal_record) with patch('zenodo.modules.records.views.get_record_stats', return_value=stats) as m: # local DOI record['doi'] = '10.5072/foo' template = render_template('zenodo_records/box/record_stats.html', record=record) assert expected_result['views'] in template assert expected_result['downloads'] in template assert expected_result['volume'] in template assert expected_result['unique_views'] in template assert expected_result['unique_downloads'] in template assert 'All versions' in template assert expected_result['version_views'] in template assert expected_result['version_downloads'] in template assert expected_result['version_volume'] in template assert expected_result['version_unique_views'] in template assert expected_result['version_unique_downloads'] in template # not local DOI record['doi'] = '.dsfsdf' template = render_template('zenodo_records/box/record_stats.html', record=record) assert expected_result['views'] in template assert expected_result['downloads'] in template assert expected_result['volume'] in template assert expected_result['unique_views'] in template assert expected_result['unique_downloads'] in template assert 'All versions' not in template
def test_deposit_index(db, es):
    """Test deposit indexing."""
    deposit_index_name = 'deposits-records-record-v1.0.0'
    rec1 = Record.create({
        'title': 'One',
        '_deposit': {
            'status': 'published',
            'pid': {
                'type': 'recid',
                'value': '1'
            }
        }
    })
    PersistentIdentifier.create(pid_type='recid', pid_value='1',
                                status=PIDStatus.REGISTERED,
                                object_uuid=rec1.id,
                                object_type='rec')
    Deposit.create({
        '_deposit': {
            'status': 'published',
            'pid': {
                'type': 'recid',
                'value': '1'
            }
        }
    })
    db.session.commit()
    current_search.flush_and_refresh(deposit_index_name)
    res = RecordsSearch(index=deposit_index_name).execute()
    # Make sure the 'title' was indexed from the record
    assert res['hits']['hits'][0]['_source']['title'] == 'One'
def create_or_update_record(data, pid_type, id_key, minter): """Register a funder or grant.""" resolver = Resolver(pid_type=pid_type, object_type='rec', getter=Record.get_record) try: pid, record = resolver.resolve(data[id_key]) data_c = deepcopy(data) del data_c['remote_modified'] record_c = deepcopy(record) del record_c['remote_modified'] # All grants on OpenAIRE are modified periodically even if nothing # has changed. We need to check for actual differences in the metadata if data_c != record_c: record.update(data) record.commit() record_id = record.id db.session.commit() RecordIndexer().index_by_id(str(record_id)) except PIDDoesNotExistError: record = Record.create(data) record_id = record.id minter(record.id, data) db.session.commit() RecordIndexer().index_by_id(str(record_id))
def test_datacite_register_fail(mocker, app, db, es, minimal_record): # Make the datacite API unavailable dc_mock = mocker.patch( 'invenio_pidstore.providers.datacite.DataCiteMDSClient') dc_mock().metadata_post.side_effect = datacite.errors.HttpError() # Create a reserved recid record = Record.create(minimal_record) record_uuid = record.id recid = record['recid'] recid_pid = PersistentIdentifier.create('recid', recid, status=PIDStatus.RESERVED) # Mint the record zenodo_record_minter(record_uuid, record) record.commit() db.session.commit() with pytest.raises(datacite.errors.HttpError): datacite_register.apply((recid_pid.pid_value, str(record_uuid))) # Check that the task was retried ("max_retries" + 1) times dc_calls = len(dc_mock().metadata_post.mock_calls) assert dc_calls == datacite_register.max_retries + 1
def test_citation_formatter_citeproc_get(api, api_client, es, db, full_record, users): """Test records REST citeproc get.""" r = Record.create(full_record) pid = PersistentIdentifier.create('recid', '12345', object_type='rec', object_uuid=r.id, status=PIDStatus.REGISTERED) db.session.commit() db.session.refresh(pid) RecordIndexer().index_by_id(r.id) current_search.flush_and_refresh(index='records') login_user_via_session(api_client, email=users[2]['email']) with api.test_request_context(): records_url = url_for('invenio_records_rest.recid_item', pid_value=pid.pid_value) res = api_client.get(records_url, query_string={'style': 'apa'}, headers={'Accept': 'text/x-bibliography'}) assert res.status_code == 200 assert 'Doe, J.' in res.get_data(as_text=True) assert 'Test title (Version 1.2.5).' in res.get_data(as_text=True) assert '(2014).' in res.get_data(as_text=True)
def post(self, **kwargs): """Create a record. :returns: The created record. """ if request.content_type != 'application/json': abort(415) # TODO: accept non json content (MARC21...) data = request.get_json() if data is None: return abort(400) try: # Create uuid for record record_uuid = uuid.uuid4() # Create persistent identifier pid = self.minter(record_uuid, data=data) # Create record record = Record.create(data, id_=record_uuid) db.session.commit() except SQLAlchemyError: db.session.rollback() current_app.logger.exception("Failed to create record.") abort(500) return self.make_response(pid, record, 201)
def test_bulkrecordindexer_index_delete_by_record(app, queue): """Test utility class BulkRecordIndexer index/delete by record object.""" with app.app_context(): with establish_connection() as c: recid = uuid.uuid4() record = Record.create({'title': 'Test'}, id_=recid) db.session.commit() indexer = BulkRecordIndexer() indexer.index(record) indexer.delete(record) consumer = Consumer(connection=c, queue=indexer.mq_queue.name, exchange=indexer.mq_exchange.name, routing_key=indexer.mq_routing_key) messages = list(consumer.iterqueue()) [m.ack() for m in messages] assert len(messages) == 2 data0 = messages[0].decode() assert data0['id'] == str(recid) assert data0['op'] == 'index' data1 = messages[1].decode() assert data1['id'] == str(recid) assert data1['op'] == 'delete'
def test_index(app): """Test record indexing.""" with app.app_context(): recid = uuid.uuid4() record = Record.create({'title': 'Test'}, id_=recid) db.session.commit() client_mock = MagicMock() RecordIndexer(search_client=client_mock, version_type='force').index( record, arguments={'pipeline': 'foobar'}) doc_type = app.config['INDEXER_DEFAULT_DOC_TYPE'] if lt_es7 else '_doc' client_mock.index.assert_called_with( id=str(recid), version=0, version_type='force', index=app.config['INDEXER_DEFAULT_INDEX'], doc_type=doc_type, body={ 'title': 'Test', '_created': pytz.utc.localize(record.created).isoformat(), '_updated': pytz.utc.localize(record.updated).isoformat(), }, pipeline='foobar', ) with patch('invenio_indexer.api.RecordIndexer.index') as fun: RecordIndexer(search_client=client_mock).index_by_id(recid) assert fun.called
def import_records(dojson_model, schema, xmlfile): """Helper to import a MARCXML file for given schema.""" ids = [] from invenio_pidstore import current_pidstore with db.session.begin_nested(): with open(xmlfile, 'rb') as fp: for item in load(fp): # Transform MARCXML to JSON data = dojson_model.do(item) # TODO: Add schema once schema validation has been fixed. # Schema # data['$schema'] = schema # Create a UUID for the record id_ = uuid.uuid4() # FIXME: Strip off control number otherwise minter will fail. if 'control_number' in data: del data['control_number'] # Mint a recid and OAI id. pid = current_pidstore.minters['recid'](id_, data) current_pidstore.minters['oaiid'](id_, data) # Store record. record = Record.create(data, id_=id_) click.echo('Created record {}'.format(pid.pid_value)) ids.append(id_) return ids
def test_index_action(app): """Test index action.""" with app.app_context(): record = Record.create({'title': 'Test'}) db.session.commit() def receiver(sender, json=None, record=None, arguments=None, **kwargs): json['extra'] = 'extra' arguments['pipeline'] = 'foobar' with before_record_index.connected_to(receiver): action = RecordIndexer()._index_action( dict( id=str(record.id), op='index', )) assert action['_op_type'] == 'index' assert action['_index'] == app.config['INDEXER_DEFAULT_INDEX'] assert action['_id'] == str(record.id) if lt_es7: assert action['_type'] == \ app.config['INDEXER_DEFAULT_DOC_TYPE'] else: assert action['_type'] == '_doc' assert action['_version'] == record.revision_id assert action['_version_type'] == 'external_gte' assert action['pipeline'] == 'foobar' assert 'title' in action['_source'] assert 'extra' in action['_source']
def post(self, **kwargs): """Create a record. :returns: The created record. """ if request.content_type != 'application/json': abort(415) # TODO: accept non json content (MARC21...) data = request.get_json() if data is None: return abort(400) try: # Create uuid for record record_uuid = uuid.uuid4() # Create persistent identifier pid = self.minter(record_uuid, data=data) # Create record record = Record.create(data, id_=record_uuid) # Check permissions permission_factory = self.create_permission_factory or \ current_records_rest.create_permission_factory if permission_factory: verify_record_permission(permission_factory, record) db.session.commit() except SQLAlchemyError: db.session.rollback() current_app.logger.exception('Failed to create record.') abort(500) return self.make_response(pid, record, 201)
def records(): """Load records.""" import pkg_resources import uuid from dojson.contrib.marc21 import marc21 from dojson.contrib.marc21.utils import create_record, split_blob from invenio_pidstore import current_pidstore from invenio_records.api import Record # pkg resources the demodata data_path = pkg_resources.resource_filename( 'invenio_records', 'data/marc21/bibliographic.xml' ) with open(data_path) as source: indexer = RecordIndexer() with db.session.begin_nested(): for index, data in enumerate(split_blob(source.read()), start=1): # create uuid rec_uuid = uuid.uuid4() # do translate record = marc21.do(create_record(data)) # create PID current_pidstore.minters['recid_minter']( rec_uuid, record ) # create record indexer.index(Record.create(record, id_=rec_uuid)) db.session.commit()
def test_record_page(app, db, es, event_queues, full_record): """Test record page views.""" full_record['conceptdoi'] = '10.1234/foo.concept' full_record['conceptrecid'] = 'foo.concept' r = Record.create(full_record) PersistentIdentifier.create( 'recid', '12345', object_type='rec', object_uuid=r.id, status=PIDStatus.REGISTERED) db.session.commit() with app.test_client() as client: record_url = url_for('invenio_records_ui.recid', pid_value='12345') assert client.get(record_url).status_code == 200 process_events(['record-view']) current_search.flush_and_refresh(index='events-stats-record-view') prefix = app.config['SEARCH_INDEX_PREFIX'] search = Search(using=es, index=prefix+'events-stats-record-view') assert search.count() == 1 doc = search.execute()[0] assert doc['doi'] == '10.1234/foo.bar' assert doc['conceptdoi'] == '10.1234/foo.concept' assert doc['recid'] == '12345' assert doc['conceptrecid'] == 'foo.concept' assert doc['resource_type'] == {'type': 'publication', 'subtype': 'book'} assert doc['access_right'] == 'open' assert doc['communities'] == ['zenodo'] assert doc['owners'] == [1]
def test_loans_jsonresolver(loan_pending_martigny): """Test loan json resolver.""" rec = Record.create({'loan': {'$ref': 'https://bib.rero.ch/api/loans/1'}}) assert rec.replace_refs().get('loan') == {'type': 'loanid', 'pid': '1'} # deleted record loan_pending_martigny.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create( {'loan': { '$ref': 'https://bib.rero.ch/api/loans/n_e' }}) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_listmetadataformats_record(app): """Test ListMetadataFormats for a record.""" with app.test_request_context(): with db.session.begin_nested(): record_id = uuid.uuid4() data = {'title_statement': {'title': 'Test0'}} recid_minter(record_id, data) pid = oaiid_minter(record_id, data) Record.create(data, id_=record_id) pid_value = pid.pid_value db.session.commit() _listmetadataformats( app=app, query='/oai2d?verb=ListMetadataFormats&identifier={0}'.format( pid_value))
def test_oaire_dumper(db, sqlite_tmpdb):
    """Test the grants dumper to local destination."""
    recuuid = uuid.uuid4()
    PersistentIdentifier.create('frdoi', '10.13039/501100000925',
                                object_type='rec', object_uuid=recuuid,
                                status='R')
    Record.create({'acronyms': ['EC']}, id_=recuuid)
    dumper = OAIREDumper(destination=sqlite_tmpdb)

    # We expect to harvest 5 records from the MockSickle.
    # With 'commit_batch_size=2', we will make 3 commits to the sqlite db.
    dumper.dump(commit_batch_size=2)
    loader = LocalOAIRELoader(source=sqlite_tmpdb)
    records = list(loader.iter_grants())
    assert len(records) == 5
def test_grant_linking(app, db, minimal_record, grant_record): """Test grant linking.""" minimal_record['grants'] = [{ '$ref': 'http://dx.zenodo.org/grants/10.13039/501100000780::282896'}] record = current_app.extensions['invenio-records'].replace_refs( Record.create(minimal_record)) assert record['grants'][0]['funder']['name'] == 'European Commission' record.validate()
def test_acq_orders_jsonresolver(acq_order_fiction_martigny): """Acquisition orders resolver tests.""" rec = Record.create({ 'acq_order': {'$ref': 'https://bib.rero.ch/api/acq_orders/acor1'} }) assert extracted_data_from_ref(rec.get('acq_order')) == 'acor1' # deleted record acq_order_fiction_martigny.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create({ 'acq_order': {'$ref': 'https://bib.rero.ch/api/acq_orders/n_e'} }) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_model_init(app, db, communities): """Test basic model initialization and actions.""" (comm1, comm2, comm3) = communities communities_key = app.config["COMMUNITIES_RECORD_KEY"] # Create a record and accept it into the community by creating an # InclusionRequest and then calling the accept action rec1 = Record.create({'title': 'Foobar'}) InclusionRequest.create(community=comm1, record=rec1) assert InclusionRequest.query.count() == 1 comm1.accept_record(rec1) assert 'comm1' in rec1[communities_key] assert InclusionRequest.query.count() == 0 # Likewise, reject a record from the community rec2 = Record.create({'title': 'Bazbar'}) InclusionRequest.create(community=comm1, record=rec2) assert InclusionRequest.query.count() == 1 comm1.reject_record(rec2) assert communities_key not in rec2 # dict key should not be created assert InclusionRequest.query.count() == 0 # Add record to another community InclusionRequest.create(community=comm2, record=rec1) comm2.accept_record(rec1) assert communities_key in rec1 assert len(rec1[communities_key]) == 2 assert comm1.id in rec1[communities_key] assert comm2.id in rec1[communities_key] # Accept/reject a record to/from a community without inclusion request rec3 = Record.create({'title': 'Spam'}) pytest.raises(InclusionRequestMissingError, comm1.accept_record, rec3) pytest.raises(InclusionRequestMissingError, comm1.reject_record, rec3) # Create two inclusion requests InclusionRequest.create(community=comm3, record=rec1) db.session.commit() db.session.flush() pytest.raises(InclusionRequestExistsError, InclusionRequest.create, community=comm3, record=rec1) # Try to accept a record to a community twice (should raise) # (comm1 is already in rec1) pytest.raises(InclusionRequestObsoleteError, InclusionRequest.create, community=comm1, record=rec1)
def test_patrons_jsonresolver(system_librarian_martigny_no_email): """Test patron json resolver.""" rec = Record.create({ 'patron': {'$ref': 'https://ils.rero.ch/api/patrons/ptrn1'} }) assert rec.replace_refs().get('patron') == {'pid': 'ptrn1'} # deleted record system_librarian_martigny_no_email.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create({ 'patron': {'$ref': 'https://ils.rero.ch/api/patrons/n_e'} }) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_patron_types_jsonresolver(app, patron_type_tmp): """Test patron type resolver.""" rec = Record.create({ 'patron_type': {'$ref': 'https://ils.rero.ch/api/patron_types/1'} }) assert rec.replace_refs().get('patron_type') == {'pid': '1'} # deleted record patron_type_tmp.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create({ 'patron_type': {'$ref': 'https://ils.rero.ch/api/patron_types/n_e'} }) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_item_types_jsonresolver(item_type_standard_martigny): """Item type resolver tests.""" rec = Record.create({ 'item_type': {'$ref': 'https://ils.rero.ch/api/item_types/itty1'} }) assert rec.replace_refs().get('item_type') == {'pid': 'itty1'} # deleted record item_type_standard_martigny.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create({ 'item_type': {'$ref': 'https://ils.rero.ch/api/item_types/n_e'} }) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_organisations_jsonresolver(app, tmp_organisation):
    """Test organisation json resolver."""
    rec = Record.create({
        'organisation': {'$ref': 'https://ils.rero.ch/api/organisations/1'}
    })
    assert rec.replace_refs().get('organisation') == {'pid': '1'}

    # deleted record
    tmp_organisation.delete()
    with pytest.raises(JsonRefError):
        rec.replace_refs().dumps()

    # non existing record
    rec = Record.create({
        'organisation': {'$ref': 'https://ils.rero.ch/api/organisations/n_e'}
    })
    with pytest.raises(JsonRefError):
        rec.replace_refs().dumps()
def test_locations_jsonresolver(loc_public_martigny): """Test location json resolver.""" rec = Record.create({ 'location': {'$ref': 'https://ils.rero.ch/api/locations/loc1'} }) assert rec.replace_refs().get('location') == {'pid': 'loc1'} # deleted record loc_public_martigny.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create({ 'location': {'$ref': 'https://ils.rero.ch/api/locations/n_e'} }) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_budgets_jsonresolver(budget_2017_martigny): """Budgets resolver tests.""" rec = Record.create({ 'budget': {'$ref': 'https://ils.rero.ch/api/budgets/budg5'} }) assert extracted_data_from_ref(rec.get('budget')) == 'budg5' # deleted record budget_2017_martigny.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create({ 'budget': {'$ref': 'https://ils.rero.ch/api/budgets/n_e'} }) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_listrecords(app): """Test ListRecords.""" schema = { 'type': 'object', 'properties': { 'title': { 'type': 'string' }, 'field': { 'type': 'boolean' }, }, 'required': ['title'], } with app.test_request_context(): indexer = RecordIndexer() with db.session.begin_nested(): record_id = uuid.uuid4() data = {'title': 'Test0', '$schema': schema} recid_minter(record_id, data) oaiid_minter(record_id, data) record = Record.create(data, id_=record_id) db.session.commit() indexer.index_by_id(record_id) sleep(2) with app.test_client() as c: result = c.get('/oai2d?verb=ListRecords&metadataPrefix=oai_dc') tree = etree.fromstring(result.data) assert len(tree.xpath('/x:OAI-PMH', namespaces=NAMESPACES)) == 1 assert len( tree.xpath('/x:OAI-PMH/x:ListRecords', namespaces=NAMESPACES)) == 1 assert len( tree.xpath('/x:OAI-PMH/x:ListRecords/x:record', namespaces=NAMESPACES)) == 1 assert len( tree.xpath('/x:OAI-PMH/x:ListRecords/x:record/x:header', namespaces=NAMESPACES)) == 1 assert len( tree.xpath( '/x:OAI-PMH/x:ListRecords/x:record/x:header' '/x:identifier', namespaces=NAMESPACES)) == 1 assert len( tree.xpath( '/x:OAI-PMH/x:ListRecords/x:record/x:header' '/x:datestamp', namespaces=NAMESPACES)) == 1 assert len( tree.xpath('/x:OAI-PMH/x:ListRecords/x:record/x:metadata', namespaces=NAMESPACES)) == 1
def test_documents_jsonresolver(document): """Test document json resolver.""" rec = Record.create({ 'document': {'$ref': 'https://ils.rero.ch/api/documents/doc1'} }) assert rec.replace_refs().get('document') == {'pid': 'doc1'} # deleted record document.delete() with pytest.raises(JsonRefError): rec.replace_refs().dumps() # non existing record rec = Record.create({ 'document': {'$ref': 'https://ils.rero.ch/api/documents/n_e'} }) with pytest.raises(JsonRefError): rec.replace_refs().dumps()
def test_deposits_transform(app, db, deposit_dump):
    """Test deposit transformation."""
    # Import locally: importing transform_deposit at module level causes errors.
    from zenodo_migrator.deposit import transform_deposit

    deposit_dump = [deposit_dump[6]]
    for idx, (inp, expected) in enumerate(deposit_dump, 1):
        deposit = Record.create(inp)
        transformed = transform_deposit(deposit)
        assert transformed == expected, "Failed at testcase {0}".format(idx)
def test_before_record_index_dynamic_connect(app): """Test before_record_index.dynamic_connect.""" with app.app_context(): with patch("invenio_records.api._records_state.validate"): auth_record = Record.create( { "$schema": "/records/authorities/authority-v1.0.0.json", "title": "Test", } ) bib_record = Record.create( { "$schema": "/records/bibliographic/bibliographic-v1.0.0.json", "title": "Test", } ) db.session.commit() def _simple(sender, json=None, **kwargs): json["simple"] = "simple" def _custom(sender, json=None, **kwargs): json["custom"] = "custom" def _cond(sender, connect_kwargs, index=None, **kwargs): return "bibliographic" in index _receiver1 = before_record_index.dynamic_connect( _simple, index="records-authorities-authority-v1.0.0" ) _receiver2 = before_record_index.dynamic_connect(_custom, condition_func=_cond) action = RecordIndexer()._index_action(dict(id=str(auth_record.id), op="index")) assert "title" in action["_source"] assert action["_source"]["simple"] == "simple" action = RecordIndexer()._index_action( dict(id=str(bib_record.id), index="foo", op="index") ) assert "title" in action["_source"] assert action["_source"]["custom"] == "custom" before_record_index.disconnect(_receiver1) before_record_index.disconnect(_receiver2)
def license_record(db, es, sip_metadata_types): """Create a license record.""" licenses = [ Record.create({ "$schema": "https://zenodo.org/schemas/licenses/license-v1.0.0.json", "domain_content": True, "domain_data": True, "domain_software": True, "family": "", "id": "CC-BY-4.0", "maintainer": "Creative Commons", "od_conformance": "approved", "osd_conformance": "not reviewed", "status": "active", "title": "Creative Commons Attribution International 4.0", "url": "https://creativecommons.org/licenses/by/4.0/" }), Record.create({ "$schema": "https://zenodo.org/schemas/licenses/license-v1.0.0.json", "domain_content": True, "domain_data": True, "domain_software": True, "family": "", "id": "CC0-1.0", "maintainer": "Creative Commons", "od_conformance": "approved", "osd_conformance": "not reviewed", "status": "active", "title": "CC0 1.0", "url": "https://creativecommons.org/publicdomain/zero/1.0/" }) ] for license in licenses: PersistentIdentifier.create( pid_type='od_lic', pid_value=license['id'], object_type='rec', object_uuid=license.id, status='R') db.session.commit() for license in licenses: RecordIndexer().index_by_id(license.id) current_search.flush_and_refresh(index='licenses') return licenses[1]
def create_new_license(license): """Create a new license record. :param license: License dictionary to be loaded. :type license: dict """ license = update_legacy_meta(license) license_validator.validate(license) record = Record.create(license) license_minter(record.id, license)