def test_update_expired_embargo(app, test_communities, login_user):
    """Test record embargo update.

    Creates two closed-access records: one whose embargo date is already
    reached and one whose embargo only ends tomorrow.  After running the
    embargo-update task, only the first record must become readable.
    """
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    with app.app_context():
        creator = create_user('creator')
        non_creator = create_user('non-creator')
        # create a record with a finishing embargo
        released_record_data = generate_record_data(
            open_access=False,
            embargo_date=datetime.utcnow().isoformat(),
        )
        _, _, released_record = create_record(released_record_data, creator,
                                              files=uploaded_files)
        released_record_id = released_record.id
        # create a record with a not yet finished embargo
        closed_record_data = generate_record_data(
            open_access=False,
            # embargo finishes tomorrow
            embargo_date=(datetime.utcnow() + timedelta(days=1)).isoformat(),
        )
        _, _, closed_record = create_record(closed_record_data, creator,
                                            files=uploaded_files)
        closed_record_id = closed_record.id
        db.session.commit()
        # refresh index to make records searchable
        current_search._client.indices.refresh()

    def check_embargo(record_id, is_embargoed):
        # Verify both the metadata flag and the file-bucket permissions
        # of the record as seen by a non-owner user.
        with app.app_context():
            with app.test_client() as client:
                login_user(non_creator, client)
                # test open_access field in record's metadata
                record = Record.get_record(record_id)
                assert record['open_access'] != is_embargoed
                # test record's file access
                subtest_file_bucket_permissions(
                    client, record.files.bucket,
                    access_level=None if is_embargoed else 'read',
                    is_authenticated=True)

    # check that both records are under embargo
    check_embargo(released_record_id, is_embargoed=True)
    check_embargo(closed_record_id, is_embargoed=True)

    with app.app_context():
        # FIX: the task is named ``update_expired_embargoes`` (the CLI
        # variant of this test in this file invokes exactly that name);
        # ``update_expired_embargos`` is an undefined/misspelled name.
        update_expired_embargoes.delay()
        # refresh index to make records searchable
        current_search._client.indices.refresh()

    # check that only the released record is not under embargo
    check_embargo(released_record_id, is_embargoed=False)
    check_embargo(closed_record_id, is_embargoed=True)
def make_record(test_records_data):
    """Create a record from the first test data set and attach two
    hard-coded file descriptions to its ``_files`` metadata.

    Returns the ``(pid, record)`` pair of the created record.
    """
    owner = create_user('creator')
    _, pid, record = create_record(test_records_data[0], owner)
    handle_prefix = "http://hdl.handle.net/1234/"
    file_entries = []
    # (bucket, checksum, key, size, version_id); the ePIC PID is derived
    # from the bucket id below.
    for bucket, checksum, key, size, version_id in (
        ("15163455-650b-45e5-9b9f-6cf2ef70a08f",
         "md5:4653e51dc9b73e020167299ac607e0e1",
         "file1.pptx",
         26289470,
         "389fff57-e6d7-4434-9a44-ca17297be22f"),
        ("51163455-650b-45e5-9b9f-6cf2ef70a08f",
         "md5:4adfe51dc9b73e020167299ac607e0e1",
         "file2.pptx",
         1,
         "698fff57-e6d7-4434-9a44-ca17297be22f"),
    ):
        file_entries.append({
            'bucket': bucket,
            'checksum': checksum,
            'key': key,
            'size': size,
            'version_id': version_id,
            'ePIC_PID': handle_prefix + bucket,
        })
    record['_files'] = file_entries
    return pid, record
def test_deposit_create_permission(app, test_users, login_user,
                                   test_communities):
    """Test record draft creation."""
    headers = [('Content-Type', 'application/json'),
               ('Accept', 'application/json')]
    with app.app_context():
        community_name = 'MyTestCommunity1'
        record_data = generate_record_data(community=community_name)
        community_id = test_communities[community_name]
        community = Community.get(community_id)
        creator = create_user('creator')
        need = create_deposit_need_factory(str(community_id))
        allowed = create_user('allowed', permissions=[need])
        com_member = create_user('com_member', roles=[community.member_role])
        com_admin = create_user('com_admin', roles=[community.admin_role])
        deposit, pid, record = create_record(record_data, creator)

        def set_restricted_submission(flag):
            # toggle the community's submission restriction and persist it
            community.update({'restricted_submission': flag})
            db.session.commit()

        def expect_creation(code, user=None, version_of=None):
            # POST a draft (optionally a new version of an existing
            # record) as the given user and assert the HTTP status
            with app.test_client() as client:
                if user is not None:
                    login_user(user, client)
                response = client.post(
                    url_for('b2share_records_rest.b2rec_list',
                            version_of=version_of),
                    data=json.dumps(record_data),
                    headers=headers)
                assert response.status_code == code

        # anonymous users are always rejected
        set_restricted_submission(False)
        expect_creation(401)
        set_restricted_submission(True)
        expect_creation(401)

        # logged in users may create drafts unless submission is restricted
        set_restricted_submission(False)
        expect_creation(201, creator)
        set_restricted_submission(True)
        expect_creation(403, creator)
        # a user who was explicitly granted the deposit need is allowed
        expect_creation(201, allowed)
        # community members and admins are always allowed
        expect_creation(201, com_member)
        expect_creation(201, com_admin)

        # creating a new version is reserved to the record owner
        expect_creation(401, None, version_of=pid.pid_value)
        expect_creation(403, com_member, version_of=pid.pid_value)
        set_restricted_submission(True)
        expect_creation(403, creator, version_of=pid.pid_value)
        set_restricted_submission(False)
        expect_creation(201, creator, version_of=pid.pid_value)
def test_deposit_create_permission(app, test_users, login_user,
                                   test_communities):
    """Test record draft creation."""
    headers = [('Content-Type', 'application/json'),
               ('Accept', 'application/json')]
    with app.app_context():
        community_name = 'MyTestCommunity1'
        record_data = generate_record_data(community=community_name)
        community_id = test_communities[community_name]
        community = Community.get(community_id)
        creator = create_user('creator')
        need = create_deposit_need_factory(str(community_id))
        allowed = create_user('allowed', permissions=[need])
        com_member = create_user('com_member', roles=[community.member_role])
        com_admin = create_user('com_admin', roles=[community.admin_role])
        deposit, pid, record = create_record(record_data, creator)

        def toggle_restriction(restricted):
            # flip the community submission policy and commit the change
            community.update({'restricted_submission': restricted})
            db.session.commit()

        def attempt(expected_code, user=None, version_of=None):
            # try to create a draft as the given user; assert the status
            with app.test_client() as client:
                if user is not None:
                    login_user(user, client)
                res = client.post(url_for('b2share_records_rest.b2rec_list',
                                          version_of=version_of),
                                  data=json.dumps(record_data),
                                  headers=headers)
                assert res.status_code == expected_code

        # anonymous user: always rejected
        toggle_restriction(False)
        attempt(401)
        toggle_restriction(True)
        attempt(401)

        # logged in user: allowed unless submission is restricted
        toggle_restriction(False)
        attempt(201, creator)
        toggle_restriction(True)
        attempt(403, creator)
        # user holding the community deposit permission
        attempt(201, allowed)
        # community member and admin
        attempt(201, com_member)
        attempt(201, com_admin)

        # new versions can only be created by the record owner
        attempt(401, None, version_of=pid.pid_value)
        attempt(403, com_member, version_of=pid.pid_value)
        toggle_restriction(True)
        attempt(403, creator, version_of=pid.pid_value)
        toggle_restriction(False)
        attempt(201, creator, version_of=pid.pid_value)
def test_deposit_create(app, test_records_data, test_users, login_user):
    """Test record draft creation."""
    headers = [('Content-Type', 'application/json'),
               ('Accept', 'application/json')]

    def post_draft(client, record_data):
        # submit one draft-creation request and return the raw response
        return client.post(
            url_for('b2share_records_rest.b2rec_list'),
            data=json.dumps(record_data),
            headers=headers)

    # an anonymous user cannot create a deposit
    with app.app_context():
        with app.test_client() as client:
            response = post_draft(client, test_records_data[0])
            assert response.status_code == 401

    # a logged in user can create a draft for every test data set
    with app.app_context():
        with app.test_client() as client:
            user = test_users['normal']
            login_user(user, client)
            for record_data in test_records_data:
                response = post_draft(client, record_data)
                assert response.status_code == 201
                payload = json.loads(response.get_data(as_text=True))
                expected_metadata = build_expected_metadata(
                    record_data,
                    PublicationStates.draft.name,
                    owners=[user.id],
                    draft=True,
                    PID=payload['metadata'].get('ePIC_PID'),
                    DOI=payload['metadata'].get('DOI'),
                )
                assert expected_metadata == payload['metadata']
                subtest_self_link(payload, response.headers, client)
def test_deposit_create(app, test_records_data, test_users, login_user):
    """Test record draft creation."""
    headers = [('Content-Type', 'application/json'),
               ('Accept', 'application/json')]

    def submit(client, data):
        # POST one draft-creation request
        return client.post(url_for('b2share_records_rest.b2rec_list'),
                           data=json.dumps(data),
                           headers=headers)

    # an anonymous user cannot create a deposit
    with app.app_context():
        with app.test_client() as client:
            res = submit(client, test_records_data[0])
            assert res.status_code == 401

    # a logged in user can create one deposit per test data set
    with app.app_context():
        with app.test_client() as client:
            user = test_users['normal']
            login_user(user, client)
            for data in test_records_data:
                res = submit(client, data)
                assert res.status_code == 201
                body = json.loads(res.get_data(as_text=True))
                expected = build_expected_metadata(
                    data,
                    PublicationStates.draft.name,
                    owners=[user.id],
                    draft=True,
                    PID=body['metadata'].get('ePIC_PID'),
                    DOI=body['metadata'].get('DOI'),
                )
                assert expected == body['metadata']
                subtest_self_link(body, res.headers, client)
def test_record_content(app, test_communities, login_user, test_users):
    """Test record read with REST API."""
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    admin = test_users['admin']
    with app.app_context():
        creator = create_user('creator')
        # NOTE(review): 'non-creator' is created for its DB side effect
        # only; it is not referenced again in this test.
        non_creator = create_user('non-creator')
        record_data = generate_record_data()
        _, record_pid, record = create_record(record_data, creator,
                                              files=uploaded_files)
        with app.test_client() as client:
            login_user(creator, client)
            headers = [('Accept', 'application/json')]
            response = client.get(
                url_for('b2share_records_rest.b2rec_item',
                        pid_value=record_pid.pid_value),
                headers=headers)
            assert response.status_code == 200
            payload = json.loads(response.get_data(as_text=True))
            assert 'created' in payload
            expected_metadata = build_expected_metadata(
                record_data,
                PublicationStates.published.name,
                owners=[creator.id],
                PID=payload['metadata'].get('ePIC_PID'),
                DOI=payload['metadata'].get('DOI'),
            )
            assert payload['metadata'] == expected_metadata
            # check that the link to the bucket is correctly generated
            expected_bucket_link = url_for_bucket(record.files.bucket)
            assert payload['links']['files'] == expected_bucket_link
            # test self link
            subtest_self_link(payload, response.headers, client)
def test_modify(status, user=None):
    """Patch the record's titles as *user* and assert the HTTP *status*.

    NOTE(review): relies on ``app``, ``record_data``, ``creator`` and
    ``login_user`` being available from the enclosing scope.
    """
    # JSON-Patch document replacing the record titles
    title_patch = [{
        "op": "replace",
        "path": "/titles",
        "value": [{'title': 'newtitle'}],
    }]
    with app.test_client() as client:
        _, record_pid, record = create_record(record_data, creator)
        if user is not None:
            login_user(user, client)
        # patch the document and check the returned status code
        patch_headers = [('Content-Type', 'application/json-patch+json'),
                         ('Accept', 'application/json')]
        response = client.patch(
            url_for('b2share_records_rest.b2rec_item',
                    pid_value=record_pid.pid_value),
            data=json.dumps(title_patch),
            headers=patch_headers)
        assert response.status_code == status
def test_record_publish_with_external_pids(
        app, login_user, records_data_with_external_pids):
    # test_users, test_communities
    """Test record external files and handle allocation."""
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    with app.app_context():
        app.config.update({'FAKE_EPIC_PID': True})
        creator = create_user('creator')
        external_pids = records_data_with_external_pids['external_pids']
        record_data = generate_record_data(external_pids=external_pids)
        _, record_pid, record = create_record(record_data, creator,
                                              files=uploaded_files)
        with app.test_client() as client:
            login_user(creator, client)
            headers = [('Accept', 'application/json')]
            response = client.get(
                url_for('b2share_records_rest.b2rec_item',
                        pid_value=record_pid.pid_value),
                headers=headers)
            assert response.status_code == 200
            record = json.loads(response.get_data(as_text=True))
            # one entry per uploaded file plus one per external pid
            assert len(record['files']) == (len(external_pids)
                                            + len(uploaded_files))
            for entry in record['files']:
                assert entry['ePIC_PID']
                if entry['key'] in uploaded_files:
                    # uploaded (internal) file: a fake pid was allocated
                    assert '0000' in entry['ePIC_PID']
                else:
                    # external file: the given pid must be kept as-is
                    assert entry['b2safe']
                    matching = [p for p in external_pids
                                if p['key'] == entry['key']][0]
                    assert entry['ePIC_PID'] == matching['ePIC_PID']
def test_record_publish_with_external_pids(app, login_user,
                                           records_data_with_external_pids):
    # test_users, test_communities
    """Test record external files and handle allocation."""
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    with app.app_context():
        app.config.update({'FAKE_EPIC_PID': True})
        creator = create_user('creator')
        external_pids = records_data_with_external_pids['external_pids']
        record_data = generate_record_data(external_pids=external_pids)
        _, record_pid, record = create_record(record_data, creator,
                                              files=uploaded_files)
        with app.test_client() as client:
            login_user(creator, client)
            res = client.get(
                url_for('b2share_records_rest.b2rec_item',
                        pid_value=record_pid.pid_value),
                headers=[('Accept', 'application/json')])
            assert res.status_code == 200
            record = json.loads(res.get_data(as_text=True))
            # the published record lists internal and external files
            assert len(record['files']) == (len(external_pids)
                                            + len(uploaded_files))
            for file_entry in record['files']:
                assert file_entry['ePIC_PID']
                if file_entry['key'] in uploaded_files:
                    # internal file: a fresh fake pid was allocated
                    assert '0000' in file_entry['ePIC_PID']
                else:
                    # external (b2safe) file keeps its original pid
                    assert file_entry['b2safe']
                    original = [p for p in external_pids
                                if p['key'] == file_entry['key']][0]
                    assert file_entry['ePIC_PID'] == original['ePIC_PID']
def make_record(test_records_data):
    """Create a record owned by a fresh 'creator' user and attach two
    static file descriptions; return its ``(pid, record)`` pair."""
    user = create_user('creator')
    _, pid, record = create_record(test_records_data[0], user)
    keys = ('bucket', 'checksum', 'key', 'size', 'version_id', 'ePIC_PID')
    record['_files'] = [
        dict(zip(keys, values)) for values in (
            ("15163455-650b-45e5-9b9f-6cf2ef70a08f",
             "md5:4653e51dc9b73e020167299ac607e0e1",
             "file1.pptx",
             26289470,
             "389fff57-e6d7-4434-9a44-ca17297be22f",
             "http://hdl.handle.net/1234/"
             "15163455-650b-45e5-9b9f-6cf2ef70a08f"),
            ("51163455-650b-45e5-9b9f-6cf2ef70a08f",
             "md5:4adfe51dc9b73e020167299ac607e0e1",
             "file2.pptx",
             1,
             "698fff57-e6d7-4434-9a44-ca17297be22f",
             "http://hdl.handle.net/1234/"
             "51163455-650b-45e5-9b9f-6cf2ef70a08f"),
        )
    ]
    return pid, record
def test_record_read_permissions(app, test_communities, login_user,
                                 test_users):
    """Test record read with REST API."""
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    admin = test_users['admin']
    with app.app_context():
        creator = create_user('creator')
        non_creator = create_user('non-creator')
        open_record_data = generate_record_data(open_access=True)
        _, open_record_pid, open_record = create_record(
            open_record_data, creator, files=uploaded_files)
        closed_record_data = generate_record_data(open_access=False)
        _, closed_record_pid, closed_record = create_record(
            closed_record_data, creator, files=uploaded_files)

        with app.test_client() as client:
            login_user(creator, client)
            subtest_file_bucket_content(client, open_record.files.bucket,
                                        uploaded_files)
            subtest_file_bucket_content(client, closed_record.files.bucket,
                                        uploaded_files)

        def assert_get(pid, record, status, user=None, files_access=None):
            # GET the record as the given user, check the response status
            # and the resulting access level on the record's file bucket
            with app.test_client() as client:
                if user is not None:
                    login_user(user, client)
                response = client.get(
                    url_for('b2share_records_rest.b2rec_item',
                            pid_value=pid.pid_value),
                    headers=[('Accept', 'application/json')])
                # parse the body; this also checks it is valid JSON
                json.loads(response.get_data(as_text=True))
                assert response.status_code == status
                # check that the permissions to the file bucket is correct
                subtest_file_bucket_permissions(
                    client, record.files.bucket,
                    access_level=files_access,
                    is_authenticated=user is not None)

        # anonymous user
        assert_get(open_record_pid, open_record, 200, files_access='read')
        assert_get(closed_record_pid, closed_record, 200)
        # authenticated user who does not own the records
        assert_get(open_record_pid, open_record, 200, non_creator,
                   files_access='read')
        assert_get(closed_record_pid, closed_record, 200, non_creator)
        # record owner
        assert_get(open_record_pid, open_record, 200, creator,
                   files_access='read')
        assert_get(closed_record_pid, closed_record, 200, creator,
                   files_access='read')
        # admin
        assert_get(open_record_pid, open_record, 200, admin,
                   files_access='read')
        assert_get(closed_record_pid, closed_record, 200, admin,
                   files_access='read')
def test_delete_record(app, test_users, test_communities, login_user,
                       script_info):
    """Test record deletion through the REST API.

    Creates a record with files, checks who may delete it, then verifies
    that the record, its deposit and all attached buckets/files become
    inaccessible and are no longer indexed after a reindex run.
    """
    from click.testing import CliRunner
    from invenio_search import current_search_client
    from invenio_indexer import cli
    from invenio_indexer.tasks import process_bulk_queue
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    admin = test_users['admin']

    headers = [('Accept', 'application/json')]
    with app.app_context():
        creator = create_user('creator')
        non_creator = create_user('non_creator')
        record_data = generate_record_data()
        with app.test_client() as client:
            deposit, record_pid, record = create_record(
                record_data, creator, files=uploaded_files)
            pid_value = record_pid.pid_value
            record_id = record.id
            bucket_id = record.files.bucket.id
            object_version = record.files.bucket.objects[0]
            deposit_bucket_id = deposit.files.bucket.id
            deposit_object_version = deposit.files.bucket.objects[0]

            record_url = url_for('b2share_records_rest.b2rec_item',
                                 pid_value=pid_value)
            deposit_url = url_for('b2share_deposit_rest.b2dep_item',
                                  pid_value=pid_value)
            bucket_url = url_for('invenio_files_rest.bucket_api',
                                 bucket_id=bucket_id)
            deposit_bucket_url = url_for('invenio_files_rest.bucket_api',
                                         bucket_id=deposit_bucket_id)
            object_version_url = url_for(
                'invenio_files_rest.object_api',
                bucket_id=bucket_id,
                version=object_version.version_id,
                key=object_version.key)
            deposit_object_version_url = url_for(
                'invenio_files_rest.object_api',
                bucket_id=deposit_bucket_id,
                version=deposit_object_version.version_id,
                key=deposit_object_version.key)

        # check that the record and deposit are searchable
        current_search_client.indices.flush('*')
        res = current_search_client.search(index='records')
        assert res['hits']['total'] == 1
        res = current_search_client.search(index='deposits')
        assert res['hits']['total'] == 1

    def test_delete(status, user=None):
        # DELETE the record as the given user and check the status code
        with app.test_client() as client:
            if user is not None:
                login_user(user, client)
            # delete the record
            request_res = client.delete(record_url, headers=headers)
            assert request_res.status_code == status

    def test_access(user=None, deleted=True):
        # Check visibility of the record, the deposit and their files.
        # FIX: the original assertions read
        #     assert request_res.status_code == 410 if deleted else 200
        # which parses as ``(status == 410) if deleted else 200`` and is
        # therefore vacuously true whenever ``deleted`` is False.  The
        # expected status must be parenthesized.
        with app.test_client() as client:
            if user is not None:
                login_user(user, client)
            # try accessing the record
            request_res = client.get(record_url, headers=headers)
            assert request_res.status_code == (410 if deleted else 200)
            # try accessing the file bucket
            request_res = client.get(bucket_url, headers=headers)
            assert request_res.status_code == (404 if deleted else 200)
            # try accessing the file
            request_res = client.get(object_version_url, headers=headers)
            assert request_res.status_code == (404 if deleted else 200)
            # try accessing the deposit
            request_res = client.get(deposit_url, headers=headers)
            assert request_res.status_code == (410 if deleted else 200)
            # try accessing the deposit file bucket
            request_res = client.get(deposit_bucket_url, headers=headers)
            assert request_res.status_code == (404 if deleted else 200)
            # try accessing the deposit file
            request_res = client.get(deposit_object_version_url,
                                     headers=headers)
            assert request_res.status_code == (404 if deleted else 200)

    # Check that everything is accessible
    test_access(creator, deleted=False)

    test_delete(401)  # anonymous user
    test_delete(403, creator)
    test_delete(403, non_creator)
    # FIX: a successful REST deletion returns 204 No Content; the other
    # copy of this test in this file already expects 204, not 200.
    test_delete(204, admin)

    test_access()  # anonymous user
    test_access(creator)
    test_access(non_creator)
    test_access(admin)

    # Check that reindexing records does not index deleted records
    # and deposits
    with app.app_context():
        runner = CliRunner()
        # Initialize queue
        res = runner.invoke(cli.queue, ['init', 'purge'], obj=script_info)
        assert 0 == res.exit_code
        # schedule a reindex task
        res = runner.invoke(cli.reindex, ['--yes-i-know'], obj=script_info)
        assert 0 == res.exit_code
        res = runner.invoke(cli.run, [], obj=script_info)
        assert 0 == res.exit_code
        # execute scheduled tasks synchronously
        process_bulk_queue.delay()
        # flush the indices so that indexed records are searchable
        current_search_client.indices.flush('*')

        # check that the record and deposit are not indexed
        res = current_search_client.search(index='records')
        assert res['hits']['total'] == 0
        res = current_search_client.search(index='deposits')
        assert res['hits']['total'] == 0
def test_update_expired_embargo(app, test_communities, login_user, cli_cmd):
    """Test record embargo update."""
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    with app.app_context():
        creator = create_user('creator')
        non_creator = create_user('non-creator')
        # a record whose embargo date is already reached
        released_record_data = generate_record_data(
            open_access=False,
            embargo_date=datetime.utcnow().isoformat(),
        )
        _, _, released_record = create_record(
            released_record_data, creator, files=uploaded_files)
        released_record_id = released_record.id
        # a record whose embargo only finishes tomorrow
        closed_record_data = generate_record_data(
            open_access=False,
            embargo_date=(datetime.utcnow() + timedelta(days=1)).isoformat(),
        )
        _, _, closed_record = create_record(
            closed_record_data, creator, files=uploaded_files)
        closed_record_id = closed_record.id
        db.session.commit()
        # refresh index to make records searchable
        current_search._client.indices.refresh()

    def assert_embargo(record_id, is_embargoed):
        # verify both the metadata flag and the bucket permissions as
        # seen by a non-owner user
        with app.app_context():
            with app.test_client() as client:
                login_user(non_creator, client)
                rec = Record.get_record(record_id)
                assert rec['open_access'] != is_embargoed
                subtest_file_bucket_permissions(
                    client, rec.files.bucket,
                    access_level=None if is_embargoed else 'read',
                    is_authenticated=True)

    # both records start out under embargo
    assert_embargo(released_record_id, is_embargoed=True)
    assert_embargo(closed_record_id, is_embargoed=True)

    with app.app_context():
        if cli_cmd:
            # run the update through the CLI command
            script_info = ScriptInfo(create_app=lambda info: app)
            runner = CliRunner()
            result = runner.invoke(b2records, ['update_expired_embargoes'],
                                   obj=script_info)
            assert result.exit_code == 0
        else:
            # run the update as a celery task
            update_expired_embargoes.delay()
        # refresh index to make records searchable
        current_search._client.indices.refresh()

    # only the released record must now be open
    assert_embargo(released_record_id, is_embargoed=False)
    assert_embargo(closed_record_id, is_embargoed=True)
def test_delete_record(app, test_users, test_communities, login_user,
                       script_info):
    """Test record deletion through the REST API.

    Only an admin may delete; afterwards the record, its deposit and all
    attached buckets/files must be gone and must not be reindexed.
    """
    from click.testing import CliRunner
    from invenio_search import current_search_client
    from invenio_indexer import cli
    from invenio_indexer.tasks import process_bulk_queue
    uploaded_files = {'myfile1.dat': b'contents1',
                      'myfile2.dat': b'contents2'}
    admin = test_users['admin']

    headers = [('Accept', 'application/json')]
    with app.app_context():
        creator = create_user('creator')
        non_creator = create_user('non_creator')
        record_data = generate_record_data()
        with app.test_client() as client:
            deposit, record_pid, record = create_record(
                record_data, creator, files=uploaded_files)
            pid_value = record_pid.pid_value
            record_id = record.id
            bucket_id = record.files.bucket.id
            object_version = record.files.bucket.objects[0]
            deposit_bucket_id = deposit.files.bucket.id
            deposit_object_version = deposit.files.bucket.objects[0]

            record_url = url_for('b2share_records_rest.b2rec_item',
                                 pid_value=pid_value)
            deposit_url = url_for('b2share_deposit_rest.b2dep_item',
                                  pid_value=pid_value)
            bucket_url = url_for('invenio_files_rest.bucket_api',
                                 bucket_id=bucket_id)
            deposit_bucket_url = url_for('invenio_files_rest.bucket_api',
                                         bucket_id=deposit_bucket_id)
            object_version_url = url_for(
                'invenio_files_rest.object_api',
                bucket_id=bucket_id,
                version=object_version.version_id,
                key=object_version.key)
            deposit_object_version_url = url_for(
                'invenio_files_rest.object_api',
                bucket_id=deposit_bucket_id,
                version=deposit_object_version.version_id,
                key=deposit_object_version.key)

        # check that the record and deposit are searchable
        current_search_client.indices.flush('*')
        res = current_search_client.search(index='records')
        assert res['hits']['total'] == 1
        res = current_search_client.search(index='deposits')
        assert res['hits']['total'] == 1

    def test_delete(status, user=None):
        # DELETE the record as the given user and check the status code
        with app.test_client() as client:
            if user is not None:
                login_user(user, client)
            # delete the record
            request_res = client.delete(record_url, headers=headers)
            assert request_res.status_code == status

    def test_access(user=None, deleted=True):
        # Check visibility of the record, the deposit and their files.
        # FIX: the original assertions read
        #     assert request_res.status_code == 410 if deleted else 200
        # which parses as ``(status == 410) if deleted else 200`` and is
        # therefore vacuously true whenever ``deleted`` is False.  The
        # expected status must be parenthesized.
        with app.test_client() as client:
            if user is not None:
                login_user(user, client)
            # try accessing the record
            request_res = client.get(record_url, headers=headers)
            assert request_res.status_code == (410 if deleted else 200)
            # try accessing the file bucket
            request_res = client.get(bucket_url, headers=headers)
            assert request_res.status_code == (404 if deleted else 200)
            # try accessing the file
            request_res = client.get(object_version_url, headers=headers)
            assert request_res.status_code == (404 if deleted else 200)
            # try accessing the deposit
            request_res = client.get(deposit_url, headers=headers)
            assert request_res.status_code == (410 if deleted else 200)
            # try accessing the deposit file bucket
            request_res = client.get(deposit_bucket_url, headers=headers)
            assert request_res.status_code == (404 if deleted else 200)
            # try accessing the deposit file
            request_res = client.get(deposit_object_version_url,
                                     headers=headers)
            assert request_res.status_code == (404 if deleted else 200)

    # Check that everything is accessible
    test_access(creator, deleted=False)

    test_delete(401)  # anonymous user
    test_delete(403, creator)
    test_delete(403, non_creator)
    test_delete(204, admin)

    test_access()  # anonymous user
    test_access(creator)
    test_access(non_creator)
    test_access(admin)

    # Check that reindexing records does not index deleted records
    # and deposits
    with app.app_context():
        runner = CliRunner()
        # Initialize queue
        res = runner.invoke(cli.queue, ['init', 'purge'], obj=script_info)
        assert 0 == res.exit_code
        # schedule a reindex task
        res = runner.invoke(cli.reindex, ['--yes-i-know'], obj=script_info)
        assert 0 == res.exit_code
        res = runner.invoke(cli.run, [], obj=script_info)
        assert 0 == res.exit_code
        # execute scheduled tasks synchronously
        process_bulk_queue.delay()
        # flush the indices so that indexed records are searchable
        current_search_client.indices.flush('*')

        # check that the record and deposit are not indexed
        res = current_search_client.search(index='records')
        assert res['hits']['total'] == 0
        res = current_search_client.search(index='deposits')
        assert res['hits']['total'] == 0