def test_post_invalid_partsizes(client, headers, bucket, get_json, admin_user):
    """Test multipart init rejects invalid part/file sizes with 400."""
    login_user(client, admin_user)

    # Part size too large
    res = client.post(
        obj_url(bucket), query_string='uploads', headers=headers,
        data=json.dumps({'size': 30, 'partSize': 21}))
    assert res.status_code == 400

    # Part size too small
    res = client.post(
        obj_url(bucket), query_string='uploads', headers=headers,
        data=json.dumps({'size': 30, 'partSize': 1}))
    assert res.status_code == 400

    # Size too large
    res = client.post(
        obj_url(bucket), query_string='uploads', headers=headers,
        data=json.dumps({'size': 2 * 100 + 1, 'partSize': 2}))
    assert res.status_code == 400
def test_post_locked_bucket(client, db, headers, bucket, get_json, admin_user):
    """Test multipart init on a locked (403) and deleted (404) bucket."""
    login_user(client, admin_user)

    # A locked bucket refuses modifications.
    bucket.locked = True
    db.session.commit()
    res = client.post(
        obj_url(bucket), query_string='uploads', headers=headers,
        data=json.dumps({'size': 10, 'partSize': 2}))
    assert res.status_code == 403

    # A deleted bucket is reported as not found.
    bucket.deleted = True
    db.session.commit()
    res = client.post(
        obj_url(bucket), query_string='uploads', headers=headers,
        data=json.dumps({'size': 10, 'partSize': 2}))
    assert res.status_code == 404
def test_post_size_limits(client, db, headers, bucket, admin_user):
    """Test multipart init against bucket quota and max file size."""
    login_user(client, admin_user)
    bucket.quota_size = 100
    db.session.commit()

    # Bucket quota exceed
    res = client.post(
        obj_url(bucket), query_string='uploads', headers=headers,
        data=json.dumps({'size': 101, 'partSize': 20}))
    assert res.status_code == 400

    bucket.max_file_size = 50
    db.session.commit()

    # Max file size exceeded
    res = client.post(
        obj_url(bucket), query_string='uploads', headers=headers,
        data=json.dumps({'size': 51, 'partSize': 20}))
    assert res.status_code == 400
def test_get(client, headers, permissions, bucket, objects, get_json):
    """Test listing objects."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),  # TODO - return 403 instead
        ('bucket', 200),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        # Existing bucket
        resp = client.get(url_for(
            'invenio_files_rest.bucket_api',
            bucket_id=bucket.id,
        ), headers=headers)
        assert resp.status_code == expected
        if resp.status_code == 200:
            data = get_json(resp)
            assert len(data['contents']) == 2
            # Listing returns only head versions by default.
            assert all([x['is_head'] for x in data['contents']])
        # Non-existing bucket
        resp = client.get(url_for(
            'invenio_files_rest.bucket_api',
            bucket_id='invalid',
        ), headers=headers)
        assert resp.status_code == 404
def test_signals(app, client, headers, bucket, permissions):
    """Test file_uploaded and file_deleted signals."""
    login_user(client, permissions['bucket'])
    key = 'myfile.txt'
    data = b'content of my file'
    object_url = url_for('invenio_files_rest.object_api',
                         bucket_id=bucket.id, key=key)

    # Record signal firings in order.
    events = []

    def on_upload(obj):
        events.append("file-uploaded")

    def on_delete(obj):
        events.append("file-deleted")

    file_uploaded.connect(on_upload, weak=False)
    file_deleted.connect(on_delete, weak=False)
    try:
        # Upload then remove the object; each operation fires one signal.
        client.put(object_url, input_stream=BytesIO(data),
                   headers={'Content-Type': 'application/octet-stream'})
        client.delete(object_url)
        assert events == ["file-uploaded", "file-deleted"]
    finally:
        # Always detach the listeners so other tests are unaffected.
        file_uploaded.disconnect(on_upload)
        file_deleted.disconnect(on_delete)
def test_put_header_invalid_tags(app, client, bucket, permissions, get_md5,
                                 get_json):
    """Test upload of an object with tags in the headers."""
    header_name = app.config['FILES_REST_FILE_TAGS_HEADER']
    # Keys/values of 256 chars exceed the allowed length and must be rejected.
    invalid = [
        # We don't test zero-length values/keys, because they are filtered out
        # from parse_qsl
        ('a'*256, 'valid'),
        ('valid', 'b'*256),
    ]
    login_user(client, permissions['bucket'])
    # Invalid key or values
    for k, v in invalid:
        resp = client.put(
            url_for(
                'invenio_files_rest.object_api', bucket_id=bucket.id,
                key='k'),
            input_stream=BytesIO(b'updated_content'),
            headers={header_name: '{}={}'.format(k, v)},
        )
        assert resp.status_code == 400
    # Duplicate key
    resp = client.put(
        url_for('invenio_files_rest.object_api', bucket_id=bucket.id,
                key='k'),
        input_stream=BytesIO(b'updated_content'),
        headers={header_name: 'a=1&a=2'},
    )
    assert resp.status_code == 400
def test_get(client, headers, bucket, objects, permissions):
    """Test getting an object."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('bucket', 200),
        ('location', 200),
        ('objects', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        for obj in objects:
            object_url = url_for(
                'invenio_files_rest.object_api',
                bucket_id=bucket.id,
                key=obj.key,
            )
            # Get specifying version (of latest obj).
            resp = client.get(
                object_url,
                query_string='versionId={0}'.format(obj.version_id),
                headers=headers)
            assert resp.status_code == expected
            # Get latest
            resp = client.get(object_url, headers=headers)
            assert resp.status_code == expected
            if resp.status_code == 200:
                # Strips prefix 'md5:' from checksum value.
                assert resp.content_md5 == obj.file.checksum[4:]
                assert resp.get_etag()[0] == obj.file.checksum
def test_delete_locked_deleted(client, db, bucket, versions, admin_user):
    """Test a deleted/locked bucket."""
    obj = versions[0]
    object_url = url_for(
        'invenio_files_rest.object_api', bucket_id=bucket.id, key=obj.key)

    # Locked bucket
    bucket.locked = True
    db.session.commit()
    login_user(client, admin_user)
    # Latest version
    resp = client.delete(object_url)
    assert resp.status_code == 403
    # Previous version
    resp = client.delete(
        object_url, query_string='versionId={0}'.format(obj.version_id))
    assert resp.status_code == 403

    # Deleted bucket
    bucket.deleted = True
    db.session.commit()
    # Latest version
    resp = client.delete(object_url)
    assert resp.status_code == 404
    # Previous version
    resp = client.delete(
        object_url, query_string='versionId={0}'.format(obj.version_id))
    assert resp.status_code == 404
def test_head(client, headers, bucket, permissions):
    """Test checking existence of bucket."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),  # TODO - return 403 instead
        ('bucket', 200),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        # Existing bucket
        resp = client.head(
            url_for('invenio_files_rest.bucket_api', bucket_id=bucket.id),
            headers=headers,
        )
        assert resp.status_code == expected
        # HEAD responses carry no body.
        assert not resp.data
        # Non-existing bucket
        assert client.head(
            url_for('invenio_files_rest.bucket_api', bucket_id='invalid'),
            headers=headers,
        ).status_code == 404
def test_get_empty_bucket(db, client, headers, bucket, objects, permissions, get_json): """Test getting objects from an empty bucket.""" # Delete the objects created in the fixtures to have an empty bucket with # permissions set up. for obj in objects: ObjectVersion.delete(obj.bucket_id, obj.key) db.session.commit() cases = [ (None, 404), ('auth', 404), ('objects', 404), # TODO - return 403 instead ('bucket', 200), ('location', 200), ] for user, expected in cases: login_user(client, permissions[user]) resp = client.get( url_for('invenio_files_rest.bucket_api', bucket_id=bucket.id), headers=headers ) assert resp.status_code == expected if resp.status_code == 200: assert get_json(resp)['contents'] == []
def test_post_complete(client, headers, permissions, bucket, multipart,
                       multipart_url, parts, get_json, user, expected):
    """Test complete multipart upload."""
    login_user(client, permissions[user])

    # Complete the multipart upload (the merge task is mocked out, so the
    # unconfigured MagicMock result reports immediate success).
    with patch('invenio_files_rest.views.merge_multipartobject') as task:
        res = client.post(multipart_url)
        assert res.status_code == expected
        if res.status_code == 200:
            data = get_json(res)
            assert data['completed'] is True
            # BUG FIX: ``task.called_with(...)`` is not a Mock assertion
            # method -- it silently creates a truthy child mock, so the old
            # check could never fail. Verify the dispatch argument explicitly.
            args, kwargs = task.delay.call_args
            assert str(multipart.upload_id) in (
                list(args) + list(kwargs.values()))

            # Cannot complete the multipart object a second time
            res = client.post(multipart_url)
            assert res.status_code == 403

            # Multipart object still exists.
            data = get_json(client.get(multipart_url), code=200)
            assert data['completed'] is True
            assert len(data['parts']) == multipart.last_part_number + 1

            # Object doesn't exists yet.
            assert client.get(data['links']['object']).status_code == 404

            # Run merge
            merge_multipartobject(str(multipart.upload_id))

            # Multipart object no longer exists
            assert client.get(multipart_url).status_code == 404

            # Object exists
            assert client.get(data['links']['object']).status_code == 200
def test_get(client, db, bucket, permissions, multipart, multipart_url,
             get_json):
    """Test get parts."""
    # Create three parts (numbers need not be contiguous).
    Part.create(multipart, 0)
    Part.create(multipart, 1)
    Part.create(multipart, 3)
    db.session.commit()
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),
        ('bucket', 200),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        res = client.get(multipart_url)
        assert res.status_code == expected
        if res.status_code == 200:
            data = get_json(res)
            assert len(data['parts']) == 3
def test_get_versions(client, headers, bucket, versions, permissions):
    """Test object version getting."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 403),
        ('bucket', 403),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        for obj in versions:
            # Only non-head (older) versions are exercised here.
            if obj.is_head is True:
                continue
            resp = client.get(
                url_for(
                    'invenio_files_rest.object_api',
                    bucket_id=bucket.id,
                    key=obj.key,
                ),
                query_string=dict(versionId=obj.version_id)
            )
            assert resp.status_code == expected
            if resp.status_code == 200:
                # Strips prefix 'md5:' from checksum value.
                assert resp.content_md5 == obj.file.checksum[4:]
                assert resp.get_etag()[0] == obj.file.checksum
def test_already_exhausted_input_stream(app, client, db, bucket, admin_user):
    """Test server error when file stream is already read."""
    key = 'test.json'
    data = b'{"json": "file"}'
    object_url = url_for(
        'invenio_files_rest.object_api', bucket_id=bucket.id, key=key)

    # Add a new before request hook which reads the incoming request payload.
    # This simulates what happens with Sentry's raven-python library, which
    # reads the JSON payloads, breaking the upload of JSON files
    # (`application/json`).
    def consume_request_input_stream(*args):
        """Reads input stream object."""
        from flask import request
        request.data

    app.before_request(consume_request_input_stream)
    login_user(client, admin_user)
    # Both upload entry points must fail with 500 on the exhausted stream.
    resp = client.put(
        object_url,
        input_stream=BytesIO(data),
    )
    assert resp.status_code == 500
    resp = client.post(
        object_url,
        input_stream=BytesIO(data),
    )
    assert resp.status_code == 500
def test_put_file_size_errors(client, db, bucket, quota_size, max_file_size,
                              expected, err, admin_user):
    """Test that file size errors are properly raised."""
    login_user(client, admin_user)
    # 75-byte payload, checked against the parametrized limits below.
    filedata = b'a' * 75
    object_url = url_for(
        'invenio_files_rest.object_api', bucket_id=bucket.id, key='test.txt')

    # Set quota and max file size
    bucket.quota_size = quota_size
    bucket.max_file_size = max_file_size
    db.session.commit()

    # Test set limits.
    resp = client.put(object_url, input_stream=BytesIO(filedata))
    assert resp.status_code == expected

    # Test correct error message.
    if err:
        assert err in resp.get_data(as_text=True)

    # Test that versions are counted.
    if max_file_size == 100 and quota_size == 100:
        # A second 75-byte version pushes total usage past the 100-byte quota.
        resp = client.put(object_url, input_stream=BytesIO(filedata))
        assert resp.status_code == 400
def test_get_empty(client, multipart, multipart_url, get_json, admin_user):
    """Test get parts when empty."""
    login_user(client, admin_user)
    # A fresh multipart upload exposes its metadata but no parts yet.
    payload = get_json(client.get(multipart_url), code=200)
    assert not payload['parts']
    assert payload['id'] == str(multipart.upload_id)
def test_post_complete_fail(client, headers, bucket, multipart, multipart_url,
                            parts, get_json, admin_user):
    """Test completing multipart when merge fails."""
    login_user(client, admin_user)

    # Mock celery task to emulate real usage: not ready for two polls, then
    # ready but unsuccessful.
    task_result = MagicMock()
    task_result.ready = MagicMock(side_effect=[False, False, True])
    task_result.successful = MagicMock(return_value=False)

    # Complete multipart upload
    with patch('invenio_files_rest.views.merge_multipartobject') as task:
        task.delay = MagicMock(return_value=task_result)
        res = client.post(multipart_url)
        data = get_json(res, code=200)
        # Whitespace is streamed to the client while polling, before the
        # JSON body is sent.
        assert res.data.startswith(b' {')
        assert data['status'] == 500
        assert data['message'] == 'Job failed.'

    # Multipart object still exists.
    data = get_json(client.get(multipart_url), code=200)
    assert data['completed'] is True

    # Object doesn't exists yet.
    assert client.get(data['links']['object']).status_code == 404
def test_get_versions_invalid(client, headers, bucket, objects, permissions):
    """Test getting objects with non-existing or malformed version ids."""
    # Every permission level gets the same error for bad version ids.
    cases = [
        None,
        'auth',
        'objects',
        'bucket',
        'location',
    ]
    # (versionId value, expected HTTP status)
    versions = [
        ('c1057411-ad8a-4e4f-ac0e-f6f8b395d277', 404),
        ('invalid', 422),  # Not a UUID
    ]
    for user in cases:
        login_user(client, permissions[user])
        for v, expected in versions:
            for obj in objects:
                resp = client.get(
                    url_for(
                        'invenio_files_rest.object_api',
                        bucket_id=bucket.id,
                        key=obj.key,
                    ),
                    query_string=dict(versionId=v)
                )
                assert resp.status_code == expected
def test_get_with_x_sendfile(client, headers, bucket, objects, permissions,
                             offload_file_serving):
    """Test getting a redirect to an object."""
    login_user(client, permissions['bucket'])
    for obj in objects:
        object_url = url_for(
            'invenio_files_rest.object_api',
            bucket_id=bucket.id,
            key=obj.key,
        )
        # Get specifying version (of latest obj).
        resp = client.get(
            object_url,
            query_string='versionId={0}'.format(obj.version_id),
            headers=headers)
        assert resp.status_code == 200
        # File serving is offloaded to the web server via X-Accel-Redirect.
        assert resp.headers['X-Accel-Redirect'].startswith('/user_files/')
        # After deleting the object, GET must 404 instead of redirecting.
        resp = client.delete(
            url_for(
                'invenio_files_rest.object_api',
                bucket_id=bucket.id,
                key=obj.key,
            ))
        resp = client.get(
            url_for(
                'invenio_files_rest.object_api',
                bucket_id=bucket.id,
                key=obj.key,
            ))
        assert resp.status_code == 404
def test_put(client, db, bucket, permissions, multipart, multipart_url,
             get_md5, get_json):
    """Test part upload."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),  # TODO - use 403 instead
        ('bucket', 200),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        data = b'a' * multipart.chunk_size
        res = client.put(
            multipart_url + '&partNumber={0}'.format(1),
            input_stream=BytesIO(data),
        )
        assert res.status_code == expected
        if res.status_code == 200:
            assert res.get_etag()[0] == get_md5(data)

    # Assert content
    with open(multipart.file.uri, 'rb') as fp:
        # Part numbers are zero-based, so part 1 starts one chunk into the
        # file -- TODO confirm against Part model semantics.
        fp.seek(multipart.chunk_size)
        content = fp.read(multipart.chunk_size)
    assert content == data
    assert Part.count(multipart) == 1
    assert Part.get_or_none(multipart, 1).checksum == get_md5(data)
def test_delete_versions(client, db, bucket, versions, permissions, user,
                         expected):
    """Test deleting an object."""
    login_user(client, permissions[user])
    for obj in versions:
        # Valid delete
        resp = client.delete(
            url_for(
                'invenio_files_rest.object_api',
                bucket_id=bucket.id,
                key=obj.key,
                versionId=obj.version_id,
            ))
        assert resp.status_code == expected
        if resp.status_code == 204:
            # The specific version is permanently removed.
            assert not ObjectVersion.get(
                bucket.id, obj.key, version_id=obj.version_id)

    # Invalid object (well-formed but unknown version id)
    assert client.delete(
        url_for('invenio_files_rest.object_api', bucket_id=bucket.id,
                key=obj.key,
                versionId='deadbeef-65bd-4d9b-93e2-ec88cc59aec5')
    ).status_code == 404
def test_delete(client, db, bucket, objects, permissions, user, expected):
    """Test deleting an object."""
    login_user(client, permissions[user])
    for obj in objects:
        # Valid object
        resp = client.delete(
            url_for(
                'invenio_files_rest.object_api',
                bucket_id=bucket.id,
                key=obj.key,
            ))
        assert resp.status_code == expected
        if resp.status_code == 204:
            # Deleted: both model lookup and a subsequent GET fail.
            assert not ObjectVersion.get(bucket.id, obj.key)
            resp = client.get(
                url_for(
                    'invenio_files_rest.object_api',
                    bucket_id=bucket.id,
                    key=obj.key,
                ))
            assert resp.status_code == 404
        else:
            # A denied delete leaves the object untouched.
            assert ObjectVersion.get(bucket.id, obj.key)

    # Invalid object
    assert client.delete(
        url_for(
            'invenio_files_rest.object_api',
            bucket_id=bucket.id,
            key='invalid',
        )).status_code == 404
def test_put_versioning(client, bucket, permissions, get_md5, get_json):
    """Test versioning feature."""
    key = 'test.txt'
    files = [b'v1', b'v2']
    object_url = url_for('invenio_files_rest.object_api',
                         bucket_id=bucket.id, key=key)

    # Upload to same key twice
    login_user(client, permissions['location'])
    for f in files:
        resp = client.put(object_url, input_stream=BytesIO(f))
        assert resp.status_code == 200

    # Assert we have two versions
    resp = client.get(url_for(
        'invenio_files_rest.bucket_api',
        bucket_id=bucket.id,
    ), query_string='versions=1')
    data = get_json(resp, code=200)
    assert len(data['contents']) == 2

    # Assert we can get both versions
    for item in data['contents']:
        assert client.get(item['links']['self']).status_code == 200
def test_put(client, bucket, permissions, get_md5, get_json):
    """Test upload of an object."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),
        ('bucket', 200),
        ('location', 200),
    ]
    key = 'test.txt'
    data = b'updated_content'
    checksum = get_md5(data, prefix=True)
    object_url = url_for(
        'invenio_files_rest.object_api', bucket_id=bucket.id, key=key)
    for user, expected in cases:
        login_user(client, permissions[user])
        resp = client.put(
            object_url,
            input_stream=BytesIO(data),
        )
        assert resp.status_code == expected
        if expected == 200:
            # ETag carries the prefixed checksum; Content-MD5 drops 'md5:'.
            assert resp.get_etag()[0] == checksum
            resp = client.get(object_url)
            assert resp.status_code == 200
            assert resp.data == data
            assert resp.content_md5 == checksum[4:]
def test_post_init(client, headers, permissions, bucket, get_json):
    """Test init multipart upload."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),  # TODO - use 403 instead
        ('bucket', 200),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        # Initiate multipart upload
        res = client.post(
            obj_url(bucket),
            query_string='uploads',
            headers={'Content-Type': 'application/json'},
            data=json.dumps({
                'size': 10,
                'partSize': 4,
            })
        )
        assert res.status_code == expected
        if res.status_code == 200:
            data = get_json(res)
            # Serialized multipart upload must expose these keys.
            expected_keys = [
                'id', 'bucket', 'completed', 'size', 'part_size',
                'last_part_number', 'last_part_size', 'links'
            ]
            for k in expected_keys:
                assert k in data
def test_get_empty_bucket(db, client, headers, bucket, objects, permissions, get_json): """Test getting objects from an empty bucket.""" # Delete the objects created in the fixtures to have an empty bucket with # permissions set up. for obj in objects: ObjectVersion.delete(obj.bucket_id, obj.key) db.session.commit() cases = [ (None, 404), ('auth', 404), ('objects', 404), # TODO - return 403 instead ('bucket', 200), ('location', 200), ] for user, expected in cases: login_user(client, permissions[user]) resp = client.get(url_for('invenio_files_rest.bucket_api', bucket_id=bucket.id), headers=headers) assert resp.status_code == expected if resp.status_code == 200: assert get_json(resp)['contents'] == []
def test_get_serialization(client, multipart, multipart_url, get_json,
                           admin_user):
    """Test serialization of a multipart upload and its parts."""
    login_user(client, admin_user)
    # Upload one full-size part so the listing is non-empty.
    client.put(
        multipart_url + '&partNumber={0}'.format(1),
        input_stream=BytesIO(b'a' * multipart.chunk_size),
    )
    data = get_json(client.get(multipart_url), code=200)
    assert len(data['parts']) == 1
    # Keys expected on each serialized part.
    expected_keys = [
        'part_number', 'start_byte', 'end_byte', 'checksum', 'created',
        'updated'
    ]
    for k in expected_keys:
        assert k in data['parts'][0]
    # Keys expected on the serialized multipart upload itself.
    expected_keys = [
        'id', 'bucket', 'key', 'completed', 'size', 'part_size', 'links',
        'last_part_number', 'last_part_size', 'created', 'updated',
    ]
    for k in expected_keys:
        assert k in data
def test_post_init(client, headers, permissions, bucket, get_json):
    """Test init multipart upload."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),  # TODO - use 403 instead
        ('bucket', 200),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        # Initiate multipart upload
        # NOTE(review): this variant sends the snake_case 'part_size' key
        # (other tests use camelCase 'partSize') -- presumably both are
        # accepted by the API; confirm against the view's schema.
        res = client.post(
            obj_url(bucket),
            query_string='uploads',
            headers={'Content-Type': 'application/json'},
            data=json.dumps({
                'size': 10,
                'part_size': 4,
            })
        )
        assert res.status_code == expected
        if res.status_code == 200:
            data = get_json(res)
            # Serialized multipart upload must expose these keys.
            expected_keys = [
                'id', 'bucket', 'completed', 'size', 'part_size',
                'last_part_number', 'last_part_size', 'links'
            ]
            for k in expected_keys:
                assert k in data
def test_post_complete_timeout(app, client, headers, bucket, multipart,
                               multipart_url, parts, get_json, admin_user):
    """Test completing multipart when the merge task times out."""
    login_user(client, admin_user)
    # Maximum number of polling rounds before the view gives up.
    max_rounds = int(
        app.config['FILES_REST_TASK_WAIT_MAX_SECONDS'] //
        app.config['FILES_REST_TASK_WAIT_INTERVAL'])

    # Mock celery task to emulate real usage: never becomes ready.
    task_result = MagicMock()
    task_result.ready = MagicMock(return_value=False)

    # Complete multipart upload
    with patch('invenio_files_rest.views.merge_multipartobject') as task:
        task.delay = MagicMock(return_value=task_result)
        res = client.post(multipart_url)
        data = get_json(res, code=200)
        # One whitespace byte is streamed per polling round before the JSON.
        assert res.data.startswith(b' ' * max_rounds)
        assert data['status'] == 500
        assert data['message'] == 'Job timed out.'

    # Multipart object still exists.
    data = get_json(client.get(multipart_url), code=200)
    assert data['completed'] is True

    # Object doesn't exists yet.
    assert client.get(data['links']['object']).status_code == 404
def test_get_versions(client, headers, permissions, bucket, objects,
                      get_json):
    """Test listing all object versions in a bucket."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('bucket', 403),  # User already knowns bucket exists.
        ('objects', 404),  # TODO - return 403 instead
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        resp = client.get(url_for(
            'invenio_files_rest.bucket_api',
            bucket_id=bucket.id,
            versions='1',
        ), headers=headers)
        assert resp.status_code == expected
        if resp.status_code == 200:
            data = get_json(resp)
            # All versions are listed, not only the heads.
            assert len(data['contents']) == 4
            assert data['id'] == str(bucket.id)
def test_put_badstream(client, db, bucket, multipart, multipart_url, get_json,
                       admin_user):
    """Test part upload with a faulty input stream."""
    login_user(client, admin_user)
    # Upload a valid part first.
    client.put(
        multipart_url + '&partNumber={0}'.format(1),
        input_stream=BytesIO(b'a' * multipart.chunk_size),
    )
    # Part exists
    data = get_json(client.get(multipart_url), code=200)
    assert len(data['parts']) == 1
    # A stream that errors mid-read propagates the exception to the caller.
    pytest.raises(
        ValueError,
        client.put,
        multipart_url + '&partNumber={0}'.format(1),
        input_stream=BadBytesIO(b'b' * multipart.chunk_size),
    )
    # Part was removed due to faulty upload which might have written partial
    # content to the file.
    data = get_json(client.get(multipart_url), code=200)
    assert len(data['parts']) == 0
def test_post_init_querystring(client, bucket, get_json, admin_user):
    """Test init multipart upload."""
    login_user(client, admin_user)
    # Size and part size may be passed via the query string instead of JSON.
    response = client.post(
        obj_url(bucket),
        query_string='uploads&size=10&partSize=4',
    )
    assert response.status_code == 200
def test_head_locked_deleted(client, db, headers, bucket, permissions):
    """Test checking existence of bucket."""
    bucket_url = url_for('invenio_files_rest.bucket_api', bucket_id=bucket.id)
    login_user(client, permissions['location'])

    # A locked bucket still exists, so HEAD succeeds.
    bucket.locked = True
    db.session.commit()
    resp = client.head(bucket_url)
    assert resp.status_code == 200

    # A deleted bucket is reported as gone.
    bucket.deleted = True
    db.session.commit()
    resp = client.head(bucket_url)
    assert resp.status_code == 404
def test_get_listuploads(client, db, bucket, multipart, multipart_url,
                         permissions, parts, get_json):
    """Test get list of multipart uploads in bucket."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),
        ('bucket', 404),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        # The '?uploads' flag switches the bucket view to upload listing.
        res = client.get(url_for(
            'invenio_files_rest.bucket_api',
            bucket_id=str(bucket.id),
        ) + '?uploads')
        assert res.status_code == expected
def test_get_not_found(client, headers, bucket, permissions):
    """Test getting a non-existing object."""
    # Every permission level gets 404 for a missing key.
    for role in (None, 'auth', 'bucket', 'objects', 'location'):
        login_user(client, permissions[role])
        missing_url = url_for(
            'invenio_files_rest.object_api',
            bucket_id=bucket.id,
            key='non-existing.pdf',
        )
        resp = client.get(missing_url, headers=headers)
        assert resp.status_code == 404
def test_post_bucket(app, client, headers, dummy_location, permissions, user,
                     expected):
    """Test post a bucket."""
    # Keys expected in a serialized bucket.
    expected_keys = [
        'id', 'links', 'size', 'quota_size', 'max_file_size', 'locked',
        'created', 'updated']
    # With and without an explicit location name.
    params = [{}, {'location_name': dummy_location.name}]
    login_user(client, permissions[user])
    for data in params:
        resp = client.post(
            url_for('invenio_files_rest.location_api'),
            data=data,
            headers=headers
        )
        assert resp.status_code == expected
        if resp.status_code == 200:
            resp_json = get_json(resp)
            for key in expected_keys:
                assert key in resp_json
            # The bucket was actually persisted.
            assert Bucket.get(resp_json['id'])
def test_get(client, headers, permissions, bucket, objects, get_json):
    """Test listing objects."""
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),  # TODO - return 403 instead
        ('bucket', 200),
        ('location', 200),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        # Existing bucket
        resp = client.get(url_for(
            'invenio_files_rest.bucket_api',
            bucket_id=bucket.id,
        ), headers=headers)
        assert resp.status_code == expected
        if resp.status_code == 200:
            data = get_json(resp)
            assert len(data['contents']) == 2
            # Listing returns only head versions by default.
            assert all([x['is_head'] for x in data['contents']])
            # Exact serialization schema of an object version.
            assert set(data['contents'][0].keys()) == {
                'checksum', 'created', 'delete_marker', 'is_head', 'key',
                'links', 'mimetype', 'size', 'updated', 'version_id',
            }
            # Exact serialization schema of the bucket.
            assert set(data.keys()) == {
                'contents', 'created', 'id', 'links', 'locked',
                'max_file_size', 'quota_size', 'size', 'updated',
            }
        # Non-existing bucket
        resp = client.get(url_for(
            'invenio_files_rest.bucket_api',
            bucket_id='invalid',
        ), headers=headers)
        assert resp.status_code == 404
def test_delete(client, db, bucket, objects, permissions, user, expected):
    """Test deleting an object."""
    login_user(client, permissions[user])
    for obj in objects:
        # Valid object
        target = url_for(
            'invenio_files_rest.object_api',
            bucket_id=bucket.id,
            key=obj.key,
        )
        resp = client.delete(target)
        assert resp.status_code == expected
        if resp.status_code == 204:
            # Successful delete removes the head version.
            assert not ObjectVersion.get(bucket.id, obj.key)
        else:
            # A denied delete leaves the object untouched.
            assert ObjectVersion.get(bucket.id, obj.key)

    # Invalid object
    invalid_url = url_for(
        'invenio_files_rest.object_api',
        bucket_id=bucket.id,
        key='invalid',
    )
    assert client.delete(invalid_url).status_code == 404
def test_post(client, headers, permissions, bucket):
    """Test ObjectResource view POST method."""
    # Even users with write access get 403 -- plain POST (without the
    # 'uploads' flag) is not allowed on the object resource.
    cases = [
        (None, 404),
        ('auth', 404),
        ('bucket', 403),
        ('location', 403),
    ]
    key = 'file.pdf'
    data = b'mycontent'
    for user, expected in cases:
        login_user(client, permissions[user])
        resp = client.post(
            url_for(
                'invenio_files_rest.object_api', bucket_id=bucket.id,
                key=key),
            data={'file': (BytesIO(data), key)},
            headers={'Accept': '*/*'},
        )
        assert resp.status_code == expected
def test_post_complete(client, headers, permissions, bucket, multipart,
                       multipart_url, parts, get_json, user, expected):
    """Test complete multipart upload."""
    login_user(client, permissions[user])

    # Mock celery task to emulate real usage: two "not ready" polls, then the
    # real merge runs and the task reports ready + successful.
    def _mock_celery_result():
        yield False
        yield False
        merge_multipartobject(str(multipart.upload_id))
        yield True

    result_iter = _mock_celery_result()
    task_result = MagicMock()
    task_result.ready = MagicMock(side_effect=lambda *args: next(result_iter))
    task_result.successful = MagicMock(return_value=True)

    # Complete multipart upload
    with patch('invenio_files_rest.views.merge_multipartobject') as task:
        task.delay = MagicMock(return_value=task_result)
        res = client.post(multipart_url)
        assert res.status_code == expected
        if res.status_code == 200:
            data = get_json(res)
            assert data['completed'] is True
            # BUG FIX: ``task.called_with(...)`` is not a Mock assertion
            # method -- it silently creates a truthy child mock, so the old
            # check could never fail. Verify the dispatch argument explicitly.
            args, kwargs = task.delay.call_args
            assert str(multipart.upload_id) in (
                list(args) + list(kwargs.values()))
            # Two whitespaces expected to have been sent to client before
            # JSON was sent (one per "not ready" polling round; the original
            # b' {' literal lost a space and could never match two).
            assert res.data.startswith(b'  {')

            # Multipart object no longer exists
            assert client.get(multipart_url).status_code == 404

            # Object exists
            assert client.get(data['links']['object']).status_code == 200
def test_delete(client, db, bucket, multipart, multipart_url, permissions,
                parts, get_json):
    """Test deleting a multipart upload."""
    # The pending upload counts towards the bucket size.
    assert bucket.size == multipart.size
    # (user fixture key, expected HTTP status) per permission level.
    cases = [
        (None, 404),
        ('auth', 404),
        ('objects', 404),
        ('bucket', 404),
        ('location', 204),
    ]
    for user, expected in cases:
        login_user(client, permissions[user])
        res = client.delete(multipart_url)
        assert res.status_code == expected
        if res.status_code == 204:
            # Delete removes the upload, its parts, and frees bucket space.
            assert client.get(multipart_url).status_code == 404
            assert MultipartObject.query.count() == 0
            assert Part.query.count() == 0
            assert Bucket.get(bucket.id).size == 0
def test_delete_versions(client, db, bucket, versions, permissions, user,
                         expected):
    """Test deleting an object."""
    login_user(client, permissions[user])
    for obj in versions:
        # Valid delete
        resp = client.delete(url_for(
            'invenio_files_rest.object_api',
            bucket_id=bucket.id,
            key=obj.key,
            versionId=obj.version_id,
        ))
        assert resp.status_code == expected
        if resp.status_code == 204:
            # The specific version is permanently removed.
            assert not ObjectVersion.get(
                bucket.id, obj.key, version_id=obj.version_id)

    # Invalid object (well-formed but unknown version id)
    assert client.delete(url_for(
        'invenio_files_rest.object_api',
        bucket_id=bucket.id,
        key=obj.key,
        versionId='deadbeef-65bd-4d9b-93e2-ec88cc59aec5'
    )).status_code == 404
def test_put_versioning(client, bucket, permissions, get_md5, get_json):
    """Test versioning feature."""
    key = 'test.txt'
    files = [b'v1', b'v2']
    object_url = url_for(
        'invenio_files_rest.object_api', bucket_id=bucket.id, key=key)

    # Upload to same key twice
    login_user(client, permissions['location'])
    for payload in files:
        put_resp = client.put(object_url, input_stream=BytesIO(payload))
        assert put_resp.status_code == 200

    # Assert we have two versions
    listing = client.get(
        url_for('invenio_files_rest.bucket_api', bucket_id=bucket.id),
        query_string='versions=1',
    )
    data = get_json(listing, code=200)
    assert len(data['contents']) == 2

    # Assert we can get both versions
    for entry in data['contents']:
        assert client.get(entry['links']['self']).status_code == 200
def test_get_location(app, client, headers, dummy_location, permissions, user,
                      expected):
    """Test GET a location."""
    login_user(client, permissions[user])
    # Response status depends solely on the parametrized permission level.
    resp = client.get(
        url_for('invenio_files_rest.location_api'), headers=headers)
    assert resp.status_code == expected