async def test_iterate_storage(own_dummy_request, mock_txn):
    """Upload twenty objects, then confirm the S3 bucket iterator yields all of them.

    NOTE(review): this test name is duplicated elsewhere in the file; if these
    chunks live in one module, later definitions shadow this one under pytest.
    """
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with own_dummy_request:
        await _cleanup()
        upload_headers = {
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        }
        own_dummy_request.headers.update(upload_headers)
        for _ in range(20):
            # Fresh payload and a reset read cache so every upload re-reads the gif.
            own_dummy_request._payload = FakeContentReader()
            own_dummy_request._cache_data = b""
            own_dummy_request._last_read_pos = 0
            content = create_content()
            content.file = None
            manager = FileManager(content, own_dummy_request, IContent["file"].bind(content))
            await manager.upload()
        blob_store = get_utility(IS3BlobStore)
        found = [entry async for entry in blob_store.iterate_bucket()]
        assert len(found) == 20
        await _cleanup()
async def test_store_file_in_cloud_using_tus(dummy_request):
    """Upload one small gif via the TUS protocol, verify metadata, then delete it."""
    req = dummy_request
    login(req)
    req._container_id = 'test-container'
    await _cleanup()
    req.headers.update({
        'Content-Type': 'image/gif',
        'UPLOAD-MD5HASH': md5(_test_gif).hexdigest(),
        'UPLOAD-EXTENSION': 'gif',
        'UPLOAD-FILENAME': 'test.gif',
        'UPLOAD-LENGTH': len(_test_gif),
        'TUS-RESUMABLE': '1.0.0',
        'Content-Length': len(_test_gif),
        'upload-offset': 0
    })
    req._payload = FakeContentReader()
    obj = create_content()
    obj.file = None
    manager = FileManager(obj, req, IContent['file'].bind(obj))
    # TUS flow: create the upload session, then patch the single chunk.
    await manager.tus_create()
    await manager.tus_patch()
    # Once finished, the multipart upload id is cleared and the blob exists.
    assert obj.file._upload_file_id is None
    assert obj.file.uri is not None
    assert obj.file.content_type == 'image/gif'
    assert obj.file.filename == 'test.gif'
    assert obj.file._size == len(_test_gif)
    assert len(await get_all_objects()) == 1
    storage_manager = S3FileStorageManager(obj, req, IContent['file'].bind(obj))
    await storage_manager.delete_upload(obj.file.uri)
    assert len(await get_all_objects()) == 0
async def test_download(dummy_request, mock_txn):
    """Upload a multi-chunk payload, then verify download reports the full length."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        # Repeat the gif until the payload spans more than one chunk.
        file_data = b""
        while len(file_data) < CHUNK_SIZE:
            file_data += _test_gif
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(file_data).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(file_data),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader(file_data)
        dummy_request.send = FakeContentSend()
        content = create_content()
        content.file = None
        manager = FileManager(content, dummy_request, IContent["file"].bind(content))
        await manager.upload()
        assert content.file.upload_file_id is None
        assert content.file.uri is not None
        response = await manager.download()
        assert int(response.content_length) == len(file_data)
async def test_store_file_in_cloud(own_dummy_request, mock_txn):
    """Plain (non-TUS) upload: verify stored metadata, then delete the blob."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with own_dummy_request:
        await _cleanup()
        upload_headers = {
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        }
        own_dummy_request.headers.update(upload_headers)
        own_dummy_request._payload = FakeContentReader()
        content = create_content()
        content.file = None
        manager = FileManager(content, own_dummy_request, IContent["file"].bind(content))
        await manager.upload()
        # Upload finished: no in-progress multipart id, metadata fully populated.
        assert content.file._upload_file_id is None
        assert content.file.uri is not None
        assert content.file.content_type == "image/gif"
        assert content.file.filename == "test.gif"
        assert content.file._size == len(_test_gif)
        assert content.file.md5 is not None
        # The object's uuid is embedded in the generated blob key.
        assert content.__uuid__ in content.file.uri
        assert len(await get_all_objects()) == 1
        storage_manager = S3FileStorageManager(
            content, own_dummy_request, IContent["file"].bind(content)
        )
        await storage_manager.delete_upload(content.file.uri)
        assert len(await get_all_objects()) == 0
async def test_upload_works_with_plus_id(dummy_request, mock_txn):
    """Uploads must succeed for content whose id contains special characters."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()
        parent = create_content(id="foobar")
        # Id intentionally contains characters that need escaping in a key.
        content = create_content(id="*****@*****.**", parent=parent)
        content.file = None
        manager = FileManager(content, dummy_request, IContent["file"].bind(content))
        await manager.upload()
        assert getattr(content.file, "upload_file_id", None) is None
        assert content.file.uri is not None
        stored = await get_all_objects()
        assert len(stored) == 1
        assert stored[0]["name"] == content.file.uri
async def test_iterate_storage(dummy_request):
    """Upload twenty objects and confirm iterate_bucket yields each one.

    NOTE(review): duplicate of a same-named test elsewhere in this file;
    within one module only the last definition is collected by pytest.
    """
    req = dummy_request
    login(req)
    req._container_id = 'test-container'
    await _cleanup()
    req.headers.update({
        'Content-Type': 'image/gif',
        'X-UPLOAD-MD5HASH': md5(_test_gif).hexdigest(),
        'X-UPLOAD-EXTENSION': 'gif',
        'X-UPLOAD-SIZE': len(_test_gif),
        'X-UPLOAD-FILENAME': 'test.gif'
    })
    for _ in range(20):
        # Reset payload and request read cache before every upload.
        req._payload = FakeContentReader()
        req._cache_data = b''
        req._last_read_pos = 0
        obj = create_content()
        obj.file = None
        manager = FileManager(obj, req, IContent['file'].bind(obj))
        await manager.upload()
    blob_store = get_utility(IS3BlobStore)
    found = [entry async for entry in blob_store.iterate_bucket()]
    assert len(found) == 20
    await _cleanup()
async def test_read_range(own_dummy_request, mock_txn):
    """Verify S3FileStorageManager.read_range returns exact byte windows."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with own_dummy_request:
        await _cleanup()
        own_dummy_request.headers.update(
            {
                "Content-Type": "image/gif",
                "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
                "X-UPLOAD-EXTENSION": "gif",
                "X-UPLOAD-SIZE": len(_test_gif),
                "X-UPLOAD-FILENAME": "test.gif",
            }
        )
        own_dummy_request._payload = FakeContentReader()
        content = create_content()
        content.file = None
        manager = FileManager(content, own_dummy_request, IContent["file"].bind(content))
        await manager.upload()
        storage_manager = S3FileStorageManager(
            content, own_dummy_request, IContent["file"].bind(content)
        )
        # First 100 bytes, then the following 100 — each must match the source gif.
        async for piece in storage_manager.read_range(0, 100):
            assert len(piece) == 100
            assert piece == _test_gif[:100]
        async for piece in storage_manager.read_range(100, 200):
            assert len(piece) == 100
            assert piece == _test_gif[100:200]
async def test_store_file_in_cloud(dummy_request):
    """Plain upload on the legacy-style request fixture: metadata then delete.

    NOTE(review): duplicate of a same-named test elsewhere in this file.
    """
    req = dummy_request
    login(req)
    req._container_id = 'test-container'
    await _cleanup()
    req.headers.update({
        'Content-Type': 'image/gif',
        'X-UPLOAD-MD5HASH': md5(_test_gif).hexdigest(),
        'X-UPLOAD-EXTENSION': 'gif',
        'X-UPLOAD-SIZE': len(_test_gif),
        'X-UPLOAD-FILENAME': 'test.gif'
    })
    req._payload = FakeContentReader()
    obj = create_content()
    obj.file = None
    manager = FileManager(obj, req, IContent['file'].bind(obj))
    await manager.upload()
    assert obj.file._upload_file_id is None
    assert obj.file.uri is not None
    assert obj.file.content_type == 'image/gif'
    assert obj.file.filename == 'test.gif'
    assert obj.file._size == len(_test_gif)
    assert obj.file.md5 is not None
    # The persistent oid is part of the generated blob key.
    assert obj._p_oid in obj.file.uri
    assert len(await get_all_objects()) == 1
    storage_manager = S3FileStorageManager(obj, req, IContent['file'].bind(obj))
    await storage_manager.delete_upload(obj.file.uri)
    assert len(await get_all_objects()) == 0
async def test_raises_not_retryable(dummy_request):
    """A retried request whose body exceeds the cache limit must raise."""
    req = dummy_request
    login(req)
    req._container_id = 'test-container'
    await _cleanup()
    # Grow the payload beyond the request cache so a retry cannot replay it.
    file_data = b''
    while len(file_data) < MAX_REQUEST_CACHE_SIZE:
        file_data += _test_gif
    req.headers.update({
        'Content-Type': 'image/gif',
        'X-UPLOAD-MD5HASH': md5(file_data).hexdigest(),
        'X-UPLOAD-EXTENSION': 'gif',
        'X-UPLOAD-SIZE': len(file_data),
        'X-UPLOAD-FILENAME': 'test.gif'
    })
    req._payload = FakeContentReader(file_data)
    obj = create_content()
    obj.file = None
    manager = FileManager(obj, req, IContent['file'].bind(obj))
    await manager.upload()
    # Marking the request as a retry should make a second upload fail hard.
    req._retry_attempt = 1
    with pytest.raises(UnRetryableRequestError):
        await manager.upload()
async def test_download(dummy_request):
    """Upload a multi-chunk payload and verify download reports the full length.

    NOTE(review): duplicate of a same-named test elsewhere in this file.
    """
    req = dummy_request
    login(req)
    req._container_id = 'test-container'
    await _cleanup()
    # Repeat the gif until the payload spans more than one chunk.
    file_data = b''
    while len(file_data) < CHUNK_SIZE:
        file_data += _test_gif
    req.headers.update({
        'Content-Type': 'image/gif',
        'X-UPLOAD-MD5HASH': md5(file_data).hexdigest(),
        'X-UPLOAD-EXTENSION': 'gif',
        'X-UPLOAD-SIZE': len(file_data),
        'X-UPLOAD-FILENAME': 'test.gif'
    })
    req._payload = FakeContentReader(file_data)
    obj = create_content()
    obj.file = None
    manager = FileManager(obj, req, IContent['file'].bind(obj))
    await manager.upload()
    assert obj.file._upload_file_id is None
    assert obj.file.uri is not None
    response = await manager.download()
    assert response.content_length == len(file_data)
async def test_raises_not_retryable(dummy_request, mock_txn):
    """A retried oversized upload must raise UnRetryableRequestError.

    NOTE(review): duplicate of a same-named test elsewhere in this file.
    """
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        dummy_request._container_id = "test-container"
        await _cleanup()
        # Grow the payload past the request cache limit so it can't be replayed.
        file_data = b""
        while len(file_data) < MAX_REQUEST_CACHE_SIZE:
            file_data += _test_gif
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(file_data).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(file_data),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader(file_data)
        content = create_content()
        content.file = None
        manager = FileManager(content, dummy_request, IContent["file"].bind(content))
        await manager.upload()
        dummy_request._retry_attempt = 1
        with pytest.raises(UnRetryableRequestError):
            await manager.upload()
async def test_multipart_upload_with_tus(own_dummy_request, mock_txn):
    """TUS upload split into two >5MB patches; verify final metadata and delete."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with own_dummy_request:
        await _cleanup()
        # Build a payload larger than 11MB so two multipart parts are required.
        file_data = _test_gif
        while len(file_data) < (11 * 1024 * 1024):
            file_data += _test_gif
        own_dummy_request.headers.update(
            {
                "Content-Type": "image/gif",
                "UPLOAD-MD5HASH": md5(file_data).hexdigest(),
                "UPLOAD-EXTENSION": "gif",
                "UPLOAD-FILENAME": "test.gif",
                "TUS-RESUMABLE": "1.0.0",
                "UPLOAD-LENGTH": len(file_data),
            }
        )
        own_dummy_request._payload = FakeContentReader()
        content = create_content()
        content.file = None
        manager = FileManager(content, own_dummy_request, IContent["file"].bind(content))
        await manager.tus_create()

        part_size = 5 * 1024 * 1024
        # First patch: the initial 5MB part at offset 0.
        chunk = file_data[:part_size]
        own_dummy_request.headers.update(
            {"Content-Length": len(chunk), "upload-offset": 0}
        )
        own_dummy_request._payload = FakeContentReader(chunk)
        own_dummy_request._cache_data = b""
        own_dummy_request._last_read_pos = 0
        await manager.tus_patch()

        # Second patch: the remainder, starting where the first part ended.
        chunk = file_data[part_size:]
        own_dummy_request.headers.update(
            {"Content-Length": len(chunk), "upload-offset": part_size}
        )
        own_dummy_request._payload = FakeContentReader(chunk)
        own_dummy_request._cache_data = b""
        own_dummy_request._last_read_pos = 0
        await manager.tus_patch()

        assert content.file._upload_file_id is None
        assert content.file.uri is not None
        assert content.file.content_type == "image/gif"
        assert content.file.filename == "test.gif"
        assert content.file._size == len(file_data)
        assert len(await get_all_objects()) == 1
        storage_manager = S3FileStorageManager(
            content, own_dummy_request, IContent["file"].bind(content)
        )
        await storage_manager.delete_upload(content.file.uri)
        assert len(await get_all_objects()) == 0
async def test_store_file_deletes_already_started(dummy_request, mock_txn):
    """A new upload over an in-progress one must delete the stale blob first.

    NOTE(review): this uses GCloudFileManager while sibling tests use
    S3FileStorageManager — looks like a cross-backend copy; confirm intent.
    """
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        login()
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()
        content = create_content()
        content.file = None
        manager = FileManager(content, dummy_request, IContent["file"].bind(content))
        await manager.upload()
        assert getattr(content.file, "upload_file_id", None) is None
        assert content.file.uri is not None
        stored = await get_all_objects()
        assert len(stored) == 1
        assert stored[0]["name"] == content.file.uri

        first_uri = content.file._uri
        # Simulate an upload left mid-flight so the next upload deletes it.
        content.__uploads__ = {
            "file": {
                # like it is in middle of upload so it deletes existing
                "upload_file_id": content.file.uri
            }
        }
        dummy_request.content.seek(0)
        dummy_request._cache_data = b""
        dummy_request._last_read_pos = 0
        await manager.upload()
        assert content.file.upload_file_id is None
        # A fresh key was generated; the stale one was cleaned up.
        assert content.file.uri != first_uri
        assert len(await get_all_objects()) == 1
        cloud_manager = GCloudFileManager(content, dummy_request, IContent["file"].bind(content))
        await cloud_manager.delete_upload(content.file.uri)
        assert len(await get_all_objects()) == 0
async def test_multipart_upload_with_tus(dummy_request):
    """TUS upload in two >5MB patches on the legacy request fixture.

    NOTE(review): duplicate of a same-named test elsewhere in this file.
    """
    req = dummy_request
    login(req)
    req._container_id = 'test-container'
    await _cleanup()
    # Payload over 11MB forces two multipart parts.
    file_data = _test_gif
    while len(file_data) < (11 * 1024 * 1024):
        file_data += _test_gif
    req.headers.update({
        'Content-Type': 'image/gif',
        'UPLOAD-MD5HASH': md5(file_data).hexdigest(),
        'UPLOAD-EXTENSION': 'gif',
        'UPLOAD-FILENAME': 'test.gif',
        'TUS-RESUMABLE': '1.0.0',
        'UPLOAD-LENGTH': len(file_data)
    })
    req._payload = FakeContentReader()
    obj = create_content()
    obj.file = None
    manager = FileManager(obj, req, IContent['file'].bind(obj))
    await manager.tus_create()

    part_size = 5 * 1024 * 1024
    # First part at offset 0.
    chunk = file_data[:part_size]
    req.headers.update({'Content-Length': len(chunk), 'upload-offset': 0})
    req._payload = FakeContentReader(chunk)
    req._cache_data = b''
    req._last_read_pos = 0
    await manager.tus_patch()

    # Remainder at the 5MB offset.
    chunk = file_data[part_size:]
    req.headers.update({
        'Content-Length': len(chunk),
        'upload-offset': part_size
    })
    req._payload = FakeContentReader(chunk)
    req._cache_data = b''
    req._last_read_pos = 0
    await manager.tus_patch()

    assert obj.file._upload_file_id is None
    assert obj.file.uri is not None
    assert obj.file.content_type == 'image/gif'
    assert obj.file.filename == 'test.gif'
    assert obj.file._size == len(file_data)
    assert len(await get_all_objects()) == 1
    storage_manager = S3FileStorageManager(obj, req, IContent['file'].bind(obj))
    await storage_manager.delete_upload(obj.file.uri)
    assert len(await get_all_objects()) == 0
async def test_save_file(dummy_request):
    """save_file with an async generator stores one object of the right size.

    NOTE(review): duplicate of a same-named test elsewhere in this file.
    """
    req = dummy_request
    login(req)
    req._container_id = 'test-container'
    await _cleanup()
    obj = create_content()
    obj.file = None
    manager = FileManager(obj, req, IContent['file'].bind(obj))

    async def payload():
        yield 5000 * b'x'

    await manager.save_file(payload, content_type='application/data')
    assert obj.file.size == 5000
    stored = await get_all_objects()
    assert len(stored) == 1
async def test_save_file(own_dummy_request, mock_txn):
    """save_file with an async generator stores one object of the right size."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with own_dummy_request:
        await _cleanup()
        content = create_content()
        content.file = None
        manager = FileManager(content, own_dummy_request, IContent["file"].bind(content))

        async def payload():
            yield 5000 * b"x"

        await manager.save_file(payload, content_type="application/data")
        assert content.file.size == 5000
        stored = await get_all_objects()
        assert len(stored) == 1
async def test_store_file_when_request_retry_happens(dummy_request, mock_txn):
    """A retried upload of a cacheable payload must succeed without duplicates."""
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()
        content = create_content()
        content.file = None
        manager = FileManager(content, dummy_request, IContent["file"].bind(content))
        await manager.upload()
        assert content.file.upload_file_id is None
        assert content.file.uri is not None
        stored = await get_all_objects()
        assert len(stored) == 1
        assert stored[0]["name"] == content.file.uri

        # test retry...
        dummy_request._retry_attempt = 1
        await manager.upload()
        assert content.file.content_type == "image/gif"
        assert content.file.filename == "test.gif"
        assert content.file._size == len(_test_gif)
        # Still exactly one stored object after the retry.
        assert len(await get_all_objects()) == 1
        cloud_manager = GCloudFileManager(content, dummy_request, IContent["file"].bind(content))
        await cloud_manager.delete_upload(content.file.uri)
        assert len(await get_all_objects()) == 0
async def test_copy(own_dummy_request, mock_txn):
    """Upload a file, copy it to a second object, and verify both blobs exist.

    Bug fix: the three trailing comparisons were bare expressions with no
    ``assert`` keyword, so they evaluated to booleans that were silently
    discarded and the copy was never actually verified.
    """
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with own_dummy_request:
        await _cleanup()
        own_dummy_request.headers.update(
            {
                "Content-Type": "image/gif",
                "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
                "X-UPLOAD-EXTENSION": "gif",
                "X-UPLOAD-SIZE": len(_test_gif),
                "X-UPLOAD-FILENAME": "test.gif",
            }
        )
        own_dummy_request._payload = FakeContentReader()
        ob = create_content()
        ob.file = None
        mng = FileManager(ob, own_dummy_request, IContent["file"].bind(ob))
        await mng.upload()
        items = await get_all_objects()
        assert len(items) == 1
        new_ob = create_content()
        new_ob.file = None
        gmng = S3FileStorageManager(ob, own_dummy_request, IContent["file"].bind(ob))
        dm = DBDataManager(gmng)
        await dm.load()
        new_gmng = S3FileStorageManager(
            new_ob, own_dummy_request, IContent["file"].bind(new_ob)
        )
        new_dm = DBDataManager(new_gmng)
        await new_dm.load()
        await gmng.copy(new_gmng, new_dm)
        # The copy shares metadata but gets its own blob key.
        assert new_ob.file.content_type == ob.file.content_type
        assert new_ob.file.size == ob.file.size
        assert new_ob.file.uri != ob.file.uri
        items = await get_all_objects()
        assert len(items) == 2
async def test_copy(dummy_request):
    """Upload a file, copy it to a second object, and verify both blobs exist.

    Bug fix: the three trailing comparisons were bare expressions with no
    ``assert`` keyword, so the copied file's metadata was never checked.
    """
    request = dummy_request  # noqa
    login(request)
    request._container_id = 'test-container'
    await _cleanup()
    request.headers.update({
        'Content-Type': 'image/gif',
        'X-UPLOAD-MD5HASH': md5(_test_gif).hexdigest(),
        'X-UPLOAD-EXTENSION': 'gif',
        'X-UPLOAD-SIZE': len(_test_gif),
        'X-UPLOAD-FILENAME': 'test.gif'
    })
    request._payload = FakeContentReader()
    ob = create_content()
    ob.file = None
    mng = FileManager(ob, request, IContent['file'].bind(ob))
    await mng.upload()
    items = await get_all_objects()
    assert len(items) == 1
    new_ob = create_content()
    new_ob.file = None
    gmng = S3FileStorageManager(ob, request, IContent['file'].bind(ob))
    dm = DBDataManager(gmng)
    await dm.load()
    new_gmng = S3FileStorageManager(new_ob, request, IContent['file'].bind(new_ob))
    new_dm = DBDataManager(new_gmng)
    await new_dm.load()
    await gmng.copy(new_gmng, new_dm)
    # The copy shares metadata but gets its own blob key.
    assert new_ob.file.content_type == ob.file.content_type
    assert new_ob.file.size == ob.file.size
    assert new_ob.file.uri != ob.file.uri
    items = await get_all_objects()
    assert len(items) == 2
async def test_read_range(dummy_request, mock_txn):
    """Verify GCloud read_range returns exact byte windows of the uploaded gif.

    NOTE(review): duplicate of a same-named test elsewhere in this file.
    """
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()
        content = create_content()
        content.file = None
        manager = FileManager(content, dummy_request, IContent["file"].bind(content))
        await manager.upload()
        assert getattr(content.file, "upload_file_id", None) is None
        assert content.file.uri is not None
        assert len(await get_all_objects()) == 1
        cloud_manager = GCloudFileManager(content, dummy_request, IContent["file"].bind(content))
        # First 100 bytes, then the next 100, each checked against the source.
        async for piece in cloud_manager.read_range(0, 100):
            assert len(piece) == 100
            assert piece == _test_gif[:100]
        async for piece in cloud_manager.read_range(100, 200):
            assert len(piece) == 100
            assert piece == _test_gif[100:200]
async def test_iterate_storage(dummy_request, mock_txn):
    """Upload twenty objects and count them via the GCloud bucket iterator.

    NOTE(review): third definition of this test name in the file; within one
    module only the last one is collected by pytest.
    """
    login()
    container = create_content(Container, id="test-container")
    task_vars.container.set(container)
    with dummy_request:
        await _cleanup()
        dummy_request.headers.update({
            "Content-Type": "image/gif",
            "X-UPLOAD-MD5HASH": md5(_test_gif).hexdigest(),
            "X-UPLOAD-EXTENSION": "gif",
            "X-UPLOAD-SIZE": len(_test_gif),
            "X-UPLOAD-FILENAME": "test.gif",
        })
        dummy_request._stream_reader = FakeContentReader()
        for _ in range(20):
            # Rewind the stream and clear the request cache for each upload.
            dummy_request.content.seek(0)
            dummy_request._cache_data = b""
            dummy_request._last_read_pos = 0
            content = create_content()
            content.file = None
            manager = FileManager(content, dummy_request, IContent["file"].bind(content))
            await manager.upload()
        blob_store = get_utility(IGCloudBlobStore)
        total = 0
        async for _item in blob_store.iterate_bucket():  # noqa
            total += 1
        assert total == 20
        await _cleanup()