        assert len(chunk_data) == bytes_written
        all_data = all_data + chunk_data
        metadata = new_metadata

    # Complete the chunked upload.
    storage_engine.complete_chunked_upload(upload_id, "some/chunked/path", new_metadata)

    # Ensure the file contents are valid.
    assert len(all_data) == len(storage_engine.get_content("some/chunked/path"))
    assert storage_engine.get_content("some/chunked/path") == all_data


@pytest.mark.parametrize(
    "max_size, parts",
    [
        (
            50,
            [
                _PartUploadMetadata("foo", 0, 50),
                _PartUploadMetadata("foo", 50, 50),
            ],
        ),
        (
            40,
            [
                _PartUploadMetadata("foo", 0, 25),
                _PartUploadMetadata("foo", 25, 25),
                _PartUploadMetadata("foo", 50, 25),
                _PartUploadMetadata("foo", 75, 25),
            ],
        ),
        (
            51,
            [
                _PartUploadMetadata("foo", 0, 50),
                _PartUploadMetadata("foo", 50, 50),
            ],
        ),
        (
            49,
            [
                _PartUploadMetadata("foo", 0, 25),
                _PartUploadMetadata("foo", 25, 25),
                _PartUploadMetadata("foo", 50, 25),
                _PartUploadMetadata("foo", 75, 25),
            ],
        ),
    ],
)
def test_rechunked(max_size, parts):
    # Rechunking a single 100-byte chunk must yield exactly the expected parts.
    chunk = _PartUploadMetadata("foo", 0, 100)
    rechunked = list(_CloudStorage._rechunk(chunk, max_size))
    assert len(rechunked) == len(parts)

    for index, chunk in enumerate(rechunked):
        assert chunk == parts[index]
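

# A minimal sketch of the splitting strategy the parametrized cases above
# imply. This is NOT the engine's implementation (that is
# _CloudStorage._rechunk); _rechunk_sketch is a hypothetical helper written
# under two assumptions: an oversized chunk is halved recursively until every
# part fits max_size, and _PartUploadMetadata exposes path/offset/length
# fields, as its constructor arguments suggest. Recursive halving is what
# explains why max_size=40 and max_size=49 both yield four 25-byte parts
# rather than greedy 40/40/20 or 49/49/2 splits.
def _rechunk_sketch(chunk, max_size):
    if max_size is None or chunk.length <= max_size:
        # The chunk already fits; emit it unchanged.
        yield chunk
    else:
        # Split the chunk in half and rechunk each half recursively.
        half = chunk.length // 2
        first = _PartUploadMetadata(chunk.path, chunk.offset, half)
        second = _PartUploadMetadata(chunk.path, chunk.offset + half, chunk.length - half)
        yield from _rechunk_sketch(first, max_size)
        yield from _rechunk_sketch(second, max_size)


def test_rechunk_sketch_matches_cases():
    # Sanity-check the sketch against the max_size=40 case above.
    parts = list(_rechunk_sketch(_PartUploadMetadata("foo", 0, 100), 40))
    assert [(p.offset, p.length) for p in parts] == [(0, 25), (25, 25), (50, 25), (75, 25)]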