async def test_retry_put_block_with_seekable_stream_async(self, resource_group, location, storage_account, storage_account_key):
    pytest.skip("Aiohttp closes stream after request - cannot rewind.")

    # Arrange
    bsc = BlobServiceClient(
        self.account_url(storage_account.name, "blob"),
        credential=storage_account_key,
        retry_policy=self.retry,
        transport=AiohttpTestTransport())
    await self._setup(bsc)
    blob_name = self.get_resource_name('blob')
    data = self.get_random_bytes(PUT_BLOCK_SIZE)
    data_stream = BytesIO(data)

    # rig the response so that it fails exactly once
    responder = ResponseCallback(status=201, new_status=408)

    # Act
    blob = bsc.get_blob_client(self.container_name, blob_name)
    await blob.stage_block(1, data_stream, raw_response_hook=responder.override_first_status)

    # Assert
    _, uncommitted_blocks = await blob.get_block_list(
        block_list_type="uncommitted",
        raw_response_hook=responder.override_first_status)
    self.assertEqual(len(uncommitted_blocks), 1)
    self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE)

    # Commit block and verify content
    await blob.commit_block_list(['1'], raw_response_hook=responder.override_first_status)

    # Assert
    content = await (await blob.download_blob()).readall()
    self.assertEqual(content, data)
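# The ResponseCallback(status=201, new_status=408) helper used above rewrites the
# first response whose status matches `status`, so the client's retry policy fires
# exactly once. A minimal sketch of such a raw-response hook, assuming the pipeline
# response shape used by azure-core (the real shared test utility may differ):
class ResponseCallback(object):
    def __init__(self, status=None, new_status=None):
        self.status = status
        self.new_status = new_status
        self.first = True

    def override_first_status(self, response):
        # `response` is the pipeline response handed to raw_response_hook;
        # flip the first matching status code to trigger a single retry.
        if self.first and response.http_response.status_code == self.status:
            response.http_response.status_code = self.new_status
            self.first = False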
def test_service_client_api_version_property(self):
    service_client = BlobServiceClient(
        "https://foo.blob.core.windows.net/account",
        credential="fake_key")
    self.assertEqual(service_client.api_version, self.api_version_2)
    self.assertEqual(service_client._client._config.version, self.api_version_2)

    with pytest.raises(AttributeError):
        service_client.api_version = "foo"

    service_client = BlobServiceClient(
        "https://foo.blob.core.windows.net/account",
        credential="fake_key",
        api_version=self.api_version_1)
    self.assertEqual(service_client.api_version, self.api_version_1)
    self.assertEqual(service_client._client._config.version, self.api_version_1)

    container_client = service_client.get_container_client("foo")
    self.assertEqual(container_client.api_version, self.api_version_1)
    self.assertEqual(container_client._client._config.version, self.api_version_1)

    blob_client = service_client.get_blob_client("foo", "bar")
    self.assertEqual(blob_client.api_version, self.api_version_1)
    self.assertEqual(blob_client._client._config.version, self.api_version_1)
async def test_ors_source(self, resource_group, location, storage_account, storage_account_key):
    # Arrange
    bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport(connection_data_block_size=1024))
    blob = bsc.get_blob_client(container=self.SRC_CONTAINER, blob=self.BLOB_NAME)

    # Act
    props = await blob.get_blob_properties()

    # Assert
    self.assertIsInstance(props, BlobProperties)
    self.assertIsNotNone(props.object_replication_source_properties)
    for replication_policy in props.object_replication_source_properties:
        self.assertNotEqual(replication_policy.policy_id, '')
        self.assertIsNotNone(replication_policy.rules)
        for rule in replication_policy.rules:
            self.assertNotEqual(rule.rule_id, '')
            self.assertIsNotNone(rule.status)
            self.assertNotEqual(rule.status, '')

    # Check that the download function gives back the same result
    stream = await blob.download_blob()
    self.assertEqual(stream.properties.object_replication_source_properties,
                     props.object_replication_source_properties)
async def test_create_page_blob_with_chunks(self, resource_group, location, storage_account, storage_account_key):
    # test chunking functionality by reducing the size of each chunk,
    # otherwise the tests would take too long to execute
    bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        storage_account_key,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024,
        transport=AiohttpTestTransport(connection_data_block_size=1024))
    await self._setup(bsc)

    # Act
    blob_client = bsc.get_blob_client(self.container_name, self._get_blob_reference())
    page_blob_prop = await blob_client.upload_blob(
        self.byte_data,
        blob_type=BlobType.PageBlob,
        max_concurrency=2,
        encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)

    # Assert
    self.assertIsNotNone(page_blob_prop['etag'])
    self.assertIsNotNone(page_blob_prop['last_modified'])
    self.assertTrue(page_blob_prop['request_server_encrypted'])
    self.assertEqual(page_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)

    # Act: get the blob content
    blob = await blob_client.download_blob()

    # Assert: content was retrieved with the encryption scope
    self.assertEqual(await blob.readall(), self.byte_data)
    self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
async def test_retry_put_block_with_non_seekable_stream_async(self, storage_account_name, storage_account_key):
    # Arrange
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        retry_policy=self.retry,
        transport=AiohttpTestTransport())
    await self._setup(bsc)
    blob_name = self.get_resource_name('blob')
    data = self.get_random_bytes(PUT_BLOCK_SIZE)
    data_stream = self.NonSeekableStream(BytesIO(data))

    # rig the response so that it fails exactly once
    responder = ResponseCallback(status=201, new_status=408)

    # Act
    blob = bsc.get_blob_client(self.container_name, blob_name)
    # Note: put_block transforms non-seekable streams into byte arrays before handing them off to the executor
    await blob.stage_block(1, data_stream, raw_response_hook=responder.override_first_status)

    # Assert
    _, uncommitted_blocks = await blob.get_block_list(
        block_list_type="uncommitted",
        raw_response_hook=responder.override_first_status)
    self.assertEqual(len(uncommitted_blocks), 1)
    self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE)

    # Commit block and verify content
    await blob.commit_block_list(['1'], raw_response_hook=responder.override_first_status)

    # Assert
    content = await (await blob.download_blob()).readall()
    self.assertEqual(content, data)
async def test_retry_put_block_with_non_seekable_stream_fail_async(
        self, resource_group, location, storage_account, storage_account_key):
    if not self.is_live:
        pytest.skip("live only")

    # Arrange
    bsc = BlobServiceClient(
        self._account_url(storage_account.name),
        credential=storage_account_key,
        retry_policy=self.retry,
        transport=AiohttpTestTransport())
    await self._setup(bsc)
    blob_name = self.get_resource_name('blob')
    data = self.get_random_bytes(PUT_BLOCK_SIZE)
    data_stream = self.NonSeekableStream(BytesIO(data))

    # rig the response so that it fails exactly once
    responder = ResponseCallback(status=201, new_status=408)

    # Act
    blob = bsc.get_blob_client(self.container_name, blob_name)

    with self.assertRaises(HttpResponseError) as error:
        await blob.stage_block(
            1, data_stream, length=PUT_BLOCK_SIZE,
            raw_response_hook=responder.override_first_status)

    # Assert
    self.assertEqual(error.exception.response.status_code, 408)
async def test_standard_blob_tier_set_tier_api(self, storage_account_name, storage_account_key):
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())
    await self._setup(bsc)
    tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]

    for tier in tiers:
        blob_name = self.get_resource_name(tier.value)
        blob = bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(b'hello world')

        blob_ref = await blob.get_blob_properties()
        self.assertIsNotNone(blob_ref.blob_tier)
        self.assertTrue(blob_ref.blob_tier_inferred)
        self.assertIsNone(blob_ref.blob_tier_change_time)

        # Act
        await blob.set_standard_blob_tier(tier)

        # Assert
        blob_ref2 = await blob.get_blob_properties()
        self.assertEqual(tier, blob_ref2.blob_tier)
        self.assertFalse(blob_ref2.blob_tier_inferred)
        self.assertIsNotNone(blob_ref2.blob_tier_change_time)

        await blob.delete_blob()
async def download_blob_using_blobservice(account_name: str, credential: DefaultAzureCredential,
                                          container_name: str, blob_name: str, file_stream: io.BytesIO):
    try:
        # Timeout didn't take effect on this code path...
        blob_service = BlobServiceClient(
            f"https://{account_name}.blob.core.windows.net",
            credential=credential,
            connection_timeout=1,
            read_timeout=1)
        blob_client = blob_service.get_blob_client(container_name, blob_name)
        storage_stream_downloader = await blob_client.download_blob()
        await storage_stream_downloader.readinto(file_stream)
        return
    except ResourceNotFoundError:
        raise KeyError(blob_name)
    except ClientAuthenticationError:
        raise
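# A minimal usage sketch for the helper above, assuming the async
# DefaultAzureCredential from azure-identity; the account, container, and
# blob names are hypothetical placeholders:
import asyncio
import io

from azure.identity.aio import DefaultAzureCredential

async def _example():
    credential = DefaultAzureCredential()
    stream = io.BytesIO()
    try:
        # Hypothetical names; substitute your own resources.
        await download_blob_using_blobservice(
            "mystorageaccount", credential, "mycontainer", "myblob.bin", stream)
        print("downloaded {} bytes".format(stream.tell()))
    finally:
        await credential.close()

# Requires a real storage account and a signed-in identity to run.
asyncio.run(_example())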
async def test_ors_destination(self, storage_account_name, storage_account_key):
    # Arrange
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport(connection_data_block_size=1024))
    blob = bsc.get_blob_client(container=self.DST_CONTAINER, blob=self.BLOB_NAME)

    # Act
    props = await blob.get_blob_properties()

    # Assert
    self.assertIsInstance(props, BlobProperties)
    self.assertIsNotNone(props.object_replication_destination_policy)

    # Check that the download function gives back the same result
    stream = await blob.download_blob()
    self.assertEqual(stream.properties.object_replication_destination_policy,
                     props.object_replication_destination_policy)
async def test_copy_source_sas_is_scrubbed_off(self, resource_group, location, storage_account, storage_account_key):
    # Test can only run live
    if not self.is_live:
        pytest.skip("live only")

    bsc = BlobServiceClient(self._account_url(storage_account.name), storage_account_key)
    await self._setup(bsc)

    # Arrange
    dest_blob_name = self.get_resource_name('destblob')
    dest_blob = bsc.get_blob_client(self.container_name, dest_blob_name)

    # parse out the signed signature
    query_parameters = urlparse(self.source_blob_url).query
    token_components = parse_qs(query_parameters)
    if QueryStringConstants.SIGNED_SIGNATURE not in token_components:
        pytest.fail("Blob URL {} doesn't contain {}, parsed query params: {}".format(
            self.source_blob_url,
            QueryStringConstants.SIGNED_SIGNATURE,
            list(token_components.keys())))
    signed_signature = quote(token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

    # Act
    with LogCaptured(self) as log_captured:
        await dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True, logging_enable=True)
        log_as_str = log_captured.getvalue()

        # Assert
        # make sure the query parameter 'sig' is logged, but its value is not
        self.assertTrue(QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
        self.assertFalse(signed_signature in log_as_str)

        # make sure the authorization header is logged, but its value is not
        # (the keyword SharedKey is present in the authorization header's value)
        self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str)
        self.assertFalse('SharedKey' in log_as_str)
class StorageBlockBlobTestAsync(StorageTestCase):

    def setUp(self):
        super(StorageBlockBlobTestAsync, self).setUp()
        url = self._get_account_url()

        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        self.bsc = BlobServiceClient(
            url,
            credential=self.settings.STORAGE_ACCOUNT_KEY,
            connection_data_block_size=4 * 1024,
            max_single_put_size=32 * 1024,
            max_block_size=4 * 1024,
            transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.bsc.delete_container(self.container_name))
            except:
                pass

        if os.path.isfile(FILE_PATH):
            try:
                os.remove(FILE_PATH)
            except:
                pass

        return super(StorageBlockBlobTestAsync, self).tearDown()

    #--Helpers-----------------------------------------------------------------
    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except ResourceExistsError:
                pass

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    async def _create_blob(self):
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(b'')
        return blob

    async def assertBlobEqual(self, container_name, blob_name, expected_data):
        await self._setup()
        blob = self.bsc.get_blob_client(container_name, blob_name)
        stream = await blob.download_blob()
        actual_data = await stream.content_as_bytes()
        self.assertEqual(actual_data, expected_data)

    class NonSeekableFile(object):
        def __init__(self, wrapped_file):
            self.wrapped_file = wrapped_file

        def write(self, data):
            self.wrapped_file.write(data)

        def read(self, count):
            return self.wrapped_file.read(count)

    #--Test cases for block blobs ---------------------------------------------
    async def _test_put_block(self):
        await self._setup()
        # Arrange
        blob = await self._create_blob()

        # Act
        for i in range(5):
            resp = await blob.stage_block(i, 'block {0}'.format(i).encode('utf-8'))
            self.assertIsNone(resp)

        # Assert

    @record
    def test_put_block(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block())

    async def _test_put_block_unicode(self):
        await self._setup()
        # Arrange
        blob = await self._create_blob()

        # Act
        resp = await blob.stage_block('1', u'啊齄丂狛狜')
        self.assertIsNone(resp)

        # Assert

    @record
    def test_put_block_unicode(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_unicode())

    async def _test_put_block_with_md5(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        await blob.stage_block(1, b'block', validate_content=True)

        # Assert

    @record
    def test_put_block_with_md5(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_with_md5())

    async def _test_put_block_list(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
        put_block_list_resp = await blob.commit_block_list(block_list)

        # Assert
        content = await blob.download_blob()
        actual = await content.content_as_bytes()
        self.assertEqual(actual, b'AAABBBCCC')
        self.assertEqual(content.properties.etag, put_block_list_resp.get('etag'))
        self.assertEqual(content.properties.last_modified, put_block_list_resp.get('last_modified'))

    @record
    def test_put_block_list(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_list())

    async def _test_put_block_list_invalid_block_id(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        try:
            block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='4')]
            await blob.commit_block_list(block_list)
            self.fail()
        except HttpResponseError as e:
            self.assertGreaterEqual(str(e).find('specified block list is invalid'), 0)

        # Assert

    @record
    def test_put_block_list_invalid_block_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_list_invalid_block_id())

    async def _test_put_block_list_with_md5(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
        await blob.commit_block_list(block_list, validate_content=True)

        # Assert

    @record
    def test_put_block_list_with_md5(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_list_with_md5())

    async def _test_put_block_list_with_blob_tier_specified(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob_client.stage_block('1', b'AAA')
        await blob_client.stage_block('2', b'BBB')
        await blob_client.stage_block('3', b'CCC')
        blob_tier = StandardBlobTier.Cool

        # Act
        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
        await blob_client.commit_block_list(block_list, standard_blob_tier=blob_tier)

        # Assert
        blob_properties = await blob_client.get_blob_properties()
        self.assertEqual(blob_properties.blob_tier, blob_tier)

    @record
    def test_put_block_list_with_blob_tier_specified_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_list_with_blob_tier_specified())

    async def _test_get_block_list_no_blocks(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        block_list = await blob.get_block_list('all')

        # Assert
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 0)

    @record
    def test_get_block_list_no_blocks(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_block_list_no_blocks())

    async def _test_get_block_list_uncommitted_blocks(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        block_list = await blob.get_block_list('uncommitted')

        # Assert
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 3)
        self.assertEqual(len(block_list[0]), 0)
        self.assertEqual(block_list[1][0].id, '1')
        self.assertEqual(block_list[1][0].size, 3)
        self.assertEqual(block_list[1][1].id, '2')
        self.assertEqual(block_list[1][1].size, 3)
        self.assertEqual(block_list[1][2].id, '3')
        self.assertEqual(block_list[1][2].size, 3)

    @record
    def test_get_block_list_uncommitted_blocks(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_block_list_uncommitted_blocks())

    async def _test_get_block_list_committed_blocks(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
        await blob.commit_block_list(block_list)

        # Act
        block_list = await blob.get_block_list('committed')

        # Assert
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 3)
        self.assertEqual(block_list[0][0].id, '1')
        self.assertEqual(block_list[0][0].size, 3)
        self.assertEqual(block_list[0][1].id, '2')
        self.assertEqual(block_list[0][1].size, 3)
        self.assertEqual(block_list[0][2].id, '3')
        self.assertEqual(block_list[0][2].size, 3)

    @record
    def test_get_block_list_committed_blocks(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_block_list_committed_blocks())

    async def _test_create_small_block_blob_with_no_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = b'hello world'
        data2 = b'hello second world'

        # Act
        create_resp = await blob.upload_blob(data1, overwrite=True)

        with self.assertRaises(ResourceExistsError):
            await blob.upload_blob(data2, overwrite=False)

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data1)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)

    @record
    def test_create_small_block_blob_with_no_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_small_block_blob_with_no_overwrite())

    async def _test_create_small_block_blob_with_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = b'hello world'
        data2 = b'hello second world'

        # Act
        create_resp = await blob.upload_blob(data1, overwrite=True)
        update_resp = await blob.upload_blob(data2, overwrite=True)

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data2)
        self.assertEqual(props.etag, update_resp.get('etag'))
        self.assertEqual(props.last_modified, update_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)

    @record
    def test_create_small_block_blob_with_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_small_block_blob_with_overwrite())

    async def _test_create_large_block_blob_with_no_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = self.get_random_bytes(LARGE_BLOB_SIZE)
        data2 = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        create_resp = await blob.upload_blob(data1, overwrite=True, metadata={'BlobData': 'Data1'})

        with self.assertRaises(ResourceExistsError):
            await blob.upload_blob(data2, overwrite=False, metadata={'BlobData': 'Data2'})

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data1)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)
        self.assertEqual(props.metadata, {'BlobData': 'Data1'})
        self.assertEqual(props.size, LARGE_BLOB_SIZE)

    @record
    def test_create_large_block_blob_with_no_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_block_blob_with_no_overwrite())

    async def _test_create_large_block_blob_with_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = self.get_random_bytes(LARGE_BLOB_SIZE)
        data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512)

        # Act
        create_resp = await blob.upload_blob(data1, overwrite=True, metadata={'BlobData': 'Data1'})
        update_resp = await blob.upload_blob(data2, overwrite=True, metadata={'BlobData': 'Data2'})

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data2)
        self.assertEqual(props.etag, update_resp.get('etag'))
        self.assertEqual(props.last_modified, update_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)
        self.assertEqual(props.metadata, {'BlobData': 'Data2'})
        self.assertEqual(props.size, LARGE_BLOB_SIZE + 512)

    @record
    def test_create_large_block_blob_with_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_block_blob_with_overwrite())

    async def _test_create_blob_from_bytes_single_put(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'

        # Act
        create_resp = await blob.upload_blob(data)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_bytes_single_put(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_single_put())

    async def _test_create_blob_from_0_bytes(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b''

        # Act
        create_resp = await blob.upload_blob(data)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_0_bytes(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_0_bytes())

    async def _test_create_from_bytes_blob_unicode(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'

        # Act
        create_resp = await blob.upload_blob(data)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_from_bytes_blob_unicode(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_from_bytes_blob_unicode())

    async def _test_create_from_bytes_blob_with_lease_id(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        lease = await blob.acquire_lease()

        # Act
        create_resp = await blob.upload_blob(data, lease=lease)

        # Assert
        output = await blob.download_blob(lease=lease)
        actual = await output.content_as_bytes()
        self.assertEqual(actual, data)
        self.assertEqual(output.properties.etag, create_resp.get('etag'))
        self.assertEqual(output.properties.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_from_bytes_blob_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_from_bytes_blob_with_lease_id())

    async def _test_create_blob_from_bytes_with_metadata(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        metadata = {'hello': 'world', 'number': '42'}

        # Act
        await blob.upload_blob(data, metadata=metadata)

        # Assert
        md = await blob.get_blob_properties()
        md = md.metadata
        self.assertDictEqual(md, metadata)

    @record
    def test_create_blob_from_bytes_with_metadata(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_metadata())

    async def _test_create_blob_from_bytes_with_properties(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        await blob.upload_blob(data, content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_blob_from_bytes_with_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_properties())

    async def _test_create_blob_from_bytes_with_progress(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        create_resp = await blob.upload_blob(data, raw_response_hook=callback)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size, progress)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_bytes_with_progress(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_progress())

    async def _test_create_blob_from_bytes_with_index(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data[3:])

        # Assert
        db = await blob.download_blob()
        output = await db.content_as_bytes()
        self.assertEqual(data[3:], output)

    @record
    def test_create_blob_from_bytes_with_index(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_index())

    async def _test_create_blob_from_bytes_with_index_and_count(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data[3:], length=5)

        # Assert
        db = await blob.download_blob()
        output = await db.content_as_bytes()
        self.assertEqual(data[3:8], output)

    @record
    def test_create_blob_from_bytes_with_index_and_count(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_index_and_count())

    async def _test_create_blob_from_bytes_with_index_and_count_and_properties(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        await blob.upload_blob(data[3:], length=5, content_settings=content_settings)

        # Assert
        db = await blob.download_blob()
        output = await db.content_as_bytes()
        self.assertEqual(data[3:8], output)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_blob_from_bytes_with_index_and_count_and_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_index_and_count_and_properties())

    async def _test_create_blob_from_bytes_non_parallel(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data, length=LARGE_BLOB_SIZE, max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob.blob_name, data)

    @record
    def test_create_blob_from_bytes_non_parallel(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_non_parallel())

    async def _test_create_blob_from_bytes_with_blob_tier_specified(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'
        blob_tier = StandardBlobTier.Cool

        # Act
        await blob_client.upload_blob(data, standard_blob_tier=blob_tier)
        blob_properties = await blob_client.get_blob_properties()

        # Assert
        self.assertEqual(blob_properties.blob_tier, blob_tier)

    @record
    def test_create_blob_from_bytes_with_blob_tier_specified_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_blob_tier_specified())

    async def _test_create_blob_from_path(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            create_resp = await blob.upload_blob(stream)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_path(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_path())

    async def _test_create_blob_from_path_non_parallel(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(100)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            create_resp = await blob.upload_blob(stream, length=100, max_concurrency=1)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_path_non_parallel(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_path_non_parallel())

    async def _test_upload_blob_from_path_non_parallel_with_standard_blob_tier(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(100)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)
        blob_tier = StandardBlobTier.Cool

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, length=100, max_concurrency=1, standard_blob_tier=blob_tier)
        props = await blob.get_blob_properties()

        # Assert
        self.assertEqual(props.blob_tier, blob_tier)

    @record
    def test_upload_blob_from_path_non_parallel_with_standard_blob_tier_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_upload_blob_from_path_non_parallel_with_standard_blob_tier())

    async def _test_create_blob_from_path_with_progress(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size, progress)

    @record
    def test_create_blob_from_path_with_progress(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_path_with_progress())

    async def _test_create_blob_from_path_with_properties(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_blob_from_path_with_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_path_with_properties())

    async def _test_create_blob_from_stream_chunked_upload(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            create_resp = await blob.upload_blob(stream)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_stream_chunked_upload(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_chunked_upload())

    async def _test_create_blob_from_stream_non_seekable_chunked_upload_known_size(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        blob_size = len(data) - 66
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            non_seekable_file = StorageBlockBlobTestAsync.NonSeekableFile(stream)
            await blob.upload_blob(non_seekable_file, length=blob_size, max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data[:blob_size])

    @record
    def test_create_blob_from_stream_non_seekable_chunked_upload_known_size(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_non_seekable_chunked_upload_known_size())

    async def _test_create_blob_from_stream_non_seekable_chunked_upload_unknown_size(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            non_seekable_file = StorageBlockBlobTestAsync.NonSeekableFile(stream)
            await blob.upload_blob(non_seekable_file, max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_blob_from_stream_non_seekable_chunked_upload_unknown_size(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_non_seekable_chunked_upload_unknown_size())

    async def _test_create_blob_from_stream_with_progress_chunked_upload(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size, progress)

    @record
    def test_create_blob_from_stream_with_progress_chunked_upload(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_with_progress_chunked_upload())

    async def _test_create_blob_from_stream_chunked_upload_with_count(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            resp = await blob.upload_blob(stream, length=blob_size)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data[:blob_size])

    @record
    def test_create_blob_from_stream_chunked_upload_with_count(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_chunked_upload_with_count())

    async def _test_create_blob_from_stream_chunked_upload_with_count_and_properties(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, length=blob_size, content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data[:blob_size])
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_blob_from_stream_chunked_upload_with_count_and_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_chunked_upload_with_count_and_properties())

    async def _test_create_blob_from_stream_chunked_upload_with_properties(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_blob_from_stream_chunked_upload_with_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_chunked_upload_with_properties())

    async def _test_create_blob_from_stream_chunked_upload_with_properties_and_tier(self):
        # Same upload as above, but also pins a standard blob tier.
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)
        blob_tier = StandardBlobTier.Cool

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, content_settings=content_settings, max_concurrency=2,
                                   standard_blob_tier=blob_tier)
        properties = await blob.get_blob_properties()

        # Assert
        self.assertEqual(properties.blob_tier, blob_tier)

    @record
    def test_create_blob_from_stream_chunked_upload_with_properties_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_stream_chunked_upload_with_properties_and_tier())

    async def _test_create_blob_from_text(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-8')

        # Act
        create_resp = await blob.upload_blob(text)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_text(self):
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_text())

    async def _test_create_blob_from_text_with_encoding(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-16')

        # Act
        await blob.upload_blob(text, encoding='utf-16')

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_blob_from_text_with_encoding(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_text_with_encoding())

    async def _test_create_blob_from_text_with_encoding_and_progress(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-16')

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        await blob.upload_blob(text, encoding='utf-16', raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size, progress)

    @record
    def test_create_blob_from_text_with_encoding_and_progress(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_text_with_encoding_and_progress())

    async def _test_create_blob_from_text_chunked_upload(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_text_data(LARGE_BLOB_SIZE)
        encoded_data = data.encode('utf-8')

        # Act
        await blob.upload_blob(data)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, encoded_data)

    @record
    def test_create_blob_from_text_chunked_upload(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_text_chunked_upload())

    async def _test_create_blob_with_md5(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'

        # Act
        await blob.upload_blob(data, validate_content=True)

        # Assert

    @record
    def test_create_blob_with_md5(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_with_md5())

    async def _test_create_blob_with_md5_chunked(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data, validate_content=True)

        # Assert

    @record
    def test_create_blob_with_md5_chunked(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_with_md5_chunked())
class StorageBlobTagsTest(AsyncStorageTestCase): async def _setup(self, storage_account, key): self.bsc = BlobServiceClient(self.account_url(storage_account, "blob"), credential=key) self.container_name = self.get_resource_name("container") if self.is_live: container = self.bsc.get_container_client(self.container_name) try: await container.create_container(timeout=5) except ResourceExistsError: pass self.byte_data = self.get_random_bytes(1024) #--Helpers----------------------------------------------------------------- def _get_blob_reference(self): return self.get_resource_name(TEST_BLOB_PREFIX) async def _create_block_blob(self, tags=None, container_name=None, blob_name=None): blob_name = blob_name or self._get_blob_reference() blob_client = self.bsc.get_blob_client(container_name or self.container_name, blob_name) resp = await blob_client.upload_blob(self.byte_data, length=len(self.byte_data), overwrite=True, tags=tags) return blob_client, resp async def _create_empty_block_blob(self): blob_name = self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) resp = await blob_client.upload_blob(b'', length=0, overwrite=True) return blob_client, resp async def _create_append_blob(self, tags=None): blob_name = self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) resp = await blob_client.create_append_blob(tags=tags) return blob_client, resp async def _create_page_blob(self, tags=None): blob_name = self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) resp = await blob_client.create_page_blob(tags=tags, size=512) return blob_client, resp async def _create_container(self, prefix="container"): container_name = self.get_resource_name(prefix) try: await self.bsc.create_container(container_name) except: pass return container_name #-- test cases for blob tags ---------------------------------------------- @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_set_blob_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) blob_client, _ = await self._create_block_blob() # Act tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} resp = await blob_client.set_blob_tags(tags) # Assert self.assertIsNotNone(resp) @pytest.mark.playback_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_set_blob_tags_for_a_version(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) # use this version to set tag blob_client, resp = await self._create_block_blob() await self._create_block_blob() # TODO: enable versionid for this account and test set tag for a version # Act tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} resp = await blob_client.set_blob_tags(tags, version_id=resp['version_id']) # Assert self.assertIsNotNone(resp) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_get_blob_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) blob_client, resp = await self._create_block_blob() # Act tags = {"tag1": 
"firsttag", "tag2": "secondtag", "tag3": "thirdtag"} await blob_client.set_blob_tags(tags) resp = await blob_client.get_blob_tags() # Assert self.assertIsNotNone(resp) self.assertEqual(len(resp), 3) for key, value in resp.items(): self.assertEqual(tags[key], value) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_get_blob_tags_for_a_snapshot(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"+-./:=_ ": "firsttag", "tag2": "+-./:=_", "+-./:=_1": "+-./:=_"} blob_client, resp = await self._create_block_blob(tags=tags) snapshot = await blob_client.create_snapshot() snapshot_client = self.bsc.get_blob_client(self.container_name, blob_client.blob_name, snapshot=snapshot) resp = await snapshot_client.get_blob_tags() # Assert self.assertIsNotNone(resp) self.assertEqual(len(resp), 3) for key, value in resp.items(): self.assertEqual(tags[key], value) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_upload_block_blob_with_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} blob_client, resp = await self._create_block_blob(tags=tags) resp = await blob_client.get_blob_tags() # Assert self.assertIsNotNone(resp) self.assertEqual(len(resp), 3) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_get_blob_properties_returns_tags_num(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} blob_client, resp = await self._create_block_blob(tags=tags) resp = await blob_client.get_blob_properties() downloaded = await blob_client.download_blob() # Assert self.assertIsNotNone(resp) self.assertEqual(resp.tag_count, len(tags)) self.assertEqual(downloaded.properties.tag_count, len(tags)) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_create_append_blob_with_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"+-./:=_ ": "firsttag", "tag2": "+-./:=_", "+-./:=_1": "+-./:=_"} blob_client, resp = await self._create_append_blob(tags=tags) resp = await blob_client.get_blob_tags() # Assert self.assertIsNotNone(resp) self.assertEqual(len(resp), 3) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_create_page_blob_with_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} blob_client, resp = await self._create_page_blob(tags=tags) resp = await blob_client.get_blob_tags() # Assert self.assertIsNotNone(resp) 
self.assertEqual(len(resp), 3) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_commit_block_list_with_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} blob_client, resp = await self._create_empty_block_blob() await blob_client.stage_block('1', b'AAA') await blob_client.stage_block('2', b'BBB') await blob_client.stage_block('3', b'CCC') # Act block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')] await blob_client.commit_block_list(block_list, tags=tags) resp = await blob_client.get_blob_tags() # Assert self.assertIsNotNone(resp) self.assertEqual(len(resp), len(tags)) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_start_copy_from_url_with_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} blob_client, resp = await self._create_block_blob() # Act sourceblob = '{0}/{1}/{2}'.format( self.account_url(storage_account, "blob"), self.container_name, blob_client.blob_name) copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy') copy = await copyblob.start_copy_from_url(sourceblob, tags=tags) # Assert self.assertIsNotNone(copy) self.assertEqual(copy['copy_status'], 'success') self.assertFalse(isinstance(copy['copy_status'], Enum)) self.assertIsNotNone(copy['copy_id']) copy_content = await (await copyblob.download_blob()).readall() self.assertEqual(copy_content, self.byte_data) resp = await copyblob.get_blob_tags() # Assert self.assertIsNotNone(resp) self.assertEqual(len(resp), len(tags)) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_list_blobs_returns_tags(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} await self._create_block_blob(tags=tags) container = self.bsc.get_container_client(self.container_name) blob_list = container.list_blobs(include="tags") #Assert async for blob in blob_list: self.assertEqual(blob.tag_count, len(tags)) for key, value in blob.tags.items(): self.assertEqual(tags[key], value) @GlobalResourceGroupPreparer() @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage') @AsyncStorageTestCase.await_prepared_test async def test_filter_blobs(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) container_name1 = await self._create_container(prefix="container1") container_name2 = await self._create_container(prefix="container2") container_name3 = await self._create_container(prefix="container3") tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"} await self._create_block_blob(tags=tags, blob_name="blob1") await self._create_block_blob(tags=tags, blob_name="blob2", container_name=container_name1) await 
self._create_block_blob(tags=tags, blob_name="blob3", container_name=container_name2) await self._create_block_blob(tags=tags, blob_name="blob4", container_name=container_name3) if self.is_live: sleep(10) where = "tag1='firsttag'" blob_list = self.bsc.find_blobs_by_tags(filter_expression=where, results_per_page=2).by_page() first_page = await blob_list.__anext__() items_on_page1 = list() async for item in first_page: items_on_page1.append(item) second_page = await blob_list.__anext__() items_on_page2 = list() async for item in second_page: items_on_page2.append(item) self.assertEqual(2, len(items_on_page1)) self.assertEqual(2, len(items_on_page2))
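# A minimal usage sketch (not part of the test suite) of the tag-filter flow the
# tests above exercise. Names here are illustrative; it assumes an authenticated
# azure.storage.blob.aio.BlobServiceClient is passed in as `bsc`.
async def find_tagged_blobs_example(bsc):
    # Tags can be set at upload time; find_blobs_by_tags then queries across
    # containers using the same filter syntax as the `where` expression in
    # test_filter_blobs above.
    container = await bsc.create_container('example-tags-container')
    await container.upload_blob('example-blob', b'data', tags={'tag1': 'firsttag'})
    async for filtered_blob in bsc.find_blobs_by_tags("tag1='firsttag'"):
        print(filtered_blob.container_name, filtered_blob.name)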
class BlobStorageAccountTestAsync(StorageTestCase): def setUp(self): super(BlobStorageAccountTestAsync, self).setUp() url = self._get_account_url() credential = self._get_shared_key_credential() self.bsc = BlobServiceClient(url, credential=credential, transport=AiohttpTestTransport()) self.container_name = self.get_resource_name('utcontainer') def tearDown(self): if not self.is_playback(): loop = asyncio.get_event_loop() try: loop.run_until_complete(self.bsc.delete_container(self.container_name)) except: pass return super(BlobStorageAccountTestAsync, self).tearDown() # --Helpers----------------------------------------------------------------- async def _setup(self): if not self.is_playback(): try: await self.bsc.create_container(self.container_name) except: pass def _get_blob_reference(self): blob_name = self.get_resource_name(TEST_BLOB_PREFIX) return self.bsc.get_blob_client(self.container_name, blob_name) async def _create_blob(self): blob = self._get_blob_reference() await blob.upload_blob(b'') return blob async def assertBlobEqual(self, container_name, blob_name, expected_data): blob = self.bsc.get_blob_client(container_name, blob_name) stream = await blob.download_blob() actual_data = await stream.content_as_bytes() self.assertEqual(actual_data, expected_data) # --Tests specific to Blob Storage Accounts (not general purpose)------------ async def _test_standard_blob_tier_set_tier_api(self): await self._setup() container = self.bsc.get_container_client(self.container_name) tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot] for tier in tiers: blob = self._get_blob_reference() data = b'hello world' await blob.upload_blob(data) blob_ref = await blob.get_blob_properties() self.assertIsNotNone(blob_ref.blob_tier) self.assertTrue(blob_ref.blob_tier_inferred) self.assertIsNone(blob_ref.blob_tier_change_time) blobs = [] async for b in container.list_blobs(): blobs.append(b) # Assert self.assertIsNotNone(blobs) self.assertGreaterEqual(len(blobs), 1) self.assertIsNotNone(blobs[0]) self.assertNamedItemInContainer(blobs, blob.blob_name) self.assertIsNotNone(blobs[0].blob_tier) self.assertTrue(blobs[0].blob_tier_inferred) self.assertIsNone(blobs[0].blob_tier_change_time) await blob.set_standard_blob_tier(tier) blob_ref2 = await blob.get_blob_properties() self.assertEqual(tier, blob_ref2.blob_tier) self.assertFalse(blob_ref2.blob_tier_inferred) self.assertIsNotNone(blob_ref2.blob_tier_change_time) blobs = [] async for b in container.list_blobs(): blobs.append(b) # Assert self.assertIsNotNone(blobs) self.assertGreaterEqual(len(blobs), 1) self.assertIsNotNone(blobs[0]) self.assertNamedItemInContainer(blobs, blob.blob_name) self.assertEqual(blobs[0].blob_tier, tier) self.assertFalse(blobs[0].blob_tier_inferred) self.assertIsNotNone(blobs[0].blob_tier_change_time) await blob.delete_blob() @record def test_standard_blob_tier_set_tier_api(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_standard_blob_tier_set_tier_api()) async def _test_rehydration_status(self): await self._setup() blob_name = 'rehydration_test_blob_1' blob_name2 = 'rehydration_test_blob_2' container = self.bsc.get_container_client(self.container_name) data = b'hello world' blob = await container.upload_blob(blob_name, data) await blob.set_standard_blob_tier(StandardBlobTier.Archive) await blob.set_standard_blob_tier(StandardBlobTier.Cool) blob_ref = await blob.get_blob_properties() self.assertEqual(StandardBlobTier.Archive, 
blob_ref.blob_tier) self.assertEqual("rehydrate-pending-to-cool", blob_ref.archive_status) self.assertFalse(blob_ref.blob_tier_inferred) blobs = [] async for b in container.list_blobs(): blobs.append(b) await blob.delete_blob() # Assert self.assertIsNotNone(blobs) self.assertGreaterEqual(len(blobs), 1) self.assertIsNotNone(blobs[0]) self.assertNamedItemInContainer(blobs, blob.blob_name) self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier) self.assertEqual("rehydrate-pending-to-cool", blobs[0].archive_status) self.assertFalse(blobs[0].blob_tier_inferred) blob2 = await container.upload_blob(blob_name2, data) await blob2.set_standard_blob_tier(StandardBlobTier.Archive) await blob2.set_standard_blob_tier(StandardBlobTier.Hot) blob_ref2 = await blob2.get_blob_properties() self.assertEqual(StandardBlobTier.Archive, blob_ref2.blob_tier) self.assertEqual("rehydrate-pending-to-hot", blob_ref2.archive_status) self.assertFalse(blob_ref2.blob_tier_inferred) blobs = [] async for b in container.list_blobs(): blobs.append(b) # Assert self.assertIsNotNone(blobs) self.assertGreaterEqual(len(blobs), 1) self.assertIsNotNone(blobs[0]) self.assertNamedItemInContainer(blobs, blob2.blob_name) self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier) self.assertEqual("rehydrate-pending-to-hot", blobs[0].archive_status) self.assertFalse(blobs[0].blob_tier_inferred) @record def test_rehydration_status(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_rehydration_status())
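# A condensed sketch of the tiering flow verified above, assuming an existing
# aio ContainerClient named `container` (illustrative only, not a test).
async def archive_and_rehydrate_example(container):
    from azure.storage.blob import StandardBlobTier
    blob = await container.upload_blob('tier-example', b'hello world')
    await blob.set_standard_blob_tier(StandardBlobTier.Archive)
    # Requesting a non-archive tier on an archived blob only starts rehydration:
    # blob_tier stays Archive and archive_status reports
    # 'rehydrate-pending-to-cool' (or '-to-hot') until the service finishes.
    await blob.set_standard_blob_tier(StandardBlobTier.Cool)
    props = await blob.get_blob_properties()
    return props.blob_tier, props.archive_status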
class StorageBlobRetryTestAsync(StorageTestCase): def setUp(self): super(StorageBlobRetryTestAsync, self).setUp() url = self._get_account_url() credential = self._get_shared_key_credential() retry = ExponentialRetry(initial_backoff=1, increment_base=2, retry_total=3) self.bs = BlobServiceClient(url, credential=credential, retry_policy=retry) self.container_name = self.get_resource_name('utcontainer') def tearDown(self): if not self.is_playback(): loop = asyncio.get_event_loop() try: loop.run_until_complete( self.bs.delete_container(self.container_name)) except: pass return super(StorageBlobRetryTestAsync, self).tearDown() # --Helpers----------------------------------------------------------------- async def _setup(self): if not self.is_playback(): try: await self.bs.create_container(self.container_name) except ResourceExistsError: pass class NonSeekableStream(IOBase): def __init__(self, wrapped_stream): self.wrapped_stream = wrapped_stream def write(self, data): self.wrapped_stream.write(data) def read(self, count): return self.wrapped_stream.read(count) def seek(self, *args, **kwargs): raise UnsupportedOperation("boom!") def tell(self): return self.wrapped_stream.tell() async def _test_retry_put_block_with_seekable_stream_async(self): if TestMode.need_recording_file(self.test_mode): return # Arrange await self._setup() blob_name = self.get_resource_name('blob') data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = BytesIO(data) # rig the response so that it fails for a single time responder = ResponseCallback(status=201, new_status=408) # Act blob = self.bs.get_blob_client(self.container_name, blob_name) await blob.stage_block( 1, data_stream, raw_response_hook=responder.override_first_status) # Assert _, uncommitted_blocks = await blob.get_block_list( block_list_type="uncommitted", raw_response_hook=responder.override_first_status) self.assertEqual(len(uncommitted_blocks), 1) self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE) # Commit block and verify content await blob.commit_block_list( ['1'], raw_response_hook=responder.override_first_status) # Assert content = await (await blob.download_blob()).readall() self.assertEqual(content, data) def test_retry_put_block_with_seekable_stream_async(self): pytest.skip("Aiohttp closes stream after request - cannot rewind.") if TestMode.need_recording_file(self.test_mode): return loop = asyncio.get_event_loop() loop.run_until_complete( self._test_retry_put_block_with_seekable_stream_async()) async def _test_retry_put_block_with_non_seekable_stream_async(self): if TestMode.need_recording_file(self.test_mode): return # Arrange await self._setup() blob_name = self.get_resource_name('blob') data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = self.NonSeekableStream(BytesIO(data)) # rig the response so that it fails for a single time responder = ResponseCallback(status=201, new_status=408) # Act blob = self.bs.get_blob_client(self.container_name, blob_name) # Note: put_block transforms non-seekable streams into byte arrays before handing it off to the executor await blob.stage_block( 1, data_stream, raw_response_hook=responder.override_first_status) # Assert _, uncommitted_blocks = await blob.get_block_list( block_list_type="uncommitted", raw_response_hook=responder.override_first_status) self.assertEqual(len(uncommitted_blocks), 1) self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE) # Commit block and verify content await blob.commit_block_list( ['1'], raw_response_hook=responder.override_first_status) # Assert content = 
await (await blob.download_blob()).readall() self.assertEqual(content, data) def test_retry_put_block_with_non_seekable_stream_async(self): if TestMode.need_recording_file(self.test_mode): return loop = asyncio.get_event_loop() loop.run_until_complete( self._test_retry_put_block_with_non_seekable_stream_async()) async def _test_retry_put_block_with_non_seekable_stream_fail_async(self): if TestMode.need_recording_file(self.test_mode): return # Arrange await self._setup() blob_name = self.get_resource_name('blob') data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = self.NonSeekableStream(BytesIO(data)) # rig the response so that it fails for a single time responder = ResponseCallback(status=201, new_status=408) # Act blob = self.bs.get_blob_client(self.container_name, blob_name) with self.assertRaises(HttpResponseError) as error: await blob.stage_block( 1, data_stream, length=PUT_BLOCK_SIZE, raw_response_hook=responder.override_first_status) # Assert self.assertEqual(error.exception.response.status_code, 408) def test_retry_put_block_with_non_seekable_stream_fail_async(self): if TestMode.need_recording_file(self.test_mode): return loop = asyncio.get_event_loop() loop.run_until_complete( self._test_retry_put_block_with_non_seekable_stream_fail_async())
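# For readers without the shared test framework: a rough stand-in for the
# ResponseCallback helper used above (a sketch of its observed behavior, not
# the real implementation). It rewrites the status code of the first matching
# response so the client's retry policy is exercised exactly once.
class FirstStatusOverride(object):
    def __init__(self, status, new_status):
        self.status = status
        self.new_status = new_status
        self.first = True
    def override_first_status(self, response):
        # raw_response_hook receives the pipeline response; mutate the
        # transport response status on the first call only.
        if self.first and response.http_response.status_code == self.status:
            response.http_response.status_code = self.new_status
            self.first = False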
class StorageLargeBlockBlobTestAsync(AsyncStorageTestCase): # --Helpers----------------------------------------------------------------- async def _setup(self, name, key): # test chunking functionality by reducing the threshold # for chunking and the size of each chunk, otherwise # the tests would take too long to execute self.bsc = BlobServiceClient(self.account_url(name, "blob"), credential=key, max_single_put_size=32 * 1024, max_block_size=2 * 1024 * 1024, min_large_block_upload_threshold=1 * 1024 * 1024, retry_total=0, transport=AiohttpTestTransport()) self.config = self.bsc._config self.container_name = self.get_resource_name('utcontainer') if self.is_live: try: await self.bsc.create_container(self.container_name) except: pass def _teardown(self, file_name): if path.isfile(file_name): try: remove(file_name) except: pass def _get_blob_reference(self): return self.get_resource_name(TEST_BLOB_PREFIX) async def _create_blob(self): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(b'') return blob async def assertBlobEqual(self, container_name, blob_name, expected_data): blob = self.bsc.get_blob_client(container_name, blob_name) actual_data = await blob.download_blob() actual_bytes = b"" async for data in actual_data.chunks(): actual_bytes += data self.assertEqual(actual_bytes, expected_data) # --Test cases for block blobs -------------------------------------------- @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_bytes_large_async(self, resource_group, location, storage_account, storage_account_key): # Arrange await self._setup(storage_account.name, storage_account_key) blob = await self._create_blob() # Act futures = [] for i in range(5): futures.append( blob.stage_block('block {0}'.format(i).encode('utf-8'), urandom(LARGE_BLOCK_SIZE))) await asyncio.gather(*futures) # Assert @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_bytes_large_with_md5_async( self, resource_group, location, storage_account, storage_account_key): # Arrange await self._setup(storage_account.name, storage_account_key) blob = await self._create_blob() # Act for i in range(5): resp = await blob.stage_block( 'block {0}'.format(i).encode('utf-8'), urandom(LARGE_BLOCK_SIZE), validate_content=True) self.assertIsNone(resp) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_stream_large_async(self, resource_group, location, storage_account, storage_account_key): # Arrange await self._setup(storage_account.name, storage_account_key) blob = await self._create_blob() # Act for i in range(5): stream = BytesIO(bytearray(LARGE_BLOCK_SIZE)) resp = await blob.stage_block( 'block {0}'.format(i).encode('utf-8'), stream, length=LARGE_BLOCK_SIZE) self.assertIsNone(resp) # Assert @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_stream_large_with_md5_async( self, resource_group, location, storage_account, storage_account_key): # Arrange await self._setup(storage_account.name, storage_account_key) blob = await self._create_blob() # Act for i in range(5): stream = BytesIO(bytearray(LARGE_BLOCK_SIZE)) resp = await blob.stage_block( 'block {0}'.format(i).encode('utf-8'), stream, length=LARGE_BLOCK_SIZE, validate_content=True) self.assertIsNone(resp) # 
Assert @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_large_blob_from_path_async(self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = 'create_large_blob_from_path_async.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, max_concurrency=2) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_large_blob_from_path_with_md5_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = 'create_large_blob_from_path_with_md5_async.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, validate_content=True, max_concurrency=2) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_large_blob_from_path_non_parallel_async( self, resource_group, location, storage_account, storage_account_key): # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(self.get_random_bytes(100)) FILE_PATH = 'large_blob_from_path_non_parallel_async.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, max_concurrency=1) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_large_blob_from_path_with_progress_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) FILE_PATH = 'large_blob_from_path_with_progress_asyn.temp.dat' data = bytearray(urandom(LARGE_BLOB_SIZE)) with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: progress = [] def callback(response): current = response.context['upload_stream_current'] total = response.context['data_stream_total'] if current is not None: progress.append((current, total)) with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, max_concurrency=2, raw_response_hook=callback) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) 
self.assert_upload_progress(len(data), self.config.max_block_size, progress) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_large_blob_from_path_with_properties_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = 'large_blob_from_path_with_properties_asy.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: content_settings = ContentSettings(content_type='image/png', content_language='spanish') with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, content_settings=content_settings, max_concurrency=2) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) properties = await blob.get_blob_properties() self.assertEqual(properties.content_settings.content_type, content_settings.content_type) self.assertEqual(properties.content_settings.content_language, content_settings.content_language) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_creat_lrg_blob_frm_stream_chnkd_upload_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = 'frm_stream_chnkd_upload_async.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, max_concurrency=2) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_creat_lrgblob_frm_strm_w_prgrss_chnkduplod_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = 'frm_strm_w_prgrss_chnkduplod_async.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: progress = [] def callback(response): current = response.context['upload_stream_current'] total = response.context['data_stream_total'] if current is not None: progress.append((current, total)) with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, max_concurrency=2, raw_response_hook=callback) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) self.assert_upload_progress(len(data), self.config.max_block_size, progress) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_creat_lrgblob_frm_strm_chnkd_uplod_w_cnt_async( self, resource_group, location, storage_account, storage_account_key): # 
parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = '_lrgblob_frm_strm_chnkd_uplod_w_cnt_.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: blob_size = len(data) - 301 with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, length=blob_size, max_concurrency=2) # Assert await self.assertBlobEqual(self.container_name, blob_name, data[:blob_size]) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_creat_lrg_frm_stream_chnk_upload_w_cntnprops( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = 'frm_stream_chnk_upload_w_cntnprops_async.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: content_settings = ContentSettings(content_type='image/png', content_language='spanish') blob_size = len(data) - 301 with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, length=blob_size, content_settings=content_settings, max_concurrency=2) # Assert await self.assertBlobEqual(self.container_name, blob_name, data[:blob_size]) properties = await blob.get_blob_properties() self.assertEqual(properties.content_settings.content_type, content_settings.content_type) self.assertEqual(properties.content_settings.content_language, content_settings.content_language) finally: self._teardown(FILE_PATH) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_large_from_stream_chunk_upld_with_props( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live # Arrange await self._setup(storage_account.name, storage_account_key) blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data = bytearray(urandom(LARGE_BLOB_SIZE)) FILE_PATH = 'from_stream_chunk_upld_with_props_async.temp.dat' with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act try: content_settings = ContentSettings(content_type='image/png', content_language='spanish') with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, content_settings=content_settings, max_concurrency=2) # Assert await self.assertBlobEqual(self.container_name, blob_name, data) properties = await blob.get_blob_properties() self.assertEqual(properties.content_settings.content_type, content_settings.content_type) self.assertEqual(properties.content_settings.content_language, content_settings.content_language) finally: self._teardown(FILE_PATH)
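# The client kwargs configured in _setup above decide the upload strategy:
# payloads at or below max_single_put_size go up in a single Put Blob request,
# anything larger is split into max_block_size blocks that are staged
# (optionally in parallel) and committed. A hedged sketch with illustrative
# names and sizes:
async def chunked_upload_example(account_url, key, data):
    from azure.storage.blob.aio import BlobServiceClient
    bsc = BlobServiceClient(account_url, credential=key,
                            max_single_put_size=32 * 1024,   # single-shot cutoff
                            max_block_size=2 * 1024 * 1024)  # chunk size
    blob = bsc.get_blob_client('example-container', 'example-blob')
    # With these settings, anything over 32 KiB is uploaded as 2 MiB blocks.
    await blob.upload_blob(data, max_concurrency=2, overwrite=True)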
class StorageBlockBlobAsyncTest(AsyncStorageTestCase): async def _setup(self, storage_account_name, key): # test chunking functionality by reducing the size of each chunk, # otherwise the tests would take too long to execute self.bsc = BlobServiceClient( self.account_url(storage_account_name, "blob"), credential=key, connection_data_block_size=4 * 1024, max_single_put_size=32 * 1024, max_block_size=4 * 1024, transport=AiohttpTestTransport()) self.config = self.bsc._config self.container_name = self.get_resource_name('utcontainer') # create source blob to be copied from self.source_blob_name = self.get_resource_name('srcblob') self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name) if self.is_live: try: await self.bsc.create_container(self.container_name) except: pass await blob.upload_blob(self.source_blob_data, overwrite=True) # generate a SAS so that it is accessible with a URL sas_token = generate_blob_sas( blob.account_name, blob.container_name, blob.blob_name, snapshot=blob.snapshot, account_key=blob.credential.account_key, permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url self.source_blob_url_without_sas = blob.url @BlobPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_from_url_with_oauth(self, storage_account_name, storage_account_key): # Arrange await self._setup(storage_account_name, storage_account_key) split = 4 * 1024 destination_blob_name = self.get_resource_name('destblob') destination_blob_client = self.bsc.get_blob_client(self.container_name, destination_blob_name) access_token = await self.generate_oauth_token().get_token("https://storage.azure.com/.default") token = "Bearer {}".format(access_token.token) # Assert this operation fails without a credential with self.assertRaises(HttpResponseError): await destination_blob_client.stage_block_from_url( block_id=1, source_url=self.source_blob_url_without_sas, source_offset=0, source_length=split) # Assert it passes after passing an oauth credential await destination_blob_client.stage_block_from_url( block_id=1, source_url=self.source_blob_url_without_sas, source_offset=0, source_length=split, source_authorization=token) await destination_blob_client.stage_block_from_url( block_id=2, source_url=self.source_blob_url_without_sas, source_offset=split, source_length=split, source_authorization=token) committed, uncommitted = await destination_blob_client.get_block_list('all') self.assertEqual(len(uncommitted), 2) self.assertEqual(len(committed), 0) # Act part 2: commit the blocks await destination_blob_client.commit_block_list(['1', '2']) # Assert destination blob has right content destination_blob = await destination_blob_client.download_blob() destination_blob_data = await destination_blob.readall() self.assertEqual(len(destination_blob_data), 8 * 1024) self.assertEqual(destination_blob_data, self.source_blob_data) @BlobPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_from_url_and_commit_async(self, storage_account_name, storage_account_key): # Arrange await self._setup(storage_account_name, storage_account_key) dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # Act part 1: make put block from url calls split = 4 * 1024 futures = [ 
dest_blob.stage_block_from_url( block_id=1, source_url=self.source_blob_url, source_offset=0, source_length=split), dest_blob.stage_block_from_url( block_id=2, source_url=self.source_blob_url, source_offset=split, source_length=split)] await asyncio.gather(*futures) # Assert blocks committed, uncommitted = await dest_blob.get_block_list('all') self.assertEqual(len(uncommitted), 2) self.assertEqual(len(committed), 0) # Act part 2: commit the blocks await dest_blob.commit_block_list(['1', '2']) # Assert destination blob has right content content = await (await dest_blob.download_blob()).readall() self.assertEqual(content, self.source_blob_data) self.assertEqual(len(content), 8 * 1024) @BlobPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_from_url_and_vldte_content_md5(self, storage_account_name, storage_account_key): # Arrange await self._setup(storage_account_name, storage_account_key) dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) src_md5 = StorageContentValidation.get_content_md5(self.source_blob_data) # Act part 1: put block from url with md5 validation await dest_blob.stage_block_from_url( block_id=1, source_url=self.source_blob_url, source_content_md5=src_md5, source_offset=0, source_length=8 * 1024) # Assert block was staged committed, uncommitted = await dest_blob.get_block_list('all') self.assertEqual(len(uncommitted), 1) self.assertEqual(len(committed), 0) # Act part 2: put block from url with wrong md5 fake_md5 = StorageContentValidation.get_content_md5(b"POTATO") with self.assertRaises(HttpResponseError) as error: await dest_blob.stage_block_from_url( block_id=2, source_url=self.source_blob_url, source_content_md5=fake_md5, source_offset=0, source_length=8 * 1024) self.assertEqual(error.exception.error_code, StorageErrorCode.md5_mismatch) # Assert block was not staged committed, uncommitted = await dest_blob.get_block_list('all') self.assertEqual(len(uncommitted), 1) self.assertEqual(len(committed), 0) @BlobPreparer() @AsyncStorageTestCase.await_prepared_test async def test_copy_blob_sync_async(self, storage_account_name, storage_account_key): # Arrange await self._setup(storage_account_name, storage_account_key) dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # Act copy_props = await dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True) # Assert self.assertIsNotNone(copy_props) self.assertIsNotNone(copy_props['copy_id']) self.assertEqual('success', copy_props['copy_status']) # Verify content content = await (await dest_blob.download_blob()).readall() self.assertEqual(self.source_blob_data, content) @pytest.mark.playback_test_only @BlobPreparer() @AsyncStorageTestCase.await_prepared_test async def test_sync_copy_blob_returns_vid(self, storage_account_name, storage_account_key): # Arrange await self._setup(storage_account_name, storage_account_key) dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # Act copy_props = await dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True) # Assert self.assertIsNotNone(copy_props['version_id']) self.assertIsNotNone(copy_props) self.assertIsNotNone(copy_props['copy_id']) self.assertEqual('success', copy_props['copy_status']) # Verify content content = await (await dest_blob.download_blob()).readall() self.assertEqual(self.source_blob_data, content)
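# The stage-from-URL pattern above, generalized into a sketch: stage blocks
# server-side from a readable source URL, then commit them in order. Assumes
# `dest` is an aio BlobClient and `source_url` already carries read access
# (a SAS, or a source_authorization token as in the oauth test).
async def copy_via_blocks_from_url(dest, source_url, size, split):
    block_ids = []
    for i, offset in enumerate(range(0, size, split)):
        block_id = str(i + 1)
        await dest.stage_block_from_url(
            block_id=block_id, source_url=source_url,
            source_offset=offset, source_length=min(split, size - offset))
        block_ids.append(block_id)
    # Nothing is readable until the staged blocks are committed.
    await dest.commit_block_list(block_ids)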
class AzBlobManagerAsync: """A utility class for working with Azure Blob Storage. This class implements asynchronous methods based on the Microsoft Python SDK azure.storage.blob.aio See: https://docs.microsoft.com/en-us/python/api/azure-storage-blob/azure.storage.blob.aio?view=azure-python Available: - Basic methods to work with containers and blobs """ @classmethod def create(cls, connection_string=None, account_url=None, credential=None): """Instantiate an asynchronous AzBlobManagerAsync object. Args: connection_string (str): A connection string to an Azure Storage account. account_url (str): The URL to the blob storage account. Any other entities included in the URL path (e.g. container or blob) will be discarded. This URL can be optionally authenticated with a SAS token. credential (str): The credentials with which to authenticate. This is optional if the account URL already has a SAS token, or the connection string already has shared access key values. The value can be a SAS token string, an account shared access key, or an instance of a TokenCredentials class from azure.identity. Credentials provided here will take precedence over those in the connection string. Returns: AzBlobManagerAsync object Examples: Creating the AzBlobManagerAsync with account url and a shared access key: azStorageManager = AzBlobManagerAsync.create(account_url=self.url, credential=self.shared_access_key) Creating the AzBlobManagerAsync with a connection string that has the shared access key: azStorageManager = AzBlobManagerAsync.create(connection_string='DefaultEndpointsProtocol=http;...') """ self = cls() self.connection_string = connection_string self.account_url = account_url self.credential = credential from azure.storage.blob.aio import BlobServiceClient if (self.connection_string is not None): # Create BlobServiceClient from a Connection String self.blob_service_client = BlobServiceClient.from_connection_string( conn_str=self.connection_string, credential=self.credential) else: # Creating the BlobServiceClient with account url and credential. self.blob_service_client = BlobServiceClient( account_url=self.account_url, credential=self.credential) return self def _logAzureError(self, err): msg = err.message.split('\n')[0] logger.error(f'AzureError error: {msg}') async def create_container(self, container_name): """Creates a new container. Args: container_name (str): The name of the container. See https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata for naming conventions. Returns: bool: The return value. True for success, False otherwise. """ success = False try: new_container = await self.blob_service_client.create_container( container_name) properties = await new_container.get_container_properties() success = properties is not None and properties.name == container_name except ResourceExistsError: logger.info(f'Container \"{container_name}\" already exists.') except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return success async def delete_container(self, container_name): """Deletes a container. Args: container_name (str): The name of the container. Returns: bool: The return value. True for success, False otherwise. 
""" success = False try: await self.blob_service_client.delete_container(container_name) success = True except ResourceNotFoundError: logger.info(f'Container \"{container_name}\" doesn not exist.') except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return success async def _list_containers(self, name_starts_with=None, include_metadata=False): """Lists containers. Args: name_starts_with (str): Filters the results to return only containers whose names begin with the specified prefix. include_metadata (bool): Specifies that container metadata to be returned in the response. Returns: ItemPaged[ContainerProperties]: An iterable (auto-paging) of ContainerProperties. """ try: containers = [] async for container in self.blob_service_client.list_containers( name_starts_with=name_starts_with, include_metadata=include_metadata): containers.append(container) return containers except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return None async def list_containers_name(self, name_starts_with=None): """Lists containers' name. Args: name_starts_with (str): Filters the results to return only containers whose names begin with the specified prefix. Returns: list: A list of strings representing the container names. """ containers_list = [] containers = await self._list_containers( name_starts_with=name_starts_with, include_metadata=False) if (containers is None): return containers_list for container in containers: containers_list.append(container['name']) return containers_list async def create_append_blob(self, container_name, blob_name, replace_blob=False): """Creates an append blob in an existing container. Args: container_name (str): The name of the container. blob_name (str): The name of the blob. replace_blob (bool): If True, deletes existing blob with same name Returns: bool: The return value. True for success, False otherwise. """ success = False try: blob_client = self.blob_service_client.get_blob_client( container_name, blob_name) # raise ResourceNotFoundError if blob does not exist await blob_client.get_blob_properties() # blob exists already if (replace_blob is True): await blob_client.create_append_blob() success = True except ResourceNotFoundError: await blob_client.create_append_blob() success = True except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return success async def create_page_blob(self, container_name, blob_name, size=1024, content_settings=None, metadata=None, premium_page_blob_tier=None): """Creates a page blob in an existing container. Args: container_name (str): The name of the container. blob_name (str): The name of the blob. size (int): This specifies the maximum size for the page blob, up to 1 TB. The page blob size must be aligned to a 512-byte boundary content_settings (ContentSettings): ContentSettings object used to set blob properties. Used to set content type, encoding, language, disposition, md5, and cache control. metadata (dict(str, str)): Name-value pairs associated with the blob as metadata premium_page_blob_tier (PremiumPageBlobTier): A page blob tier value to set the blob to Returns: bool: The return value. True for success, False otherwise. 
""" success = False try: blob_client = self.blob_service_client.get_blob_client( container_name, blob_name) await blob_client.create_page_blob(size, content_settings, metadata, premium_page_blob_tier) success = True except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return success async def delete_blob(self, container_name, blob_name): """Deletes a blob. Args: container_name (str): The name of the container. blob_name (str): The name of the blob. Returns: bool: The return value. True for success, False otherwise. """ success = False try: blob_client = self.blob_service_client.get_blob_client( container_name, blob_name) await blob_client.delete_blob() success = True except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return success async def list_blobs(self, container_name): """Lists the blobs in the specified container. Args: container_name (str): The name of the container. Returns: list: A list of strings representing the blob names. """ blobs_list = [] try: container_client = self.blob_service_client.get_container_client( container_name) async for blob in container_client.list_blobs(): blobs_list.append(blob) except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception(f'Fatal error') return blobs_list async def upload_data(self, data, container_name, blob_name, blob_type='BlockBlob'): """Creates a new blob from a data source with automatic chunking. Args: data: The blob data to upload. container_name (str): The name of the container. blob_name (str): The name of the blob. blob_typr (str): The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob. Returns: bool: The return value. True for success, False otherwise. """ success = False try: blob_client = self.blob_service_client.get_blob_client( container_name, blob_name) await blob_client.upload_blob(data) success = True except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return success async def append_block(self, data, container_name, blob_name): """Commits a new block of data to the end of the existing append blob. Args: data: Content of the block. container_name (str): The name of the container. blob_name (str): The name of the blob. Returns: bool: The return value. True for success, False otherwise. """ success = False try: blob_client = self.blob_service_client.get_blob_client( container_name, blob_name) await blob_client.append_block(data) success = True except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('') return success async def download_data(self, container_name, blob_name): """Downloads a blob. Args: container_name (str): The name of the container. blob_name (str): The name of the blob. Returns: stream: The data stream """ try: blob_client = self.blob_service_client.get_blob_client( container_name, blob_name) stream = await blob_client.download_blob() return await stream.readall() except AzureError as err: self._logAzureError(err=err) except Exception: logger.exception('')
class StorageAppendBlobTestAsync(StorageTestCase): def setUp(self): super(StorageAppendBlobTestAsync, self).setUp() url = self._get_account_url() credential = self._get_shared_key_credential() self.bsc = BlobServiceClient(url, credential=credential, max_block_size=4 * 1024, transport=AiohttpTestTransport()) self.config = self.bsc._config self.container_name = self.get_resource_name('utcontainer') def tearDown(self): if not self.is_playback(): loop = asyncio.get_event_loop() try: loop.run_until_complete( self.bsc.delete_container(self.container_name)) except: pass if os.path.isfile(FILE_PATH): try: os.remove(FILE_PATH) except: pass return super(StorageAppendBlobTestAsync, self).tearDown() #--Helpers----------------------------------------------------------------- async def _setup(self): if not self.is_playback(): try: await self.bsc.create_container(self.container_name) except: pass def _get_blob_reference(self): return self.get_resource_name(TEST_BLOB_PREFIX) async def _create_blob(self): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.create_append_blob() return blob async def assertBlobEqual(self, blob, expected_data): stream = await blob.download_blob() actual_data = await stream.content_as_bytes() self.assertEqual(actual_data, expected_data) class NonSeekableFile(object): def __init__(self, wrapped_file): self.wrapped_file = wrapped_file def write(self, data): self.wrapped_file.write(data) def read(self, count): return self.wrapped_file.read(count) #--Test cases for append blobs -------------------------------------------- async def _test_create_blob_async(self): # Arrange await self._setup() blob_name = self._get_blob_reference() # Act blob = self.bsc.get_blob_client(self.container_name, blob_name) create_resp = await blob.create_append_blob() # Assert blob_properties = await blob.get_blob_properties() self.assertIsNotNone(blob_properties) self.assertEqual(blob_properties.etag, create_resp.get('etag')) self.assertEqual(blob_properties.last_modified, create_resp.get('last_modified')) @record def test_create_blob_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_create_blob_async()) async def _test_create_blob_with_lease_id_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act lease = await blob.acquire_lease() create_resp = await blob.create_append_blob(lease=lease) # Assert blob_properties = await blob.get_blob_properties() self.assertIsNotNone(blob_properties) self.assertEqual(blob_properties.etag, create_resp.get('etag')) self.assertEqual(blob_properties.last_modified, create_resp.get('last_modified')) @record def test_create_blob_with_lease_id_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_create_blob_with_lease_id_async()) async def _test_create_blob_with_metadata_async(self): # Arrange await self._setup() metadata = {'hello': 'world', 'number': '42'} blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.create_append_blob(metadata=metadata) # Assert md = await blob.get_blob_properties() self.assertDictEqual(md.metadata, metadata) @record def test_create_blob_with_metadata_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_create_blob_with_metadata_async()) async def _test_append_block_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act for i in range(5): resp = await blob.append_block( u'block 
{0}'.format(i).encode('utf-8')) self.assertEqual(int(resp['blob_append_offset']), 7 * i) self.assertEqual(resp['blob_committed_block_count'], i + 1) self.assertIsNotNone(resp['etag']) self.assertIsNotNone(resp['last_modified']) # Assert await self.assertBlobEqual(blob, b'block 0block 1block 2block 3block 4') @record def test_append_block_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_block_async()) async def _test_append_block_unicode_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act resp = await blob.append_block(u'啊齄丂狛狜', encoding='utf-16') self.assertEqual(int(resp['blob_append_offset']), 0) self.assertEqual(resp['blob_committed_block_count'], 1) self.assertIsNotNone(resp['etag']) self.assertIsNotNone(resp['last_modified']) # Assert @record def test_append_block_unicode_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_block_unicode_async()) async def _test_append_block_with_md5_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act resp = await blob.append_block(b'block', validate_content=True) self.assertEqual(int(resp['blob_append_offset']), 0) self.assertEqual(resp['blob_committed_block_count'], 1) self.assertIsNotNone(resp['etag']) self.assertIsNotNone(resp['last_modified']) # Assert @record def test_append_block_with_md5_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_block_with_md5_async()) async def _test_create_append_blob_with_no_overwrite_async(self): # Arrange await self._setup() blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512) # Act create_resp = await blob.upload_blob(data1, overwrite=True, blob_type=BlobType.AppendBlob, metadata={'BlobData': 'Data1'}) update_resp = await blob.upload_blob(data2, overwrite=False, blob_type=BlobType.AppendBlob, metadata={'BlobData': 'Data2'}) props = await blob.get_blob_properties() # Assert appended_data = data1 + data2 await self.assertBlobEqual(blob, appended_data) self.assertEqual(props.etag, update_resp.get('etag')) self.assertEqual(props.blob_type, BlobType.AppendBlob) self.assertEqual(props.last_modified, update_resp.get('last_modified')) self.assertEqual(props.metadata, {'BlobData': 'Data1'}) self.assertEqual(props.size, LARGE_BLOB_SIZE + LARGE_BLOB_SIZE + 512) @record def test_create_append_blob_with_no_overwrite_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_create_append_blob_with_no_overwrite_async()) async def _test_create_append_blob_with_overwrite_async(self): # Arrange await self._setup() blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) data1 = self.get_random_bytes(LARGE_BLOB_SIZE) data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512) # Act create_resp = await blob.upload_blob(data1, overwrite=True, blob_type=BlobType.AppendBlob, metadata={'BlobData': 'Data1'}) update_resp = await blob.upload_blob(data2, overwrite=True, blob_type=BlobType.AppendBlob, metadata={'BlobData': 'Data2'}) props = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data2) self.assertEqual(props.etag, update_resp.get('etag')) self.assertEqual(props.last_modified, update_resp.get('last_modified')) self.assertEqual(props.metadata, {'BlobData': 'Data2'}) self.assertEqual(props.blob_type, BlobType.AppendBlob) 
self.assertEqual(props.size, LARGE_BLOB_SIZE + 512) @record def test_create_append_blob_with_overwrite_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_create_append_blob_with_overwrite_async()) async def _test_append_blob_from_bytes_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act data = b'abcdefghijklmnopqrstuvwxyz' append_resp = await blob.upload_blob(data, blob_type=BlobType.AppendBlob) blob_properties = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data) self.assertEqual(blob_properties.etag, append_resp['etag']) self.assertEqual(blob_properties.last_modified, append_resp['last_modified']) @record def test_append_blob_from_bytes_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_blob_from_bytes_async()) async def _test_append_blob_from_0_bytes_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act data = b'' append_resp = await blob.upload_blob(data, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) # appending nothing should not make any network call self.assertIsNone(append_resp.get('etag')) self.assertIsNone(append_resp.get('last_modified')) @record def test_append_blob_from_0_bytes_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_blob_from_0_bytes_async()) async def _test_append_blob_from_bytes_with_progress_async(self): # Arrange await self._setup() blob = await self._create_blob() data = b'abcdefghijklmnopqrstuvwxyz' # Act progress = [] def progress_gen(upload): progress.append((0, len(upload))) yield upload upload_data = progress_gen(data) await blob.upload_blob(upload_data, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) self.assert_upload_progress(len(data), self.config.max_block_size, progress) @record def test_append_blob_from_bytes_with_progress_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_bytes_with_progress_async()) async def _test_append_blob_from_bytes_with_index_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act data = b'abcdefghijklmnopqrstuvwxyz' await blob.upload_blob(data[3:], blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data[3:]) @record def test_append_blob_from_bytes_with_index_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_bytes_with_index_async()) async def _test_append_blob_from_bytes_with_index_and_count_async(self): # Arrange await self._setup() blob = await self._create_blob() # Act data = b'abcdefghijklmnopqrstuvwxyz' await blob.upload_blob(data[3:], length=5, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data[3:8]) @record def test_append_blob_from_bytes_with_index_and_count_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_bytes_with_index_and_count_async()) async def _test_append_blob_from_bytes_chunked_upload_async(self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act append_resp = await blob.upload_blob(data, blob_type=BlobType.AppendBlob) blob_properties = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data) self.assertEqual(blob_properties.etag, append_resp['etag']) self.assertEqual(blob_properties.last_modified, append_resp.get('last_modified')) @record def 
test_append_blob_from_bytes_chunked_upload_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_bytes_chunked_upload_async()) async def _test_append_blob_from_bytes_with_progress_chunked_upload_async( self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) # Act progress = [] def progress_gen(upload): n = self.config.max_block_size total = len(upload) current = 0 while upload: progress.append((current, total)) yield upload[:n] current += len(upload[:n]) upload = upload[n:] upload_data = progress_gen(data) await blob.upload_blob(upload_data, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) self.assert_upload_progress(len(data), self.config.max_block_size, progress) @record def test_append_blob_from_bytes_with_progress_chunked_upload_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self. _test_append_blob_from_bytes_with_progress_chunked_upload_async()) async def _test_append_blob_from_bytes_chunked_upload_with_index_and_count_async( self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) index = 33 blob_size = len(data) - 66 # Act await blob.upload_blob(data[index:], length=blob_size, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data[index:index + blob_size]) @record def test_append_blob_from_bytes_chunked_upload_with_index_and_count_async( self): loop = asyncio.get_event_loop() loop.run_until_complete( self. _test_append_blob_from_bytes_chunked_upload_with_index_and_count_async( )) async def _test_append_blob_from_path_chunked_upload_async(self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act with open(FILE_PATH, 'rb') as stream: append_resp = await blob.upload_blob(stream, blob_type=BlobType.AppendBlob) blob_properties = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data) self.assertEqual(blob_properties.etag, append_resp.get('etag')) self.assertEqual(blob_properties.last_modified, append_resp.get('last_modified')) @record def test_append_blob_from_path_chunked_upload_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_path_chunked_upload_async()) async def _test_append_blob_from_path_with_progress_chunked_upload_async( self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act progress = [] def progress_gen(upload): n = self.config.max_block_size total = LARGE_BLOB_SIZE current = 0 while upload: chunk = upload.read(n) if not chunk: break progress.append((current, total)) yield chunk current += len(chunk) with open(FILE_PATH, 'rb') as stream: upload_data = progress_gen(stream) await blob.upload_blob(upload_data, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) self.assert_upload_progress(len(data), self.config.max_block_size, progress) @record def test_append_blob_from_path_with_progress_chunked_upload_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self. 
_test_append_blob_from_path_with_progress_chunked_upload_async()) async def _test_append_blob_from_stream_chunked_upload_async(self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act with open(FILE_PATH, 'rb') as stream: append_resp = await blob.upload_blob(stream, blob_type=BlobType.AppendBlob) blob_properties = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data) self.assertEqual(blob_properties.etag, append_resp.get('etag')) self.assertEqual(blob_properties.last_modified, append_resp.get('last_modified')) @record def test_append_blob_from_stream_chunked_upload_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_stream_chunked_upload_async()) async def _test_append_blob_from_stream_non_seekable_chunked_upload_known_size_async( self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream: stream.write(data) blob_size = len(data) - 66 # Act with open(FILE_PATH, 'rb') as stream: non_seekable_file = StorageAppendBlobTestAsync.NonSeekableFile( stream) await blob.upload_blob(non_seekable_file, length=blob_size, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data[:blob_size]) @record def test_append_blob_from_stream_non_seekable_chunked_upload_known_size_async( self): loop = asyncio.get_event_loop() loop.run_until_complete( self. _test_append_blob_from_stream_non_seekable_chunked_upload_known_size_async( )) async def _test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size_async( self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act with open(FILE_PATH, 'rb') as stream: non_seekable_file = StorageAppendBlobTestAsync.NonSeekableFile( stream) await blob.upload_blob(non_seekable_file, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) @record def test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size_async( self): loop = asyncio.get_event_loop() loop.run_until_complete( self. 
_test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size_async( )) async def _test_append_blob_from_stream_with_multiple_appends_async(self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream1: stream1.write(data) with open(FILE_PATH, 'wb') as stream2: stream2.write(data) # Act with open(FILE_PATH, 'rb') as stream1: await blob.upload_blob(stream1, blob_type=BlobType.AppendBlob) with open(FILE_PATH, 'rb') as stream2: await blob.upload_blob(stream2, blob_type=BlobType.AppendBlob) # Assert data = data * 2 await self.assertBlobEqual(blob, data) @record def test_append_blob_from_stream_with_multiple_appends_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_stream_with_multiple_appends_async()) async def _test_append_blob_from_stream_chunked_upload_with_count_async( self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act blob_size = len(data) - 301 with open(FILE_PATH, 'rb') as stream: await blob.upload_blob(stream, length=blob_size, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data[:blob_size]) @record def test_append_blob_from_stream_chunked_upload_with_count_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_stream_chunked_upload_with_count_async( )) async def _test_append_blob_from_stream_chunked_upload_with_count_parallel_async( self): # parallel tests introduce random order of requests, can only run live if TestMode.need_recording_file(self.test_mode): return # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_bytes(LARGE_BLOB_SIZE) with open(FILE_PATH, 'wb') as stream: stream.write(data) # Act blob_size = len(data) - 301 with open(FILE_PATH, 'rb') as stream: append_resp = await blob.upload_blob(stream, length=blob_size, blob_type=BlobType.AppendBlob) blob_properties = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data[:blob_size]) self.assertEqual(blob_properties.etag, append_resp.get('etag')) self.assertEqual(blob_properties.last_modified, append_resp.get('last_modified')) @record def test_append_blob_from_stream_chunked_upload_with_count_parallel_async( self): loop = asyncio.get_event_loop() loop.run_until_complete( self. 
_test_append_blob_from_stream_chunked_upload_with_count_parallel_async( )) async def _test_append_blob_from_text_async(self): # Arrange await self._setup() blob = await self._create_blob() text = u'hello 啊齄丂狛狜 world' data = text.encode('utf-8') # Act append_resp = await blob.upload_blob(text, blob_type=BlobType.AppendBlob) blob_properties = await blob.get_blob_properties() # Assert await self.assertBlobEqual(blob, data) self.assertEqual(blob_properties.etag, append_resp.get('etag')) self.assertEqual(blob_properties.last_modified, append_resp.get('last_modified')) @record def test_append_blob_from_text_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_blob_from_text_async()) async def _test_append_blob_from_text_with_encoding_async(self): # Arrange await self._setup() blob = await self._create_blob() text = u'hello 啊齄丂狛狜 world' data = text.encode('utf-16') # Act await blob.upload_blob(text, encoding='utf-16', blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, data) @record def test_append_blob_from_text_with_encoding_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_text_with_encoding_async()) async def _test_append_blob_from_text_with_encoding_and_progress_async( self): # Arrange await self._setup() blob = await self._create_blob() text = u'hello 啊齄丂狛狜 world' data = text.encode('utf-16') # Act progress = [] def progress_gen(upload): progress.append((0, len(data))) yield upload upload_data = progress_gen(text) await blob.upload_blob(upload_data, encoding='utf-16', blob_type=BlobType.AppendBlob) # Assert self.assert_upload_progress(len(data), self.config.max_block_size, progress) @record def test_append_blob_from_text_with_encoding_and_progress_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_text_with_encoding_and_progress_async( )) async def _test_append_blob_from_text_chunked_upload_async(self): # Arrange await self._setup() blob = await self._create_blob() data = self.get_random_text_data(LARGE_BLOB_SIZE) encoded_data = data.encode('utf-8') # Act await blob.upload_blob(data, blob_type=BlobType.AppendBlob) # Assert await self.assertBlobEqual(blob, encoded_data) @record def test_append_blob_from_text_chunked_upload_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_append_blob_from_text_chunked_upload_async()) async def _test_append_blob_with_md5_async(self): # Arrange await self._setup() blob = await self._create_blob() data = b'hello world' # Act await blob.append_block(data, validate_content=True) # Assert @record def test_append_blob_with_md5_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_blob_with_md5_async())
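# The chunked-upload tests above share one pattern: wrap the payload in a generator
# that records a (bytes_sent, total_bytes) tuple before yielding each
# max_block_size-sized chunk, then pass the generator straight to upload_blob.
# A minimal, self-contained sketch of that pattern (plain Python, no storage
# client involved; the 4 KB chunk size stands in for self.config.max_block_size):
def progress_chunks(payload, chunk_size, progress):
    # Yield chunk_size slices of payload, logging (current, total) before each one.
    total = len(payload)
    current = 0
    while current < total:
        progress.append((current, total))
        chunk = payload[current:current + chunk_size]
        yield chunk
        current += len(chunk)

progress = []
received = b"".join(progress_chunks(b"x" * 10240, 4096, progress))
assert received == b"x" * 10240
assert progress == [(0, 10240), (4096, 10240), (8192, 10240)]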
class StorageLoggingTestAsync(StorageTestCase): def setUp(self): super(StorageLoggingTestAsync, self).setUp() url = self._get_account_url() credential = self._get_shared_key_credential() self.bsc = BlobServiceClient(url, credential=credential, transport=AiohttpTestTransport()) self.container_name = self.get_resource_name('utcontainer') def tearDown(self): if not self.is_playback(): loop = asyncio.get_event_loop() try: loop.run_until_complete( self.bsc.delete_container(self.container_name)) except: pass return super(StorageLoggingTestAsync, self).tearDown() async def _setup(self): if not self.is_playback(): try: # create source blob to be copied from self.source_blob_name = self.get_resource_name('srcblob') self.source_blob_data = self.get_random_bytes(4 * 1024) source_blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name) await self.bsc.create_container(self.container_name) await source_blob.upload_blob(self.source_blob_data) # generate a SAS so that it is accessible with a URL sas_token = source_blob.generate_shared_access_signature( permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=1), ) sas_source = BlobClient(source_blob.url, credential=sas_token) self.source_blob_url = sas_source.url except: pass async def _test_authorization_is_scrubbed_off(self): await self._setup() # Arrange container = self.bsc.get_container_client(self.container_name) # Act with LogCaptured(self) as log_captured: await container.get_container_properties(logging_enable=True) log_as_str = log_captured.getvalue() # Assert # make sure authorization header is logged, but its value is not # the keyword SharedKey is present in the authorization header's value self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str) self.assertFalse('SharedKey' in log_as_str) @record def test_authorization_is_scrubbed_off(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_authorization_is_scrubbed_off()) async def _test_sas_signature_is_scrubbed_off(self): # Test can only run live if TestMode.need_recording_file(self.test_mode): return await self._setup() # Arrange container = self.bsc.get_container_client(self.container_name) token = container.generate_shared_access_signature( permission=ContainerPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=1), ) # parse out the signed signature token_components = parse_qs(token) signed_signature = quote( token_components[QueryStringConstants.SIGNED_SIGNATURE][0]) sas_service = ContainerClient(container.url, credential=token) # Act with LogCaptured(self) as log_captured: await sas_service.get_account_information(logging_enable=True) log_as_str = log_captured.getvalue() # Assert # make sure the query parameter 'sig' is logged, but its value is not self.assertTrue( QueryStringConstants.SIGNED_SIGNATURE in log_as_str) self.assertFalse(signed_signature in log_as_str) @record def test_sas_signature_is_scrubbed_off(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_sas_signature_is_scrubbed_off()) async def _test_copy_source_sas_is_scrubbed_off(self): # Test can only run live if TestMode.need_recording_file(self.test_mode): return await self._setup() # Arrange dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # parse out the signed signature token_components = parse_qs(self.source_blob_url) signed_signature = quote( token_components[QueryStringConstants.SIGNED_SIGNATURE][0]) # Act with LogCaptured(self) as log_captured: await 
dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True, logging_enable=True) log_as_str = log_captured.getvalue() # Assert # make sure the query parameter 'sig' is logged, but its value is not self.assertTrue( QueryStringConstants.SIGNED_SIGNATURE in log_as_str) self.assertFalse(signed_signature in log_as_str) # make sure authorization header is logged, but its value is not # the keyword SharedKey is present in the authorization header's value self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str) self.assertFalse('SharedKey' in log_as_str) @record def test_copy_source_sas_is_scrubbed_off(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_copy_source_sas_is_scrubbed_off())
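# Both scrubbing tests locate the value that must never appear in a log the same
# way: parse the SAS query string with parse_qs, take the 'sig' component, and
# re-quote it so it matches the percent-encoded form a logged URL would carry.
# A stdlib-only sketch of that extraction (the token and log line below are
# fabricated placeholders, not real credentials):
from urllib.parse import parse_qs, quote

sample_sas = "sv=2019-02-02&sr=b&se=2020-01-01&sig=abc%2Fdef%3D"
signed_signature = quote(parse_qs(sample_sas)["sig"][0])  # 'abc/def=' -> 'abc/def%3D'

log_line = "GET https://account.blob.core.windows.net/c/b?sv=2019-02-02&sig=REDACTED"
assert "sig" in log_line and signed_signature not in log_line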
class StorageBlobEncryptionTestAsync(AsyncStorageTestCase): # --Helpers----------------------------------------------------------------- async def _setup(self, storage_account, key): # test chunking functionality by reducing the threshold # for chunking and the size of each chunk, otherwise # the tests would take too long to execute self.bsc = BlobServiceClient(self.account_url(storage_account, "blob"), credential=key, max_single_put_size=32 * 1024, max_block_size=4 * 1024, max_page_size=4 * 1024, max_single_get_size=4 * 1024, transport=AiohttpTestTransport()) self.config = self.bsc._config self.container_name = self.get_resource_name('utcontainer') self.blob_types = (BlobType.BlockBlob, BlobType.PageBlob, BlobType.AppendBlob) self.bytes = b'Foo' if self.is_live: container = self.bsc.get_container_client(self.container_name) try: await container.create_container() except: pass def _teardown(self, file_name): if path.isfile(file_name): try: remove(file_name) except: pass def _get_container_reference(self): return self.get_resource_name(TEST_CONTAINER_PREFIX) def _get_blob_reference(self, blob_type): return self.get_resource_name(TEST_BLOB_PREFIXES[blob_type.value]) async def _create_small_blob(self, blob_type): blob_name = self._get_blob_reference(blob_type) blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.upload_blob(self.bytes, blob_type=blob_type) return blob # --Test cases for blob encryption ---------------------------------------- @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_missing_attribute_kek_wrap_async(self, resource_group, location, storage_account, storage_account_key): # In the shared method _generate_blob_encryption_key await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True valid_key = KeyWrapper('key1') # Act invalid_key_1 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_1.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm invalid_key_1.get_kid = valid_key.get_kid # No attribute wrap_key self.bsc.key_encryption_key = invalid_key_1 with self.assertRaises(AttributeError): await self._create_small_blob(BlobType.BlockBlob) invalid_key_2 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_2.wrap_key = valid_key.wrap_key invalid_key_2.get_kid = valid_key.get_kid # No attribute get_key_wrap_algorithm self.bsc.key_encryption_key = invalid_key_2 with self.assertRaises(AttributeError): await self._create_small_blob(BlobType.BlockBlob) invalid_key_3 = lambda: None # functions are objects, so this effectively creates an empty object invalid_key_3.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm invalid_key_3.wrap_key = valid_key.wrap_key # No attribute get_kid self.bsc.key_encryption_key = invalid_key_3 with self.assertRaises(AttributeError): await self._create_small_blob(BlobType.BlockBlob) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_invalid_value_kek_wrap_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.key_encryption_key.get_key_wrap_algorithm = None try: await self._create_small_blob(BlobType.BlockBlob) self.fail() except AttributeError as e: self.assertEqual( str(e), _ERROR_OBJECT_INVALID.format('key encryption key', 'get_key_wrap_algorithm')) 
self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.key_encryption_key.get_kid = None with self.assertRaises(AttributeError): await self._create_small_blob(BlobType.BlockBlob) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.key_encryption_key.wrap_key = None with self.assertRaises(AttributeError): await self._create_small_blob(BlobType.BlockBlob) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_missing_attribute_kek_unwrap_async(self, resource_group, location, storage_account, storage_account_key): # Shared between all services in decrypt_blob await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True valid_key = KeyWrapper('key1') self.bsc.key_encryption_key = valid_key blob = await self._create_small_blob(BlobType.BlockBlob) # Act # Note that KeyWrapper has a default value for key_id, so these Exceptions # are not due to non_matching kids. invalid_key_1 = lambda: None #functions are objects, so this effectively creates an empty object invalid_key_1.get_kid = valid_key.get_kid #No attribute unwrap_key blob.key_encryption_key = invalid_key_1 with self.assertRaises(HttpResponseError): await (await blob.download_blob()).content_as_bytes() invalid_key_2 = lambda: None #functions are objects, so this effectively creates an empty object invalid_key_2.unwrap_key = valid_key.unwrap_key blob.key_encryption_key = invalid_key_2 #No attribute get_kid with self.assertRaises(HttpResponseError): await (await blob.download_blob()).content_as_bytes() @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_invalid_value_kek_unwrap_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') blob = await self._create_small_blob(BlobType.BlockBlob) # Act blob.key_encryption_key = KeyWrapper('key1') blob.key_encryption_key.unwrap_key = None with self.assertRaises(HttpResponseError) as e: await (await blob.download_blob()).content_as_bytes() self.assertEqual(str(e.exception), 'Decryption failed.') @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_kek_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') blob = await self._create_small_blob(BlobType.BlockBlob) # Act content = await (await blob.download_blob()).content_as_bytes() # Assert self.assertEqual(content, self.bytes) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_resolver_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') key_resolver = KeyResolver() key_resolver.put_key(self.bsc.key_encryption_key) self.bsc.key_resolver_function = key_resolver.resolve_key blob = await self._create_small_blob(BlobType.BlockBlob) # Act self.bsc.key_encryption_key = None content = await (await blob.download_blob()).content_as_bytes() # Assert self.assertEqual(content, self.bytes) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_kek_RSA_async(self, resource_group, location, 
storage_account, storage_account_key): # We can only generate random RSA keys, so this must be run live or # the playback test will fail due to a change in kek values. await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = RSAKeyWrapper('key2') blob = await self._create_small_blob(BlobType.BlockBlob) # Act content = await blob.download_blob() data = b"" async for d in content.chunks(): data += d # Assert self.assertEqual(data, self.bytes) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_nonmatching_kid_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') blob = await self._create_small_blob(BlobType.BlockBlob) # Act self.bsc.key_encryption_key.kid = 'Invalid' # Assert with self.assertRaises(HttpResponseError) as e: await (await blob.download_blob()).content_as_bytes() self.assertEqual(str(e.exception), 'Decryption failed.') @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_invalid_stream_type_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') small_stream = StringIO(u'small') large_stream = StringIO(u'large' * self.config.max_single_put_size) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert # Block blob specific single shot with self.assertRaises(TypeError) as e: await blob.upload_blob(small_stream, length=5) self.assertTrue( 'Blob data should be of type bytes.' in str(e.exception)) # Generic blob chunked with self.assertRaises(TypeError) as e: await blob.upload_blob(large_stream) self.assertTrue( 'Blob data should be of type bytes.' 
in str(e.exception)) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_chunking_required_mult_of_block_size_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(self.config.max_single_put_size + self.config.max_block_size) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content, max_concurrency=3) blob_content = await (await blob.download_blob()).content_as_bytes( max_concurrency=3) # Assert self.assertEqual(content, blob_content) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_chunking_required_non_mult_of_block_size_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = urandom(self.config.max_single_put_size + 1) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content, max_concurrency=3) blob_content = await (await blob.download_blob()).content_as_bytes( max_concurrency=3) # Assert self.assertEqual(content, blob_content) @pytest.mark.live_test_only @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_chunking_required_range_specified_async( self, resource_group, location, storage_account, storage_account_key): # parallel tests introduce random order of requests, can only run live await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(self.config.max_single_put_size * 2) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content, length=self.config.max_single_put_size + 53, max_concurrency=3) blob_content = await (await blob.download_blob()).content_as_bytes( max_concurrency=3) # Assert self.assertEqual(content[:self.config.max_single_put_size + 53], blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_block_blob_single_shot_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = b'small' blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content) blob_content = await (await blob.download_blob()).content_as_bytes() # Assert self.assertEqual(content, blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_range_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = 
KeyWrapper('key1') content = b'Random repeats' * self.config.max_single_put_size * 5 # All page blob uploads call _upload_chunks, so this will test the ability # of that function to handle ranges even though it's a small blob blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content[2:], length=self.config.max_single_put_size + 5, max_concurrency=1) blob_content = await (await blob.download_blob()).content_as_bytes( max_concurrency=1) # Assert self.assertEqual(content[2:2 + self.config.max_single_put_size + 5], blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_empty_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = b'' blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content) blob_content = await (await blob.download_blob()).content_as_bytes( max_concurrency=2) # Assert self.assertEqual(content, blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_serial_upload_chunking_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(self.config.max_single_put_size + 1) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content, max_concurrency=1) blob_content = await (await blob.download_blob()).content_as_bytes( max_concurrency=1) # Assert self.assertEqual(content, blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_range_beginning_to_middle_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content, max_concurrency=1) blob_content = await (await blob.download_blob( offset=0, length=50)).content_as_bytes(max_concurrency=1) # Assert self.assertEqual(content[:50], blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_range_middle_to_end_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content, max_concurrency=1) blob_content = await (await blob.download_blob( offset=100, length=28)).content_as_bytes() blob_content2 = await (await blob.download_blob(offset=100 )).content_as_bytes() # Assert self.assertEqual(content[100:], blob_content) self.assertEqual(content[100:], blob_content2) 
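# The ranged-download tests in this class all reduce to one piece of arithmetic:
# download_blob(offset=o, length=n) should hand back content[o:o + n], and
# omitting length reads through to the end (hence blob_content2 above equals
# content[100:]). A tiny sketch of that mapping (pure Python; the helper name
# is illustrative, not part of the SDK):
def expected_range(content, offset=0, length=None):
    # Mirror the slice the assertions compare the downloaded bytes against.
    return content[offset:] if length is None else content[offset:offset + length]

content = bytes(range(128))
assert expected_range(content, offset=100, length=28) == content[100:128]
assert expected_range(content, offset=100) == content[100:]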
@GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_range_middle_to_middle_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content) blob_content = await (await blob.download_blob( offset=5, length=93)).content_as_bytes() # Assert self.assertEqual(content[5:98], blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_range_aligns_on_16_byte_block_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content) blob_content = await (await blob.download_blob( offset=48, length=16)).content_as_bytes() # Assert self.assertEqual(content[48:64], blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_range_expanded_to_beginning_block_align_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content) blob_content = await (await blob.download_blob( offset=5, length=50)).content_as_bytes() # Assert self.assertEqual(content[5:55], blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_range_expanded_to_beginning_iv_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') self.bsc.require_encryption = True content = self.get_random_bytes(128) blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Act await blob.upload_blob(content) blob_content = await (await blob.download_blob( offset=22, length=20)).content_as_bytes() # Assert self.assertEqual(content[22:42], blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_put_blob_strict_mode_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True content = urandom(512) # Assert for service in self.blob_types: blob_name = self._get_blob_reference(service) blob = self.bsc.get_blob_client(self.container_name, blob_name) with self.assertRaises(ValueError): await blob.upload_blob(content, blob_type=service) stream = BytesIO(content) with self.assertRaises(ValueError): await blob.upload_blob(stream, length=512, blob_type=service) file_name = 'strict_mode_async.temp.dat' with open(file_name, 'wb') as stream: stream.write(content) with open(file_name, 'rb') as stream: with 
self.assertRaises(ValueError): await blob.upload_blob(stream, blob_type=service) with self.assertRaises(ValueError): await blob.upload_blob('To encrypt', blob_type=service) self._teardown(file_name) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_strict_mode_no_policy_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') blob = await self._create_small_blob(BlobType.BlockBlob) # Act blob.key_encryption_key = None # Assert with self.assertRaises(ValueError): await (await blob.download_blob()).content_as_bytes() @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_strict_mode_unencrypted_blob_async( self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) blob = await self._create_small_blob(BlobType.BlockBlob) # Act blob.require_encryption = True blob.key_encryption_key = KeyWrapper('key1') # Assert with self.assertRaises(HttpResponseError): await (await blob.download_blob()).content_as_bytes() @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_invalid_methods_fail_block_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') blob_name = self._get_blob_reference(BlobType.BlockBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with self.assertRaises(ValueError) as e: await blob.stage_block('block1', urandom(32)) self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) with self.assertRaises(ValueError) as e: await blob.commit_block_list(['block1']) self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_invalid_methods_fail_append_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') blob_name = self._get_blob_reference(BlobType.AppendBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with self.assertRaises(ValueError) as e: await blob.append_block(urandom(32)) self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) with self.assertRaises(ValueError) as e: await blob.create_append_blob() self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) # All append_from operations funnel into append_from_stream, so testing one is sufficient with self.assertRaises(ValueError) as e: await blob.upload_blob(b'To encrypt', blob_type=BlobType.AppendBlob) self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_invalid_methods_fail_page_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.key_encryption_key = KeyWrapper('key1') blob_name = self._get_blob_reference(BlobType.PageBlob) blob = self.bsc.get_blob_client(self.container_name, blob_name) # Assert with self.assertRaises(ValueError) as e: await blob.upload_page(urandom(512), offset=0, length=512, blob_type=BlobType.PageBlob) 
self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) with self.assertRaises(ValueError) as e: await blob.create_page_blob(512) self.assertEqual(str(e.exception), _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_validate_encryption_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True kek = KeyWrapper('key1') self.bsc.key_encryption_key = kek blob = await self._create_small_blob(BlobType.BlockBlob) # Act blob.require_encryption = False blob.key_encryption_key = None content = await blob.download_blob() data = await content.content_as_bytes() encryption_data = _dict_to_encryption_data( loads(content.properties.metadata['encryptiondata'])) iv = encryption_data.content_encryption_IV content_encryption_key = _validate_and_unwrap_cek( encryption_data, kek, None) cipher = _generate_AES_CBC_cipher(content_encryption_key, iv) decryptor = cipher.decryptor() unpadder = PKCS7(128).unpadder() content = decryptor.update(data) + decryptor.finalize() content = unpadder.update(content) + unpadder.finalize() self.assertEqual(self.bytes, content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_block_blob_from_star_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) await self._create_blob_from_star(BlobType.BlockBlob, "blob1", self.bytes, self.bytes) stream = BytesIO(self.bytes) await self._create_blob_from_star(BlobType.BlockBlob, "blob2", self.bytes, stream) file_name = 'block_star_async.temp.dat' with open(file_name, 'wb') as stream: stream.write(self.bytes) with open(file_name, 'rb') as stream: await self._create_blob_from_star(BlobType.BlockBlob, "blob3", self.bytes, stream) await self._create_blob_from_star(BlobType.BlockBlob, "blob4", b'To encrypt', 'To encrypt') self._teardown(file_name) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_create_page_blob_from_star_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) content = self.get_random_bytes(512) await self._create_blob_from_star(BlobType.PageBlob, "blob1", content, content) stream = BytesIO(content) await self._create_blob_from_star(BlobType.PageBlob, "blob2", content, stream, length=512) file_name = 'page_star_async.temp.dat' with open(file_name, 'wb') as stream: stream.write(content) with open(file_name, 'rb') as stream: await self._create_blob_from_star(BlobType.PageBlob, "blob3", content, stream) self._teardown(file_name) async def _create_blob_from_star(self, blob_type, blob_name, content, data, **kwargs): blob = self.bsc.get_blob_client(self.container_name, blob_name) blob.key_encryption_key = KeyWrapper('key1') blob.require_encryption = True await blob.upload_blob(data, blob_type=blob_type, **kwargs) blob_content = await (await blob.download_blob()).content_as_bytes() self.assertEqual(content, blob_content) @GlobalStorageAccountPreparer() @AsyncStorageTestCase.await_prepared_test async def test_get_blob_to_star_async(self, resource_group, location, storage_account, storage_account_key): await self._setup(storage_account, storage_account_key) self.bsc.require_encryption = True self.bsc.key_encryption_key = KeyWrapper('key1') blob = await 
self._create_small_blob(BlobType.BlockBlob) # Act content = await blob.download_blob() iter_blob = b"" async for data in content.chunks(): iter_blob += data bytes_blob = await (await blob.download_blob()).content_as_bytes() stream_blob = BytesIO() await (await blob.download_blob()).download_to_stream(stream_blob) stream_blob.seek(0) text_blob = await (await blob.download_blob()).content_as_text() # Assert self.assertEqual(self.bytes, iter_blob) self.assertEqual(self.bytes, bytes_blob) self.assertEqual(self.bytes, stream_blob.read()) self.assertEqual(self.bytes.decode(), text_blob)
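# test_validate_encryption_async decrypts the downloaded ciphertext by hand:
# AES in CBC mode with the unwrapped content-encryption key, followed by PKCS7
# unpadding. A self-contained round-trip through that same primitive stack
# (requires the 'cryptography' package; the throwaway key and IV below stand in
# for the CEK and IV the test recovers from the blob's 'encryptiondata' metadata):
from os import urandom
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.primitives.padding import PKCS7

key, iv = urandom(32), urandom(16)
padder = PKCS7(128).padder()
padded = padder.update(b'Foo') + padder.finalize()
encryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).encryptor()
ciphertext = encryptor.update(padded) + encryptor.finalize()

decryptor = Cipher(algorithms.AES(key), modes.CBC(iv)).decryptor()
unpadder = PKCS7(128).unpadder()
content = decryptor.update(ciphertext) + decryptor.finalize()
content = unpadder.update(content) + unpadder.finalize()
assert content == b'Foo'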
class StorageBlockBlobTestAsync(StorageTestCase): def setUp(self): super(StorageBlockBlobTestAsync, self).setUp() url = self._get_account_url() credential = self._get_shared_key_credential() # test chunking functionality by reducing the size of each chunk, # otherwise the tests would take too long to execute self.bsc = BlobServiceClient( url, credential=credential, connection_data_block_size=4 * 1024, max_single_put_size=32 * 1024, max_block_size=4 * 1024, transport=AiohttpTestTransport()) self.config = self.bsc._config self.container_name = self.get_resource_name('utcontainer') # create source blob to be copied from self.source_blob_name = self.get_resource_name('srcblob') self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE) blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name) # generate a SAS so that it is accessible with a URL sas_token = blob.generate_shared_access_signature( permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url def tearDown(self): if not self.is_playback(): loop = asyncio.get_event_loop() try: loop.run_until_complete(self.bsc.delete_container(self.container_name)) except: pass return super(StorageBlockBlobTestAsync, self).tearDown() async def _setup(self): blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name) if not self.is_playback(): try: await self.bsc.create_container(self.container_name) except: pass await blob.upload_blob(self.source_blob_data, overwrite=True) # generate a SAS so that it is accessible with a URL sas_token = blob.generate_shared_access_signature( permission=BlobSasPermissions(read=True), expiry=datetime.utcnow() + timedelta(hours=1), ) self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url async def _test_put_block_from_url_and_commit_async(self): # Arrange await self._setup() split = 4 * 1024 dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # Act part 1: make put block from url calls futures = [ dest_blob.stage_block_from_url( block_id=1, source_url=self.source_blob_url, source_offset=0, source_length=split), dest_blob.stage_block_from_url( block_id=2, source_url=self.source_blob_url, source_offset=split, source_length=split)] await asyncio.gather(*futures) # Assert blocks committed, uncommitted = await dest_blob.get_block_list('all') self.assertEqual(len(uncommitted), 2) self.assertEqual(len(committed), 0) # Act part 2: commit the blocks await dest_blob.commit_block_list(['1', '2']) # Assert destination blob has right content content = await (await dest_blob.download_blob()).content_as_bytes() self.assertEqual(content, self.source_blob_data) self.assertEqual(len(content), 8 * 1024) @record def test_put_block_from_url_and_commit_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_put_block_from_url_and_commit_async()) async def _test_put_block_from_url_and_validate_content_md5_async(self): # Arrange await self._setup() dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) src_md5 = StorageContentValidation.get_content_md5(self.source_blob_data) # Act part 1: put block from url with md5 validation await dest_blob.stage_block_from_url( block_id=1, source_url=self.source_blob_url, source_content_md5=src_md5, source_offset=0, source_length=8 * 1024) # Assert block was 
staged committed, uncommitted = await dest_blob.get_block_list('all') self.assertEqual(len(uncommitted), 1) self.assertEqual(len(committed), 0) # Act part 2: put block from url with wrong md5 fake_md5 = StorageContentValidation.get_content_md5(b"POTATO") with self.assertRaises(HttpResponseError) as error: await dest_blob.stage_block_from_url( block_id=2, source_url=self.source_blob_url, source_content_md5=fake_md5, source_offset=0, source_length=8 * 1024) self.assertEqual(error.exception.error_code, StorageErrorCode.md5_mismatch) # Assert block was not staged committed, uncommitted = await dest_blob.get_block_list('all') self.assertEqual(len(uncommitted), 1) self.assertEqual(len(committed), 0) @record def test_put_block_from_url_and_validate_content_md5_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_put_block_from_url_and_validate_content_md5_async()) async def _test_copy_blob_sync_async(self): # Arrange await self._setup() dest_blob_name = self.get_resource_name('destblob') dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name) # Act copy_props = await dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True) # Assert self.assertIsNotNone(copy_props) self.assertIsNotNone(copy_props['copy_id']) self.assertEqual('success', copy_props['copy_status']) # Verify content content = await (await dest_blob.download_blob()).content_as_bytes() self.assertEqual(self.source_blob_data, content) @record def test_copy_blob_sync_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_copy_blob_sync_async())
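# The MD5-validation test above stages a block alongside the source content's
# MD5 so the service can verify it in transit, then proves a wrong hash is
# rejected with md5_mismatch. Content-MD5 values on the wire are base64-encoded
# 16-byte MD5 digests; a stdlib sketch of computing one (assuming that is also
# what StorageContentValidation.get_content_md5 returns):
import hashlib
from base64 import b64encode

def content_md5(data):
    # Base64-encoded MD5 digest, as carried in (source_)content_md5 headers.
    return b64encode(hashlib.md5(data).digest()).decode('utf-8')

src_md5 = content_md5(b'some source payload')
fake_md5 = content_md5(b'POTATO')
assert src_md5 != fake_md5 and len(hashlib.md5(b'POTATO').digest()) == 16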
class StorageCPKAsyncTest(StorageTestCase): def setUp(self): super(StorageCPKAsyncTest, self).setUp() url = self._get_account_url() # test chunking functionality by reducing the size of each chunk, # otherwise the tests would take too long to execute self.bsc = BlobServiceClient( url, credential=self.settings.STORAGE_ACCOUNT_KEY, connection_data_block_size=1024, max_single_put_size=1024, min_large_block_upload_threshold=1024, max_block_size=1024, max_page_size=1024, transport=AiohttpTestTransport()) self.config = self.bsc._config self.container_name = self.get_resource_name('utcontainer') # prep some test data so that they can be used in upload tests self.byte_data = self.get_random_bytes(64 * 1024) if not self.is_playback(): loop = asyncio.get_event_loop() try: loop.run_until_complete( self.bsc.create_container(self.container_name)) except: pass def tearDown(self): if not self.is_playback(): loop = asyncio.get_event_loop() try: loop.run_until_complete( self.bsc.delete_container(self.container_name)) except: pass return super(StorageCPKAsyncTest, self).tearDown() # --Helpers----------------------------------------------------------------- def _get_blob_reference(self): return self.get_resource_name("cpk") async def _create_block_blob(self, blob_name=None, data=None, cpk=None, max_connections=1): blob_name = blob_name if blob_name else self._get_blob_reference() blob_client = self.bsc.get_blob_client(self.container_name, blob_name) data = data if data else b'' resp = await blob_client.upload_blob(data, cpk=cpk, max_connections=max_connections) return blob_client, resp async def _create_append_blob(self, cpk=None): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.create_append_blob(cpk=cpk) return blob async def _create_page_blob(self, cpk=None): blob_name = self._get_blob_reference() blob = self.bsc.get_blob_client(self.container_name, blob_name) await blob.create_page_blob(1024 * 1024, cpk=cpk) return blob # -- Test cases for APIs supporting CPK ---------------------------------------------- async def _test_put_block_and_put_block_list(self): # Arrange blob_client, _ = await self._create_block_blob() await blob_client.stage_block('1', b'AAA', cpk=TEST_ENCRYPTION_KEY) await blob_client.stage_block('2', b'BBB', cpk=TEST_ENCRYPTION_KEY) await blob_client.stage_block('3', b'CCC', cpk=TEST_ENCRYPTION_KEY) # Act block_list = [ BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3') ] put_block_list_resp = await blob_client.commit_block_list( block_list, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(put_block_list_resp['etag']) self.assertIsNotNone(put_block_list_resp['last_modified']) self.assertTrue(put_block_list_resp['request_server_encrypted']) self.assertEqual(put_block_list_resp['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), b'AAABBBCCC') self.assertEqual(blob.properties.etag, put_block_list_resp['etag']) self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified']) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_put_block_and_put_block_list_async(self): loop = asyncio.get_event_loop() 
loop.run_until_complete(self._test_put_block_and_put_block_list()) async def _test_create_block_blob_with_chunks(self): # parallel operation if TestMode.need_recording_file(self.test_mode): return # Arrange # to force the in-memory chunks to be used self.config.use_byte_buffer = True # Act # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = await self._create_block_blob( data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_connections=2) # Assert self.assertIsNotNone(upload_response['etag']) self.assertIsNotNone(upload_response['last_modified']) self.assertTrue(upload_response['request_server_encrypted']) self.assertEqual(upload_response['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data) self.assertEqual(blob.properties.etag, upload_response['etag']) self.assertEqual(blob.properties.last_modified, upload_response['last_modified']) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) def test_create_block_blob_with_chunks_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_create_block_blob_with_chunks()) async def _test_create_block_blob_with_sub_streams(self): # problem with the recording framework can only run live if TestMode.need_recording_file(self.test_mode): return # Act # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = await self._create_block_blob( data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_connections=2) # Assert self.assertIsNotNone(upload_response['etag']) self.assertIsNotNone(upload_response['last_modified']) self.assertTrue(upload_response['request_server_encrypted']) self.assertEqual(upload_response['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data) self.assertEqual(blob.properties.etag, upload_response['etag']) self.assertEqual(blob.properties.last_modified, upload_response['last_modified']) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) def test_create_block_blob_with_sub_streams_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_create_block_blob_with_sub_streams()) async def _test_create_block_blob_with_single_chunk(self): # Act data = b'AAABBBCCC' # create_blob_from_bytes forces the in-memory chunks to be used blob_client, upload_response = await self._create_block_blob( data=data, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(upload_response['etag']) self.assertIsNotNone(upload_response['last_modified']) self.assertTrue(upload_response['request_server_encrypted']) self.assertEqual(upload_response['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was 
retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), data) self.assertEqual(blob.properties.etag, upload_response['etag']) self.assertEqual(blob.properties.last_modified, upload_response['last_modified']) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_create_block_blob_with_single_chunk_async(self): loop = asyncio.get_event_loop() loop.run_until_complete( self._test_create_block_blob_with_single_chunk()) async def _test_put_block_from_url_and_commit(self): # Arrange # create source blob and get source blob url source_blob_name = self.get_resource_name("sourceblob") self.config.use_byte_buffer = True # Make sure using chunk upload, then we can record the request source_blob_client, _ = await self._create_block_blob( blob_name=source_blob_name, data=self.byte_data) source_blob_sas = source_blob_client.generate_shared_access_signature( permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=1)) source_blob_url = source_blob_client.url + "?" + source_blob_sas # create destination blob self.config.use_byte_buffer = False destination_blob_client, _ = await self._create_block_blob( cpk=TEST_ENCRYPTION_KEY) # Act part 1: make put block from url calls # the two blocks together cover the first 8 * 1024 + 1 bytes of the source await destination_blob_client.stage_block_from_url( block_id=1, source_url=source_blob_url, source_offset=0, source_length=4 * 1024, cpk=TEST_ENCRYPTION_KEY) await destination_blob_client.stage_block_from_url( block_id=2, source_url=source_blob_url, source_offset=4 * 1024, source_length=4 * 1024 + 1, cpk=TEST_ENCRYPTION_KEY) # Assert blocks committed, uncommitted = await destination_blob_client.get_block_list( 'all') self.assertEqual(len(uncommitted), 2) self.assertEqual(len(committed), 0) # commit the blocks without cpk should fail block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')] with self.assertRaises(HttpResponseError): await destination_blob_client.commit_block_list(block_list) # Act commit the blocks with cpk should succeed put_block_list_resp = await destination_blob_client.commit_block_list( block_list, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(put_block_list_resp['etag']) self.assertIsNotNone(put_block_list_resp['last_modified']) self.assertTrue(put_block_list_resp['request_server_encrypted']) self.assertEqual(put_block_list_resp['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content blob = await destination_blob_client.download_blob( cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data[0:8 * 1024 + 1]) self.assertEqual(blob.properties.etag, put_block_list_resp['etag']) self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified']) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_put_block_from_url_and_commit_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_put_block_from_url_and_commit()) async def _test_append_block(self): # Arrange blob_client = await self._create_append_blob(cpk=TEST_ENCRYPTION_KEY) # Act for content in [b'AAA', b'BBB', b'CCC']: append_blob_prop = await blob_client.append_block( content, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(append_blob_prop['etag']) self.assertIsNotNone(append_blob_prop['last_modified']) self.assertTrue(append_blob_prop['request_server_encrypted']) self.assertEqual(append_blob_prop['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob 
content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), b'AAABBBCCC') self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_append_block_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_block()) async def _test_append_block_from_url(self): # Arrange source_blob_name = self.get_resource_name("sourceblob") self.config.use_byte_buffer = True # chunk upload source_blob_client, _ = await self._create_block_blob( blob_name=source_blob_name, data=self.byte_data) source_blob_sas = source_blob_client.generate_shared_access_signature( permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=1)) source_blob_url = source_blob_client.url + "?" + source_blob_sas self.config.use_byte_buffer = False destination_blob_client = await self._create_append_blob( cpk=TEST_ENCRYPTION_KEY) # Act append_blob_prop = await destination_blob_client.append_block_from_url( source_blob_url, source_range_start=0, source_range_end=4 * 1024 - 1, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(append_blob_prop['etag']) self.assertIsNotNone(append_blob_prop['last_modified']) # TODO: verify that the swagger is correct, header wasn't added for the response # self.assertTrue(append_blob_prop['request_server_encrypted']) self.assertEqual(append_blob_prop['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await destination_blob_client.download_blob() # Act get the blob content blob = await destination_blob_client.download_blob( cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data[0:4 * 1024]) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_append_block_from_url_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_append_block_from_url()) async def _test_create_append_blob_with_chunks(self): # Arrange blob_client = await self._create_append_blob(cpk=TEST_ENCRYPTION_KEY) # Act append_blob_prop = await blob_client.upload_blob( self.byte_data, blob_type=BlobType.AppendBlob, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(append_blob_prop['etag']) self.assertIsNotNone(append_blob_prop['last_modified']) self.assertTrue(append_blob_prop['request_server_encrypted']) self.assertEqual(append_blob_prop['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_create_append_blob_with_chunks_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_create_append_blob_with_chunks()) async def _test_update_page(self): # Arrange blob_client = await self._create_page_blob(cpk=TEST_ENCRYPTION_KEY) # Act page_blob_prop = await blob_client.upload_page( self.byte_data, start_range=0, 
end_range=len(self.byte_data) - 1, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(page_blob_prop['etag']) self.assertIsNotNone(page_blob_prop['last_modified']) self.assertTrue(page_blob_prop['request_server_encrypted']) self.assertEqual(page_blob_prop['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob( offset=0, length=len(self.byte_data) - 1, cpk=TEST_ENCRYPTION_KEY, ) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_update_page_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_update_page()) async def _test_update_page_from_url(self): # Arrange source_blob_name = self.get_resource_name("sourceblob") self.config.use_byte_buffer = True # Make sure using chunk upload, then we can record the request source_blob_client, _ = await self._create_block_blob( blob_name=source_blob_name, data=self.byte_data) source_blob_sas = source_blob_client.generate_shared_access_signature( permission=BlobPermissions.READ, expiry=datetime.utcnow() + timedelta(hours=1)) source_blob_url = source_blob_client.url + "?" + source_blob_sas self.config.use_byte_buffer = False blob_client = await self._create_page_blob(cpk=TEST_ENCRYPTION_KEY) # Act page_blob_prop = await blob_client.upload_pages_from_url( source_blob_url, range_start=0, range_end=len(self.byte_data) - 1, source_range_start=0, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(page_blob_prop['etag']) self.assertIsNotNone(page_blob_prop['last_modified']) self.assertTrue(page_blob_prop['request_server_encrypted']) # TODO: FIX SWAGGER # self.assertEqual(page_blob_prop['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob( offset=0, length=len(self.byte_data) - 1, cpk=TEST_ENCRYPTION_KEY, ) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data) self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash) @record def test_update_page_from_url_async(self): loop = asyncio.get_event_loop() loop.run_until_complete(self._test_update_page_from_url()) async def _test_create_page_blob_with_chunks(self): if TestMode.need_recording_file(self.test_mode): return # Act blob_client = self.bsc.get_blob_client(self.container_name, self._get_blob_reference()) page_blob_prop = await blob_client.upload_blob( self.byte_data, blob_type=BlobType.PageBlob, max_connections=2, cpk=TEST_ENCRYPTION_KEY) # Assert self.assertIsNotNone(page_blob_prop['etag']) self.assertIsNotNone(page_blob_prop['last_modified']) self.assertTrue(page_blob_prop['request_server_encrypted']) self.assertEqual(page_blob_prop['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash) # Act get the blob content without cpk should fail with self.assertRaises(HttpResponseError): await blob_client.download_blob() # Act get the blob content blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY) # Assert content was retrieved with the cpk self.assertEqual(await blob.content_as_bytes(), self.byte_data) 
        self.assertEqual(blob.properties.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_create_page_blob_with_chunks_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_page_blob_with_chunks())

    async def _test_get_set_blob_metadata(self):
        # Arrange
        blob_client, _ = await self._create_block_blob(data=b'AAABBBCCC', cpk=TEST_ENCRYPTION_KEY)

        # Act without the encryption key should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.get_blob_properties()

        # Act
        blob_props = await blob_client.get_blob_properties(cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertTrue(blob_props.server_encrypted)
        self.assertEqual(blob_props.encryption_key_sha256, TEST_ENCRYPTION_KEY.key_hash)

        # Act set blob properties
        metadata = {'hello': 'world', 'number': '42', 'UP': 'UPval'}
        with self.assertRaises(HttpResponseError):
            await blob_client.set_blob_metadata(metadata=metadata)

        await blob_client.set_blob_metadata(metadata=metadata, cpk=TEST_ENCRYPTION_KEY)

        # Assert
        blob_props = await blob_client.get_blob_properties(cpk=TEST_ENCRYPTION_KEY)
        md = blob_props.metadata
        self.assertEqual(3, len(md))
        self.assertEqual(md['hello'], 'world')
        self.assertEqual(md['number'], '42')
        self.assertEqual(md['UP'], 'UPval')
        self.assertFalse('up' in md)

    @record
    def test_get_set_blob_metadata_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_set_blob_metadata())

    async def _test_snapshot_blob(self):
        # Arrange
        blob_client, _ = await self._create_block_blob(data=b'AAABBBCCC', cpk=TEST_ENCRYPTION_KEY)

        # Act without cpk should not work
        with self.assertRaises(HttpResponseError):
            await blob_client.create_snapshot()

        # Act with cpk should work
        blob_snapshot = await blob_client.create_snapshot(cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(blob_snapshot)

    @record
    def test_snapshot_blob_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_snapshot_blob())
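
# The CPK tests above all pass the same TEST_ENCRYPTION_KEY object, defined
# elsewhere in this module. A minimal sketch of how such a customer-provided key
# could be constructed, assuming the v12-style CustomerProvidedEncryptionKey
# model; the helper name _build_test_cpk is hypothetical and not part of the
# test suite. The service expects a 256-bit AES key plus the base64-encoded
# SHA256 hash of that key.
import base64
import hashlib
from os import urandom as _urandom
from azure.storage.blob import CustomerProvidedEncryptionKey

def _build_test_cpk():
    raw_key = _urandom(32)  # 256-bit AES key
    return CustomerProvidedEncryptionKey(
        key_value=base64.b64encode(raw_key).decode('utf-8'),
        key_hash=base64.b64encode(hashlib.sha256(raw_key).digest()).decode('utf-8'))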
class StorageGetBlobTestAsync(AsyncStorageTestCase):
    # --Helpers-----------------------------------------------------------------
    async def _setup(self, name, key):
        self.bsc = BlobServiceClient(
            self.account_url(name, "blob"),
            credential=key,
            max_single_get_size=32 * 1024,
            max_chunk_get_size=4 * 1024,
            transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')
        self.byte_blob = self.get_resource_name('byteblob')
        self.byte_data = self.get_random_bytes(64 * 1024 + 5)
        if self.is_live:
            container = self.bsc.get_container_client(self.container_name)
            try:
                await container.create_container()
            except:
                pass
            blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
            await blob.upload_blob(self.byte_data, overwrite=True)

    def _teardown(self, file_name):
        if path.isfile(file_name):
            try:
                remove(file_name)
            except:
                pass

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    class NonSeekableFile(object):
        def __init__(self, wrapped_file):
            self.wrapped_file = wrapped_file

        def write(self, data):
            self.wrapped_file.write(data)

        def read(self, count):
            return self.wrapped_file.read(count)

        def seekable(self):
            return False

    # -- Get test cases for blobs ----------------------------------------------
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_unicode_get_blob_unicode_data_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = u'hello world啊齄丂狛狜'.encode('utf-8')
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertIsInstance(content.properties, BlobProperties)
        self.assertEqual(await content.readall(), blob_data)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_unicode_get_blob_binary_data_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        base64_data = 'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w=='
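        # (note) base64_data decodes to repeated 0x00-0xFF byte ramps, so the
        # round-trip below exercises every possible byte value.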
        binary_data = base64.b64decode(base64_data)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(binary_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertIsInstance(content.properties, BlobProperties)
        self.assertEqual(await content.readall(), binary_data)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_no_content_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = b''
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertEqual(blob_data, await content.readall())
        self.assertEqual(0, content.properties.size)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await blob.download_blob(max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_bytes_with_single_byte_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await blob.download_blob(offset=0, length=1)).readall()

        # Assert
        self.assertEqual(1, len(content))
        self.assertEqual(self.byte_data[0], content[0])

        # Act
        content = await (await blob.download_blob(offset=5, length=1)).readall()

        # Assert
        self.assertEqual(1, len(content))
        self.assertEqual(self.byte_data[5], content[0])

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_bytes_with_zero_byte_async(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account.name, storage_account_key)
        blob_data = b''
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        # the get request should fail in this case since the blob is empty and yet there is a range specified
        with self.assertRaises(HttpResponseError) as e:
            await blob.download_blob(offset=0, length=5)
        self.assertEqual(StorageErrorCode.invalid_range, e.exception.error_code)

        with self.assertRaises(HttpResponseError) as e:
            await blob.download_blob(offset=3, length=5)
        self.assertEqual(StorageErrorCode.invalid_range, e.exception.error_code)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_with_missing_start_range_async(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account.name, storage_account_key)
        blob_data = b'foobar'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        # the get request should fail fast in this case since start_range is missing while end_range is specified
        with self.assertRaises(ValueError):
            await blob.download_blob(length=3)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_snapshot_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        snapshot_ref = await blob.create_snapshot()
        snapshot = self.bsc.get_blob_client(self.container_name, self.byte_blob, snapshot=snapshot_ref)

        await blob.upload_blob(self.byte_data, overwrite=True)  # Modify the blob so the Etag no longer matches

        # Act
        content = await (await snapshot.download_blob(max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_with_progress_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback, max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)
        self.assert_download_progress(len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_non_parallel_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback, max_concurrency=1)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)
        self.assert_download_progress(len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_small_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = self.get_random_bytes(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback)).readall()

        # Assert
        self.assertEqual(blob_data, content)
        self.assert_download_progress(len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'get_blob_to_stream_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_with_progress_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'blob_to_stream_with_progress_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self.assert_download_progress(len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_non_parallel_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'blob_to_stream_non_parallel_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback, max_concurrency=1)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self.assert_download_progress(len(self.byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_small_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = self.get_random_bytes(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'blob_to_stream_small_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 1024)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data, actual)
        self.assert_download_progress(len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        end_range = self.config.max_single_get_size
        FILE_PATH = 'ranged_get_blob_to_path_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1, length=end_range - 1, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, end_range - 1)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:end_range], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_with_progress_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        start_range = 3
        end_range = self.config.max_single_get_size + 1024
        FILE_PATH = 'get_blob_to_path_with_progress_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=start_range, length=end_range, raw_response_hook=callback, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, self.config.max_single_get_size + 1024)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[start_range:end_range + start_range], actual)
        self.assert_download_progress(end_range, self.config.max_chunk_get_size, self.config.max_single_get_size, progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_small_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'get_blob_to_path_small_asyncc.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1, length=4, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 4)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:5], actual)
        self._teardown(FILE_PATH)
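    # The ranged tests above rely on offset/length translating to a closed HTTP
    # byte range (offset=1, length=4 reads bytes 1..4, hence byte_data[1:5] in
    # the assertion). A minimal sketch of that mapping, assuming inclusive end
    # semantics; this helper is illustrative and not used by the SDK or tests.
    @staticmethod
    def _http_range_sketch(offset, length):
        # e.g. offset=1, length=4 -> 'bytes=1-4'
        return 'bytes={}-{}'.format(offset, offset + length - 1)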
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_non_parallel_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'granged_get_blob_to_path_non_parallel_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1, length=3, max_concurrency=1)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 3)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:4], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_invalid_range_parallel_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_size = self.config.max_single_get_size + 1
        blob_data = self.get_random_bytes(blob_size)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        FILE_PATH = 'path_invalid_range_parallel_async.temp.dat'
        end_range = 2 * self.config.max_single_get_size
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1, length=end_range, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, blob_size)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data[1:blob_size], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_invalid_range_non_parallel_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_size = 1024
        blob_data = self.get_random_bytes(blob_size)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        end_range = 2 * self.config.max_single_get_size
        FILE_PATH = 'path_invalid_range_non_parallel_asy.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1, length=end_range, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, blob_size)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data[1:blob_size], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text_blob = self.get_resource_name('textblob')
        text_data = self.get_random_text_data(self.config.max_single_get_size + 1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        # Act
        stream = await blob.download_blob(max_concurrency=2, encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_with_progress_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text_blob = self.get_resource_name('textblob')
        text_data = self.get_random_text_data(self.config.max_single_get_size + 1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback, max_concurrency=2, encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)
        self.assert_download_progress(len(text_data.encode('utf-8')), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_non_parallel_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text_blob = self._get_blob_reference()
        text_data = self.get_random_text_data(self.config.max_single_get_size + 1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback, max_concurrency=1, encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)
        self.assert_download_progress(len(text_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_small_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = self.get_random_text_data(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback, encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(blob_data, content)
        self.assert_download_progress(len(blob_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_with_encoding_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text = u'hello 啊齄丂狛狜 world'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(text, encoding='utf-16')

        # Act
        stream = await blob.download_blob(encoding='utf-16')
        content = await stream.readall()

        # Assert
        self.assertEqual(text, content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_with_encoding_and_progress_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text = u'hello 啊齄丂狛狜 world'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(text, encoding='utf-16')

        # Act
        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        stream = await blob.download_blob(raw_response_hook=callback, encoding='utf-16')
        content = await stream.readall()

        # Assert
        self.assertEqual(text, content)
        self.assert_download_progress(len(text.encode('utf-8')), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_non_seekable_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'get_blob_non_seekable_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            non_seekable_stream = StorageGetBlobTestAsync.NonSeekableFile(stream)
            downloader = await blob.download_blob(max_concurrency=1)
            read_bytes = await downloader.readinto(non_seekable_stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_non_seekable_parallel_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'et_blob_non_seekable_parallel_asyn.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            non_seekable_stream = StorageGetBlobTestAsync.NonSeekableFile(stream)

            with self.assertRaises(ValueError):
                downloader = await blob.download_blob(max_concurrency=2)
                properties = await downloader.readinto(non_seekable_stream)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_exact_get_size_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'stream_exact_get_size_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback, max_concurrency=2)
            properties = await downloader.readinto(stream)

        # Assert
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(byte_data, actual)
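        # (assumption) assert_download_progress, provided by the base test case,
        # is taken here to verify that the recorded (current, total) pairs are
        # non-decreasing, aligned to the configured chunk sizes, and finish with
        # current == total.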
        self.assert_download_progress(len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_exact_get_size_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback)).readall()

        # Assert
        self.assertEqual(byte_data, content)
        self.assert_download_progress(len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_exact_chunk_size_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size + self.config.max_chunk_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback)).readall()

        # Assert
        self.assertEqual(byte_data, content)
        self.assert_download_progress(len(byte_data), self.config.max_chunk_get_size, self.config.max_single_get_size, progress)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_with_md5_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'lob_to_stream_with_md5_asyncc.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(validate_content=True, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_with_md5_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await blob.download_blob(validate_content=True, max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_to_stream_with_overall_md5_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = b'MDAwMDAwMDA='
        await blob.set_http_headers(props.content_settings)

        # Act
        FILE_PATH = 'range_to_stream_with_overall_md5_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=0, length=1024, validate_content=True, max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 1024)
        self.assertEqual(b'MDAwMDAwMDA=', downloader.properties.content_settings.content_md5)
        self.assertEqual(downloader.size, 1024)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_with_overall_md5_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        content = await blob.download_blob(offset=0, length=1024, validate_content=True)

        # Arrange
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = b'MDAwMDAwMDA='
        await blob.set_http_headers(props.content_settings)

        # Act
        content = await blob.download_blob(offset=0, length=1024, validate_content=True)

        # Assert
        self.assertEqual(b'MDAwMDAwMDA=', content.properties.content_settings.content_md5)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_with_range_md5_async(self, resource_group, location, storage_account, storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        content = await blob.download_blob(offset=0, length=1024, validate_content=True)

        # Arrange
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = None
        await blob.set_http_headers(props.content_settings)

        # Act
        content = await blob.download_blob(offset=0, length=1024, validate_content=True)

        # Assert
        self.assertIsNotNone(content.properties.content_settings.content_type)
        self.assertIsNone(content.properties.content_settings.content_md5)
        self.assertEqual(content.properties.size, 1024)

# ------------------------------------------------------------------------------
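
# validate_content=True in the MD5 tests above asks the service for a
# transactional Content-MD5 on each ranged GET (the service computes one only
# for sufficiently small ranges, up to 4MB per documented behavior - stated
# here as an assumption) and the client re-hashes the bytes it received. A
# minimal sketch of that client-side comparison:
import base64
import hashlib

def _md5_matches_sketch(body, content_md5_header):
    # body: raw bytes of one downloaded range; header value is base64-encoded MD5
    return base64.b64encode(hashlib.md5(body).digest()).decode('utf-8') == content_md5_header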
class StorageLargestBlockBlobTestAsync(AsyncStorageTestCase):
    async def _setup(self, storage_account_name, key, additional_policies=None, min_large_block_upload_threshold=1 * 1024 * 1024, max_single_put_size=32 * 1024):
        self.bsc = BlobServiceClient(
            self.account_url(storage_account_name, "blob"),
            credential=key,
            max_single_put_size=max_single_put_size,
            max_block_size=LARGEST_BLOCK_SIZE,
            min_large_block_upload_threshold=min_large_block_upload_threshold,
            _additional_pipeline_policies=additional_policies,
            transport=AiohttpTestTransport(connection_timeout=CONNECTION_TIMEOUT, read_timeout=READ_TIMEOUT))
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')
        self.container_name = self.container_name + str(uuid.uuid4())

        if self.is_live:
            await self.bsc.create_container(self.container_name)

    def _teardown(self, file_name):
        if path.isfile(file_name):
            try:
                remove(file_name)
            except:
                pass

    # --Helpers-----------------------------------------------------------------
    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    async def _create_blob(self):
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(b'')
        return blob

    # --Test cases for block blobs --------------------------------------------
    @pytest.mark.live_test_only
    @pytest.mark.skip(reason="This takes really long time")
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_block_bytes_largest(self, storage_account_name, storage_account_key):
        await self._setup(storage_account_name, storage_account_key)
        blob = await self._create_blob()

        # Act
        data = urandom(LARGEST_BLOCK_SIZE)
        blockId = str(uuid.uuid4()).encode('utf-8')
        resp = await blob.stage_block(blockId, data, length=LARGEST_BLOCK_SIZE)
        await blob.commit_block_list([BlobBlock(blockId)])
        block_list = await blob.get_block_list()

        # Assert
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 1)
        self.assertEqual(block_list[0][0].size, LARGEST_BLOCK_SIZE)

    @pytest.mark.live_test_only
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_block_bytes_largest_without_network(self, storage_account_name, storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(storage_account_name, storage_account_key)
        await self._setup(storage_account_name, storage_account_key, [payload_dropping_policy, credential_policy])
        blob = await self._create_blob()

        # Act
        data = urandom(LARGEST_BLOCK_SIZE)
        blockId = str(uuid.uuid4()).encode('utf-8')
        resp = await blob.stage_block(blockId, data, length=LARGEST_BLOCK_SIZE)
        await blob.commit_block_list([BlobBlock(blockId)])
        block_list = await blob.get_block_list()

        # Assert
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 1)
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0], LARGEST_BLOCK_SIZE)

    @pytest.mark.live_test_only
    @pytest.mark.skip(reason="This takes really long time")
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_block_stream_largest(self, storage_account_name, storage_account_key):
        await self._setup(storage_account_name, storage_account_key)
        blob = await self._create_blob()

        # Act
        stream = LargeStream(LARGEST_BLOCK_SIZE)
        blockId = str(uuid.uuid4())
        requestId = str(uuid.uuid4())
        resp = await blob.stage_block(blockId, stream, length=LARGEST_BLOCK_SIZE, client_request_id=requestId)
        await blob.commit_block_list([BlobBlock(blockId)])
        block_list = await blob.get_block_list()

        # Assert
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 1)
        self.assertEqual(block_list[0][0].size, LARGEST_BLOCK_SIZE)

    @pytest.mark.live_test_only
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_block_stream_largest_without_network(self, storage_account_name, storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(storage_account_name, storage_account_key)
        await self._setup(storage_account_name, storage_account_key, [payload_dropping_policy, credential_policy])
        blob = await self._create_blob()

        # Act
        stream = LargeStream(LARGEST_BLOCK_SIZE)
        blockId = str(uuid.uuid4())
        requestId = str(uuid.uuid4())
        resp = await blob.stage_block(blockId, stream, length=LARGEST_BLOCK_SIZE, client_request_id=requestId)
        await blob.commit_block_list([BlobBlock(blockId)])
        block_list = await blob.get_block_list()

        # Assert
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 1)
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0], LARGEST_BLOCK_SIZE)

    @pytest.mark.live_test_only
    @pytest.mark.skip(reason="This takes really long time")
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_largest_blob_from_path(self, storage_account_name, storage_account_key):
        await self._setup(storage_account_name, storage_account_key)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        FILE_PATH = 'largest_blob_from_path.temp.{}.dat'.format(str(uuid.uuid4()))
        with open(FILE_PATH, 'wb') as stream:
            largeStream = LargeStream(LARGEST_BLOCK_SIZE, 100 * 1024 * 1024)
            chunk = largeStream.read()
            while chunk:
                stream.write(chunk)
                chunk = largeStream.read()

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2)

        # Assert
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_largest_blob_from_path_without_network(self, storage_account_name, storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(storage_account_name, storage_account_key)
        await self._setup(storage_account_name, storage_account_key, [payload_dropping_policy, credential_policy])
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        FILE_PATH = 'largest_blob_from_path.temp.{}.dat'.format(str(uuid.uuid4()))
        with open(FILE_PATH, 'wb') as stream:
            largeStream = LargeStream(LARGEST_BLOCK_SIZE, 100 * 1024 * 1024)
            chunk = largeStream.read()
            while chunk:
                stream.write(chunk)
                chunk = largeStream.read()

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2)

        # Assert
        self._teardown(FILE_PATH)
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0], LARGEST_BLOCK_SIZE)

    @pytest.mark.skip(reason="This takes really long time")
    @pytest.mark.live_test_only
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_largest_blob_from_stream_without_network(self, storage_account_name, storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(storage_account_name, storage_account_key)
        await self._setup(storage_account_name, storage_account_key, [payload_dropping_policy, credential_policy])
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        number_of_blocks = 50000
        stream = LargeStream(LARGEST_BLOCK_SIZE * number_of_blocks)

        # Act
        await blob.upload_blob(stream, max_concurrency=1)

        # Assert
        self.assertEqual(payload_dropping_policy.put_block_counter, number_of_blocks)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0], LARGEST_BLOCK_SIZE)

    @pytest.mark.live_test_only
    @BlobPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_largest_blob_from_stream_single_upload_without_network(self, storage_account_name, storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(storage_account_name, storage_account_key)
        await self._setup(storage_account_name, storage_account_key, [payload_dropping_policy, credential_policy], max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE + 1)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        stream = LargeStream(LARGEST_SINGLE_UPLOAD_SIZE)

        # Act
        await blob.upload_blob(stream, length=LARGEST_SINGLE_UPLOAD_SIZE, max_concurrency=1)

        # Assert
        self.assertEqual(payload_dropping_policy.put_block_counter, 0)
        self.assertEqual(payload_dropping_policy.put_blob_counter, 1)
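
# LargeStream (defined elsewhere in this test package) feeds multi-gigabyte
# uploads without materializing them in memory, while PayloadDroppingPolicy
# discards request bodies so the "without_network" tests can assert on Put
# Block traffic shape alone. A minimal sketch of a LargeStream-like reader,
# assuming only read() semantics are needed (the class name is hypothetical):
class _LargeStreamSketch:
    def __init__(self, length, chunk_size=1024 * 1024):
        self._remaining = length
        self._chunk = b'\x00' * chunk_size  # one recycled buffer; nothing kept per-read

    def read(self, size=None):
        if self._remaining <= 0:
            return b''
        count = len(self._chunk) if size is None else min(size, len(self._chunk))
        count = min(count, self._remaining)
        self._remaining -= count
        return self._chunk[:count]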
async def play(request):
    """Renders the video player view."""
    video_uuid = request.query_params["video_uuid"]
    github_user_id = request.scope["auth"]["id"]

    blob_service_client = BlobServiceClient(
        account_url=f"https://{os.getenv('AZURE_STORAGE_ACCOUNT')}.blob.core.windows.net/",
        credential=os.getenv("AZURE_STORAGE_KEY"),
    )
    blob_client = blob_service_client.get_blob_client(
        os.getenv("AZURE_STORAGE_VIDEO_CONTAINER"), video_uuid
    )
    try:
        blob_properties = await blob_client.get_blob_properties()
    except Exception:
        return templates.TemplateResponse(
            "layout/error.html",
            {
                "request": request,
                "error_code": "404",
                "error_message": "Video not found",
            },
        )
    metadata = blob_properties.metadata
    created_at = blob_properties.creation_time

    video_indexer = app.state.video_indexer
    async with await video_indexer.get_video_id_by_external_id(video_uuid) as response:
        video_id = await response.json()
    async with await video_indexer.get_video_index(video_id) as response:
        video_details = await response.json()
    state = video_details["state"]
    if state == "Processing":
        return templates.TemplateResponse(
            "layout/error.html",
            {
                "request": request,
                "error_code": "204",
                "error_message": "Video not available yet - still processing, check back shortly.",
            },
        )

    # Only the uploader may edit the video's insights.
    allow_edit = metadata["uploader_id"] == github_user_id
    async with await video_indexer.get_video_access_token(video_id, allow_edit) as response:
        video_access_token = await response.json()
    player_widget_url = await video_indexer.get_video_player_widget_url(video_id, video_access_token)
    insights_widget_url = await video_indexer.get_video_insights_widget_url(video_id, video_access_token, allow_edit)
    return templates.TemplateResponse(
        "play.html",
        {
            "request": request,
            "player_widget_url": player_widget_url,
            "insights_widget_url": insights_widget_url,
            "title": metadata["title"],
            "uploader_username": metadata["uploader_username"],
            "badge": metadata["badge"],
            "description": metadata["description"],
            "elapsed_time": date_funcs.elapsed_time_str(created_at),
        },
    )
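
# For context, a Starlette app would register the view above with something
# like the sketch below (route path and wiring are assumptions; templates,
# date_funcs, and app.state.video_indexer are configured elsewhere, so this
# stays commented out):
#
#     from starlette.applications import Starlette
#     from starlette.routing import Route
#
#     app = Starlette(routes=[Route("/play", play, methods=["GET"])])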
class StorageLargeBlockBlobTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageLargeBlockBlobTestAsync, self).setUp()
        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        # test chunking functionality by reducing the threshold
        # for chunking and the size of each chunk, otherwise
        # the tests would take too long to execute
        self.bsc = BlobServiceClient(
            url,
            credential=credential,
            max_single_put_size=32 * 1024,
            max_block_size=2 * 1024 * 1024,
            min_large_block_upload_threshold=1 * 1024 * 1024,
            transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.bsc.delete_container(self.container_name))
            except:
                pass

        if os.path.isfile(FILE_PATH):
            try:
                os.remove(FILE_PATH)
            except:
                pass

        return super(StorageLargeBlockBlobTestAsync, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except:
                pass

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    async def _create_blob(self):
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(b'')
        return blob

    async def assertBlobEqual(self, container_name, blob_name, expected_data):
        blob = self.bsc.get_blob_client(container_name, blob_name)
        actual_data = await blob.download_blob()
        actual_bytes = b""
        async for data in actual_data:
            actual_bytes += data
        self.assertEqual(actual_bytes, expected_data)

    # --Test cases for block blobs --------------------------------------------
    async def _test_put_block_bytes_large_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        futures = []
        for i in range(5):
            futures.append(blob.stage_block('block {0}'.format(i).encode('utf-8'), os.urandom(LARGE_BLOCK_SIZE)))
        await asyncio.gather(*futures)

        # Assert

    @record
    def test_put_block_bytes_large_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_bytes_large_async())

    async def _test_put_block_bytes_large_with_md5_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        for i in range(5):
            resp = await blob.stage_block(
                'block {0}'.format(i).encode('utf-8'),
                os.urandom(LARGE_BLOCK_SIZE),
                validate_content=True)
            self.assertIsNone(resp)

    @record
    def test_put_block_bytes_large_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_bytes_large_with_md5_async())

    async def _test_put_block_stream_large_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        for i in range(5):
            stream = BytesIO(bytearray(LARGE_BLOCK_SIZE))
            resp = await blob.stage_block(
                'block {0}'.format(i).encode('utf-8'),
                stream,
                length=LARGE_BLOCK_SIZE)
            self.assertIsNone(resp)

        # Assert

    @record
    def test_put_block_stream_large_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_stream_large_async())

    async def _test_put_block_stream_large_with_md5_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        for i in range(5):
            stream = BytesIO(bytearray(LARGE_BLOCK_SIZE))
            resp = await blob.stage_block(
                'block {0}'.format(i).encode('utf-8'),
                stream,
                length=LARGE_BLOCK_SIZE,
                validate_content=True)
            self.assertIsNone(resp)

        # Assert

    @record
    def test_put_block_stream_large_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_stream_large_with_md5_async())

    async def _test_create_large_blob_from_path_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_path_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_path_async())

    async def _test_create_large_blob_from_path_with_md5_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, validate_content=True, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_path_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_path_with_md5_async())

    async def _test_create_large_blob_from_path_non_parallel_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(self.get_random_bytes(100))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_path_non_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_path_non_parallel_async())

    async def _test_create_large_blob_from_path_with_progress_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2, raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size, progress)

    @record
    def test_create_large_blob_from_path_with_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_path_with_progress_async())

    async def _test_create_large_blob_from_path_with_properties_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, content_settings=content_settings, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_large_blob_from_path_with_properties_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_path_with_properties_async())

    async def _test_create_large_blob_from_stream_chunked_upload_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_stream_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_stream_chunked_upload_async())

    async def _test_create_large_blob_from_stream_with_progress_chunked_upload_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2, raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size, progress)

    @record
    def test_create_large_blob_from_stream_with_progress_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_stream_with_progress_chunked_upload_async())

    async def _test_create_large_blob_from_stream_chunked_upload_with_count_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, length=blob_size, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data[:blob_size])

    @record
    def test_create_large_blob_from_stream_chunked_upload_with_count_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_stream_chunked_upload_with_count_async())

    async def _test_create_large_blob_from_stream_chunked_upload_with_count_and_properties_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, length=blob_size, content_settings=content_settings, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data[:blob_size])
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_large_blob_from_stream_chunked_upload_with_count_and_properties_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_stream_chunked_upload_with_count_and_properties_async())

    async def _test_create_large_blob_from_stream_chunked_upload_with_properties_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png', content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, content_settings=content_settings, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type, content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @record
    def test_create_large_blob_from_stream_chunked_upload_with_properties_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_stream_chunked_upload_with_properties_async())
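
# Every @record wrapper above repeats the same event-loop boilerplate. A small
# helper like this sketch could collapse them (the name run_async is an
# assumption, not an existing helper in this suite):
def run_async(coro):
    loop = asyncio.get_event_loop()
    return loop.run_until_complete(coro)

# e.g. a wrapper body would then reduce to:
#     run_async(self._test_put_block_bytes_large_async())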