async def test_retry_put_block_with_non_seekable_stream_fail_async(
        self, resource_group, location, storage_account, storage_account_key):
    """Staging a block from a non-seekable stream must fail permanently
    when the first attempt errors: the stream cannot be rewound, so the
    retry policy cannot replay the request body.
    """
    if not self.is_live:
        pytest.skip("live only")
    # Arrange
    # NOTE(review): uses self._account_url(storage_account.name) while
    # sibling tests call self.account_url(account, "blob") -- confirm
    # which helper this test base actually exposes.
    bsc = BlobServiceClient(
        self._account_url(storage_account.name),
        credential=storage_account_key,
        retry_policy=self.retry,
        transport=AiohttpTestTransport())
    await self._setup(bsc)
    blob_name = self.get_resource_name('blob')
    data = self.get_random_bytes(PUT_BLOCK_SIZE)
    data_stream = self.NonSeekableStream(BytesIO(data))

    # Rig the response so that the first attempt comes back as a 408.
    responder = ResponseCallback(status=201, new_status=408)

    # Act
    blob = bsc.get_blob_client(self.container_name, blob_name)
    with self.assertRaises(HttpResponseError) as error:
        await blob.stage_block(
            1,
            data_stream,
            length=PUT_BLOCK_SIZE,
            raw_response_hook=responder.override_first_status)

    # Assert: the 408 surfaced to the caller instead of being retried away.
    self.assertEqual(error.exception.response.status_code, 408)
async def test_create_page_blob_with_chunks(self, resource_group, location, storage_account, storage_account_key):
    """Upload a page blob in multiple small chunks under an encryption
    scope and confirm the scope is reported on upload and on download.
    """
    # Shrink every size knob to 1 KiB so the chunked-upload code path is
    # exercised without moving much data.
    service = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        storage_account_key,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024,
        transport=AiohttpTestTransport(connection_data_block_size=1024))
    await self._setup(service)

    # Act
    page_blob = service.get_blob_client(self.container_name, self._get_blob_reference())
    upload_result = await page_blob.upload_blob(
        self.byte_data,
        blob_type=BlobType.PageBlob,
        max_concurrency=2,
        encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)

    # Assert: the service echoed back the encryption scope.
    self.assertIsNotNone(upload_result['etag'])
    self.assertIsNotNone(upload_result['last_modified'])
    self.assertTrue(upload_result['request_server_encrypted'])
    self.assertEqual(upload_result['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)

    # Act: read the blob content back.
    downloader = await page_blob.download_blob()

    # Assert: bytes round-trip and the scope shows on the properties.
    self.assertEqual(await downloader.readall(), self.byte_data)
    self.assertEqual(downloader.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
async def test_create_container_with_default_cpk_n(self, resource_group, location, storage_account, storage_account_key):
    """Create a container with a default encryption scope and verify the
    scope is visible via get_container_properties and list_containers,
    and that a per-blob scope can still override the default.
    """
    # Arrange
    bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        connection_data_block_size=1024,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024)
    container_client = await bsc.create_container(
        'asynccpkcontainer',
        container_encryption_scope=TEST_CONTAINER_ENCRYPTION_KEY_SCOPE)

    container_props = await container_client.get_container_properties()
    self.assertEqual(
        container_props.encryption_scope.default_encryption_scope,
        TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
    self.assertEqual(container_props.encryption_scope.prevent_encryption_scope_override, False)

    async for container in bsc.list_containers(name_starts_with='asynccpkcontainer'):
        # Fix: assert on the listed `container`, not on the already-verified
        # `container_props` -- the original re-checked the same object, so
        # the listing path was never actually validated.
        self.assertEqual(
            container.encryption_scope.default_encryption_scope,
            TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
        self.assertEqual(container.encryption_scope.prevent_encryption_scope_override, False)

    blob_client = container_client.get_blob_client("appendblob")

    # Providing an explicit encryption scope on upload overrides the
    # container's default scope.
    resp = await blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)

    # The per-blob scope wins over the container default.
    self.assertEqual(resp['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)

    await container_client.delete_container()
def setUp(self):
    """Build a shared-key BlobServiceClient and a fresh container name."""
    super(BlobStorageAccountTestAsync, self).setUp()
    account_url = self._get_account_url()
    shared_key = self._get_shared_key_credential()
    self.bsc = BlobServiceClient(account_url, credential=shared_key, transport=AiohttpTestTransport())
    self.container_name = self.get_resource_name('utcontainer')
async def test_ors_source(self, resource_group, location, storage_account, storage_account_key):
    """A replication-source blob must expose its object-replication
    policies and rules, both via get_blob_properties and via the
    properties attached to a download.
    """
    # Arrange
    service = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport(connection_data_block_size=1024))
    source_blob = service.get_blob_client(container=self.SRC_CONTAINER, blob=self.BLOB_NAME)

    # Act
    props = await source_blob.get_blob_properties()

    # Assert: every policy has an id and non-empty rules with statuses.
    self.assertIsInstance(props, BlobProperties)
    self.assertIsNotNone(props.object_replication_source_properties)
    for policy in props.object_replication_source_properties:
        self.assertNotEqual(policy.policy_id, '')
        self.assertIsNotNone(policy.rules)
        for policy_rule in policy.rules:
            self.assertNotEqual(policy_rule.rule_id, '')
            self.assertIsNotNone(policy_rule.status)
            self.assertNotEqual(policy_rule.status, '')

    # The download path must report the same replication metadata.
    stream = await source_blob.download_blob()
    self.assertEqual(
        stream.properties.object_replication_source_properties,
        props.object_replication_source_properties)
async def test_list_blobs(self, resource_group, location, storage_account, storage_account_key):
    """Blobs created under an encryption scope report that scope when the
    container is listed.
    """
    # Arrange
    service = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        connection_data_block_size=1024,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024)
    await self._setup(service)
    blob_client, _ = await self._create_block_blob(
        service, blob_name="blockblob", data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
    await self._create_append_blob(service, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)

    container = service.get_container_client(self.container_name)
    async for listed_blob in container.list_blobs(include="metadata"):
        # Assert: every listed blob carries the encryption scope it was
        # created with.
        self.assertIsNotNone(listed_blob)
        self.assertEqual(listed_blob.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)

    self._teardown(service)
async def test_retry_put_block_with_non_seekable_stream_async(self, storage_account_name, storage_account_key):
    """A retried stage_block on a non-seekable stream still succeeds:
    the client buffers the stream into bytes before sending, so the
    retry can replay the payload.
    """
    # Arrange
    service = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        retry_policy=self.retry,
        transport=AiohttpTestTransport())
    await self._setup(service)
    blob_name = self.get_resource_name('blob')
    payload = self.get_random_bytes(PUT_BLOCK_SIZE)
    stream = self.NonSeekableStream(BytesIO(payload))

    # Force exactly one 408 so the retry policy kicks in once.
    hook = ResponseCallback(status=201, new_status=408)

    # Act
    blob = service.get_blob_client(self.container_name, blob_name)
    # put_block converts non-seekable streams to byte arrays before handing
    # them to the executor, which is what makes the retry safe.
    await blob.stage_block(1, stream, raw_response_hook=hook.override_first_status)

    # Assert: the block landed in the uncommitted list with the right size.
    _, uncommitted_blocks = await blob.get_block_list(
        block_list_type="uncommitted",
        raw_response_hook=hook.override_first_status)
    self.assertEqual(len(uncommitted_blocks), 1)
    self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE)

    # Commit the block and verify the final content round-trips.
    await blob.commit_block_list(['1'], raw_response_hook=hook.override_first_status)
    content = await (await blob.download_blob()).readall()
    self.assertEqual(content, payload)
async def test_retry_put_block_with_seekable_stream_async(self, resource_group, location, storage_account, storage_account_key): pytest.skip("Aiohttp closes stream after request - cannot rewind.") # Arrange bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key, retry_policy=self.retry, transport=AiohttpTestTransport()) await self._setup(bsc) blob_name = self.get_resource_name('blob') data = self.get_random_bytes(PUT_BLOCK_SIZE) data_stream = BytesIO(data) # rig the response so that it fails for a single time responder = ResponseCallback(status=201, new_status=408) # Act blob = bsc.get_blob_client(self.container_name, blob_name) await blob.stage_block(1, data_stream, raw_response_hook=responder.override_first_status) # Assert _, uncommitted_blocks = await blob.get_block_list( block_list_type="uncommitted", raw_response_hook=responder.override_first_status) self.assertEqual(len(uncommitted_blocks), 1) self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE) # Commit block and verify content await blob.commit_block_list(['1'], raw_response_hook=responder.override_first_status) # Assert content = await (await blob.download_blob()).readall() self.assertEqual(content, data)
def setUp(self):
    """Create a chunk-limited BlobServiceClient plus a SAS-addressable
    source blob URL for copy tests.
    """
    super(StorageBlockBlobTestAsync, self).setUp()
    url = self._get_account_url()
    credential = self._get_shared_key_credential()

    # test chunking functionality by reducing the size of each chunk,
    # otherwise the tests would take too long to execute
    self.bsc = BlobServiceClient(
        url,
        credential=credential,
        connection_data_block_size=4 * 1024,
        max_single_put_size=32 * 1024,
        max_block_size=4 * 1024,
        transport=AiohttpTestTransport())
    self.config = self.bsc._config
    self.container_name = self.get_resource_name('utcontainer')

    # create source blob to be copied from
    self.source_blob_name = self.get_resource_name('srcblob')
    self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)
    blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)

    # generate a SAS so that it is accessible with a URL
    # NOTE(review): blob.generate_shared_access_signature is the legacy
    # client-side helper; newer azure-storage-blob releases expose
    # generate_blob_sas instead -- confirm the pinned SDK version still
    # provides this method.
    sas_token = blob.generate_shared_access_signature(
        permission=BlobSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url
def setUp(self):
    """Create a chunk-limited BlobServiceClient, prep 64 KiB of random
    test data, and (outside playback) ensure the test container exists.
    """
    super(StorageCPKAsyncTest, self).setUp()
    url = self._get_account_url()

    # test chunking functionality by reducing the size of each chunk,
    # otherwise the tests would take too long to execute
    self.bsc = BlobServiceClient(
        url,
        credential=self.settings.STORAGE_ACCOUNT_KEY,
        connection_data_block_size=1024,
        max_single_put_size=1024,
        min_large_block_upload_threshold=1024,
        max_block_size=1024,
        max_page_size=1024,
        transport=AiohttpTestTransport())
    self.config = self.bsc._config
    self.container_name = self.get_resource_name('utcontainer')

    # prep some test data so that they can be used in upload tests
    self.byte_data = self.get_random_bytes(64 * 1024)

    if not self.is_playback():
        # setUp is synchronous, so drive the async create via the loop.
        loop = asyncio.get_event_loop()
        try:
            loop.run_until_complete(
                self.bsc.create_container(self.container_name))
        except Exception:
            # Best-effort: the container may already exist from a prior
            # run. Fix: was a bare `except:`, which also swallows
            # KeyboardInterrupt/SystemExit -- narrowed to Exception.
            pass
async def test_sas_signature_is_scrubbed_off(self, storage_account_name, storage_account_key):
    """The 'sig' query parameter must appear in logs with its value
    redacted.
    """
    # Test can only run live
    bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), storage_account_key)
    await self._setup(bsc)

    # Arrange: mint a read-only container SAS.
    container = bsc.get_container_client(self.container_name)
    token = generate_container_sas(
        container.account_name,
        container.container_name,
        account_key=container.credential.account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )

    # Pull the signed-signature value out of the token so we can look
    # for it verbatim in the captured logs.
    query_params = parse_qs(token)
    sig_value = quote(query_params[QueryStringConstants.SIGNED_SIGNATURE][0])

    sas_service = ContainerClient.from_container_url(container.url, credential=token)

    # Act
    with LogCaptured(self) as log_captured:
        await sas_service.get_account_information(logging_enable=True)
        log_as_str = log_captured.getvalue()

        # Assert: the query parameter name 'sig' is logged, its value is not.
        self.assertTrue(QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
        self.assertFalse(sig_value in log_as_str)
async def test_logging_request_and_response_body(self, storage_account_name, storage_account_key):
    """Request bodies stay out of logs unless logging_body=True is passed."""
    service = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        storage_account_key,
        transport=AiohttpTestTransport(),
        logging_enable=True)
    await self._setup(service)

    # Arrange
    container = service.get_container_client(self.container_name)
    request_body = 'testloggingbody'
    blob_name = self.get_resource_name("testloggingblob")
    blob_client = container.get_blob_client(blob_name)
    await blob_client.upload_blob(request_body, overwrite=True)

    # Act: download without body logging -- the payload must not leak.
    with LogCaptured(self) as log_captured:
        await blob_client.download_blob()
        log_as_str = log_captured.getvalue()
        self.assertFalse(request_body in log_as_str)

    # Act: upload with logging_body=True -- the payload appears exactly once.
    with LogCaptured(self) as log_captured:
        await blob_client.upload_blob(request_body, overwrite=True, logging_body=True)
        log_as_str = log_captured.getvalue()
        self.assertTrue(request_body in log_as_str)
        self.assertEqual(log_as_str.count(request_body), 1)
async def test_standard_blob_tier_set_tier_api(self, storage_account_name, storage_account_key):
    """set_standard_blob_tier pins the tier explicitly: afterwards the
    tier matches, is no longer inferred, and has a change timestamp.
    """
    service = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())
    await self._setup(service)

    for tier in (StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot):
        blob_name = self.get_resource_name(tier.value)
        blob = service.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(b'hello world')

        # Freshly uploaded: tier is service-inferred, no change timestamp.
        before = await blob.get_blob_properties()
        self.assertIsNotNone(before.blob_tier)
        self.assertTrue(before.blob_tier_inferred)
        self.assertIsNone(before.blob_tier_change_time)

        # Act
        await blob.set_standard_blob_tier(tier)

        # Assert: tier now explicit, with a recorded change time.
        after = await blob.get_blob_properties()
        self.assertEqual(tier, after.blob_tier)
        self.assertFalse(after.blob_tier_inferred)
        self.assertIsNotNone(after.blob_tier_change_time)

        await blob.delete_blob()
async def return_gallery_blobs(start=0, limit=None):
    """Return gallery entries for blobs in the configured video container.

    Args:
        start: index of the first entry to return (default 0).
        limit: maximum number of entries to return; None means all.
            (Fix: both parameters were previously accepted but ignored.)

    Returns:
        A list of dicts describing each video blob, built from the blob's
        metadata (uuid, uploader, title, badge) plus an elapsed-time
        string derived from the blob's creation time.
    """
    blob_service_client = BlobServiceClient(
        account_url=f"https://{os.getenv('AZURE_STORAGE_ACCOUNT')}.blob.core.windows.net/",
        credential=os.getenv("AZURE_STORAGE_KEY"),
    )
    container_client = blob_service_client.get_container_client(
        os.getenv("AZURE_STORAGE_VIDEO_CONTAINER")
    )
    blobs_list = []
    try:
        async for blob in container_client.list_blobs(  # pylint: disable=E1133
            include=["metadata"]
        ):
            metadata = blob.metadata
            created_at = blob.creation_time
            blobs_list.append(
                {
                    "uuid": metadata["uuid"],
                    "image_url": f"/get_thumbnail?video_uuid={metadata['uuid']}",
                    "uploader_username": metadata["uploader_username"],
                    "uploader_id": metadata["uploader_id"],
                    "title": metadata["title"],
                    "badge": metadata["badge"],
                    "elapsed_time": date_funcs.elapsed_time_str(created_at),
                }
            )
    finally:
        # Close clients even if listing fails part-way through.
        await container_client.close()
        await blob_service_client.close()

    # Fix: honor the pagination arguments (they were dead parameters).
    end = None if limit is None else start + limit
    return blobs_list[start:end]
async def test_standard_blob_tier_set_tier_api(self, resource_group, location, storage_account, storage_account_key):
    """For each standard tier: verify the tier is inferred on upload,
    explicitly set it with set_standard_blob_tier, and check that both
    get_blob_properties and list_blobs reflect the change.
    """
    bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())
    await self._setup(bsc)
    container = bsc.get_container_client(self.container_name)
    tiers = [
        StandardBlobTier.Archive, StandardBlobTier.Cool,
        StandardBlobTier.Hot
    ]

    for tier in tiers:
        blob = self._get_blob_reference(bsc)
        data = b'hello world'
        await blob.upload_blob(data)

        # Freshly uploaded: tier is service-inferred, no change timestamp.
        blob_ref = await blob.get_blob_properties()
        self.assertIsNotNone(blob_ref.blob_tier)
        self.assertTrue(blob_ref.blob_tier_inferred)
        self.assertIsNone(blob_ref.blob_tier_change_time)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        # Assert: the listing agrees with the inferred-tier state.
        # NOTE(review): asserting on blobs[0] assumes the container holds
        # only this one blob per iteration (it is deleted at loop end).
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob.blob_name)
        self.assertIsNotNone(blobs[0].blob_tier)
        self.assertTrue(blobs[0].blob_tier_inferred)
        self.assertIsNone(blobs[0].blob_tier_change_time)

        await blob.set_standard_blob_tier(tier)

        # After an explicit set: tier matches and is no longer inferred.
        blob_ref2 = await blob.get_blob_properties()
        self.assertEqual(tier, blob_ref2.blob_tier)
        self.assertFalse(blob_ref2.blob_tier_inferred)
        self.assertIsNotNone(blob_ref2.blob_tier_change_time)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        # Assert: the listing reflects the explicit tier as well.
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob.blob_name)
        self.assertEqual(blobs[0].blob_tier, tier)
        self.assertFalse(blobs[0].blob_tier_inferred)
        self.assertIsNotNone(blobs[0].blob_tier_change_time)

        await blob.delete_blob()
async def test_rehydration_status(self, storage_account_name, storage_account_key):
    """Archived blobs being rehydrated report the pending target tier via
    archive_status ('rehydrate-pending-to-cool' / 'rehydrate-pending-to-hot')
    while blob_tier itself still reads Archive.
    """
    bsc = BlobServiceClient(
        self.account_url(storage_account_name, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())
    await self._setup(bsc)
    blob_name = 'rehydration_test_blob_1'
    blob_name2 = 'rehydration_test_blob_2'

    container = bsc.get_container_client(self.container_name)
    data = b'hello world'
    blob = await container.upload_blob(blob_name, data)
    # Archive, then request Cool: this starts a rehydration to Cool.
    await blob.set_standard_blob_tier(StandardBlobTier.Archive)
    await blob.set_standard_blob_tier(StandardBlobTier.Cool)
    blob_ref = await blob.get_blob_properties()
    # While rehydrating, the tier still reads Archive with a pending status.
    self.assertEqual(StandardBlobTier.Archive, blob_ref.blob_tier)
    self.assertEqual("rehydrate-pending-to-cool", blob_ref.archive_status)
    self.assertFalse(blob_ref.blob_tier_inferred)

    blobs = []
    async for b in container.list_blobs():
        blobs.append(b)
    await blob.delete_blob()

    # Assert: the listing shows the same rehydration state.
    self.assertIsNotNone(blobs)
    self.assertGreaterEqual(len(blobs), 1)
    self.assertIsNotNone(blobs[0])
    self.assertNamedItemInContainer(blobs, blob.blob_name)
    self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
    self.assertEqual("rehydrate-pending-to-cool", blobs[0].archive_status)
    self.assertFalse(blobs[0].blob_tier_inferred)

    blob2 = await container.upload_blob(blob_name2, data)
    # Archive, then request Hot: rehydration pending to Hot.
    await blob2.set_standard_blob_tier(StandardBlobTier.Archive)
    await blob2.set_standard_blob_tier(StandardBlobTier.Hot)
    blob_ref2 = await blob2.get_blob_properties()
    self.assertEqual(StandardBlobTier.Archive, blob_ref2.blob_tier)
    self.assertEqual("rehydrate-pending-to-hot", blob_ref2.archive_status)
    self.assertFalse(blob_ref2.blob_tier_inferred)

    blobs = []
    async for b in container.list_blobs():
        blobs.append(b)

    # Assert
    self.assertIsNotNone(blobs)
    self.assertGreaterEqual(len(blobs), 1)
    self.assertIsNotNone(blobs[0])
    self.assertNamedItemInContainer(blobs, blob2.blob_name)
    self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
    self.assertEqual("rehydrate-pending-to-hot", blobs[0].archive_status)
    self.assertFalse(blobs[0].blob_tier_inferred)
def setUp(self):
    """Open an async BlobServiceClient (entering its context from this
    synchronous setUp) and reset the list of created containers.
    """
    super(StorageContainerTestAsync, self).setUp()
    account_url = self._get_account_url()
    shared_key = self._get_shared_key_credential()
    self.bsc = BlobServiceClient(account_url, credential=shared_key, transport=AiohttpTestTransport())
    # setUp is synchronous, so drive the async __aenter__ via the loop.
    event_loop = asyncio.get_event_loop()
    event_loop.run_until_complete(self.bsc.__aenter__())
    self.test_containers = []
async def _setup(self, storage_account, key):
    """Create the service client, ensure the test container exists (live
    runs only), and prepare 1 KiB of random payload data.
    """
    self.bsc = BlobServiceClient(self.account_url(storage_account, "blob"), credential=key)
    self.container_name = self.get_resource_name("container")
    if self.is_live:
        container_client = self.bsc.get_container_client(self.container_name)
        try:
            await container_client.create_container(timeout=5)
        except ResourceExistsError:
            # Container left over from a previous run -- that is fine.
            pass
    self.byte_data = self.get_random_bytes(1024)
async def download_blob_using_blobservice(account_name: str, credential: DefaultAzureCredential, container_name: str, blob_name: str, file_stream: io.BytesIO):
    """Download a blob's content into ``file_stream``.

    Args:
        account_name: storage account name (without scheme or suffix).
        credential: Azure AD credential used to authorize the request.
        container_name: container holding the blob.
        blob_name: name of the blob to download.
        file_stream: writable binary stream that receives the blob bytes.

    Raises:
        KeyError: if the blob does not exist (translated from
            ResourceNotFoundError, with the cause chained).
        ClientAuthenticationError: propagated unchanged.
    """
    try:
        # Fix: BlobServiceClient requires a full account URL -- a bare
        # host name fails the SDK's URL parsing, so prepend the scheme.
        # NOTE: the 1s connection/read timeouts were observed not to take
        # effect on this code path -- kept as-is pending investigation.
        blob_service = BlobServiceClient(
            f"https://{account_name}.blob.core.windows.net",
            credential=credential,
            connection_timeout=1,
            read_timeout=1)
        blob_client = blob_service.get_blob_client(container_name, blob_name)
        storage_stream_downloader = await blob_client.download_blob()
        await storage_stream_downloader.readinto(file_stream)
    except ResourceNotFoundError as err:
        # Preserve the cause chain for easier debugging.
        raise KeyError(blob_name) from err
    except ClientAuthenticationError:
        # Auth failures are the caller's concern; re-raise untouched.
        raise
def setUp(self):
    """Configure an append-blob service client with a reduced block size."""
    super(StorageAppendBlobTestAsync, self).setUp()
    account_url = self._get_account_url()
    shared_key = self._get_shared_key_credential()
    # 4 KiB blocks keep the chunked append paths fast to exercise.
    self.bsc = BlobServiceClient(
        account_url,
        credential=shared_key,
        max_block_size=4 * 1024,
        transport=AiohttpTestTransport())
    self.config = self.bsc._config
    self.container_name = self.get_resource_name('utcontainer')
def setUp(self):
    """Create a service client wired with an exponential retry policy."""
    super(StorageBlobRetryTestAsync, self).setUp()
    account_url = self._get_account_url()
    shared_key = self._get_shared_key_credential()
    # Up to 3 retries, backing off exponentially from 1s.
    backoff_policy = ExponentialRetry(initial_backoff=1, increment_base=2, retry_total=3)
    self.bs = BlobServiceClient(account_url, credential=shared_key, retry_policy=backoff_policy)
    self.container_name = self.get_resource_name('utcontainer')
async def test_set_static_webprops_no_impact_other_props(
        self, resource_group, location, storage_account, storage_account_key):
    """Setting only static-website properties must not disturb previously
    configured CORS rules on the service.
    """
    bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())
    # One minimal CORS rule and one fully-specified rule.
    cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])

    allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
    allowed_methods = ['GET', 'PUT']
    max_age_in_seconds = 500
    exposed_headers = [
        "x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc",
        "x-ms-meta-bcd"
    ]
    allowed_headers = [
        "x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz",
        "x-ms-meta-foo"
    ]
    cors_rule2 = CorsRule(allowed_origins,
                          allowed_methods,
                          max_age_in_seconds=max_age_in_seconds,
                          exposed_headers=exposed_headers,
                          allowed_headers=allowed_headers)

    cors = [cors_rule1, cors_rule2]

    # Act
    await bsc.set_service_properties(cors=cors)

    # Assert cors is updated
    received_props = await bsc.get_service_properties()
    self._assert_cors_equal(received_props['cors'], cors)

    # Fresh client; now update ONLY the static-website settings.
    bsc = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())

    static_website = StaticWebsite(
        enabled=True,
        index_document="index.html",
        error_document404_path="errors/error/404error.html")

    # Act to set static website
    await bsc.set_service_properties(static_website=static_website)

    # Assert static website was updated and cors was left unchanged
    received_props = await bsc.get_service_properties()
    self._assert_static_website_equal(received_props['static_website'], static_website)
    self._assert_cors_equal(received_props['cors'], cors)
async def test_response_callback_async(self, resource_group, location, storage_account, storage_account_key):
    """A raw_response_hook can rewrite the pipeline response in place;
    here every response is forced to an empty 200 so the properties call
    succeeds regardless of what the service returned.
    """
    # Arrange
    service = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())
    name = self.get_resource_name('cont')
    container = service.get_container_client(name)

    # Act: the hook overwrites status and headers on the raw response.
    def callback(response):
        response.http_response.status_code = 200
        response.http_response.headers = {}

    # Assert
    exists = await container.get_container_properties(raw_response_hook=callback)
    self.assertTrue(exists)
def __init__(self, settings: BlobStorageSettings):
    """Create a container client from the given blob-storage settings.

    Uses the explicit connection string when provided, otherwise builds
    one from the account name and key in the settings.

    Raises:
        ValueError: if settings.container_name is empty. (Was a bare
            Exception; ValueError is more precise and remains caught by
            callers handling Exception.)
    """
    if not settings.container_name:
        raise ValueError("Container name is required.")

    # Prefer the ready-made connection string; otherwise derive one from
    # the account name/key. Single call site keeps the branches in sync.
    if settings.connection_string:
        connection_string = settings.connection_string
    else:
        connection_string = convert_account_name_and_key_to_connection_string(settings)
    blob_service_client = BlobServiceClient.from_connection_string(connection_string)

    self.__container_client = blob_service_client.get_container_client(
        settings.container_name)
    # Lazily initialized elsewhere (e.g. container creation) on first use.
    self.__initialized = False
async def _test_container_sample_async(self):
    """Sample: create a ContainerClient from a service client and from a
    SAS URL, then create, inspect, and delete a container.
    """
    # [START create_container_client_from_service]
    # Instantiate a BlobServiceClient using a connection string
    from azure.storage.blob.aio import BlobServiceClient
    blob_service_client = BlobServiceClient.from_connection_string(
        self.connection_string)

    # Instantiate a ContainerClient
    container_client = blob_service_client.get_container_client(
        "mynewcontainerasync")
    # [END create_container_client_from_service]

    # [START create_container_client_sasurl]
    from azure.storage.blob.aio import ContainerClient

    # Fix: was `sas_url = sas_url = "..."` -- a duplicated assignment.
    sas_url = "https://account.blob.core.windows.net/mycontainer?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D"
    container = ContainerClient.from_container_url(sas_url)
    # [END create_container_client_sasurl]

    try:
        # [START create_container]
        await container_client.create_container()
        # [END create_container]

        # [START get_container_properties]
        properties = await container_client.get_container_properties()
        # [END get_container_properties]
        assert properties is not None
    finally:
        # [START delete_container]
        await container_client.delete_container()
async def _test_list_blobs_in_container_async(self):
    """Sample: create a container, upload a blob into it, list the
    container's blobs, then delete the container.
    """
    # Instantiate a BlobServiceClient using a connection string
    from azure.storage.blob.aio import BlobServiceClient
    blob_service_client = BlobServiceClient.from_connection_string(
        self.connection_string)

    # Instantiate a ContainerClient
    container_client = blob_service_client.get_container_client(
        "myblobscontainerasync")

    # Create new Container
    await container_client.create_container()

    # [START upload_blob_to_container]
    with open(SOURCE_FILE, "rb") as data:
        blob_client = await container_client.upload_blob(name="blobby", data=data)

    properties = await blob_client.get_blob_properties()
    # [END upload_blob_to_container]

    # [START list_blobs_in_container]
    blobs_list = []
    async for blob in container_client.list_blobs():
        blobs_list.append(blob)
    # [END list_blobs_in_container]

    # NOTE(review): a non-None check always passes for a list built above;
    # asserting on its length would be stronger -- left as-is (sample code).
    assert blobs_list is not None

    # Delete container
    await container_client.delete_container()
async def _test_set_metadata_on_container_async(self):
    """Sample: set metadata on a container and read it back through
    get_container_properties.
    """
    # Instantiate a BlobServiceClient using a connection string
    from azure.storage.blob.aio import BlobServiceClient
    blob_service_client = BlobServiceClient.from_connection_string(
        self.connection_string)

    # Instantiate a ContainerClient
    container_client = blob_service_client.get_container_client(
        "mymetadatacontainerasync")

    try:
        # Create new Container
        await container_client.create_container()

        # [START set_container_metadata]
        # Create key, value pairs for metadata
        metadata = {'type': 'test'}

        # Set metadata on the container
        await container_client.set_container_metadata(metadata=metadata)
        # [END set_container_metadata]

        # Get container properties; the stored metadata must round-trip.
        properties = (await container_client.get_container_properties()).metadata

        assert properties == metadata

    finally:
        # Delete container
        await container_client.delete_container()
async def list_blobs(self):
    """Asynchronously yield every blob under ``self.path`` in the
    configured container, closing the service client when done.
    """
    conn_str = self._connection_string
    async with AsyncBlobServiceClient.from_connection_string(conn_str) as service:
        container_client: AsyncContainerClient = service.get_container_client(
            self.container)
        async for item in container_client.list_blobs(name_starts_with=self.path):
            yield item
def setUp(self):
    """Build a block-blob service client with shrunken chunk sizes."""
    super(StorageBlockBlobTestAsync, self).setUp()
    account_url = self._get_account_url()
    # Small chunk sizes exercise the chunked-upload code paths quickly;
    # full-size chunks would make these tests far too slow.
    self.bsc = BlobServiceClient(
        account_url,
        credential=self.settings.STORAGE_ACCOUNT_KEY,
        connection_data_block_size=4 * 1024,
        max_single_put_size=32 * 1024,
        max_block_size=4 * 1024,
        transport=AiohttpTestTransport())
    self.config = self.bsc._config
    self.container_name = self.get_resource_name('utcontainer')
async def test_set_cors(self, resource_group, location, storage_account, storage_account_key):
    """CORS rules set on the service are returned unchanged when the
    service properties are read back.
    """
    service = BlobServiceClient(
        self.account_url(storage_account, "blob"),
        credential=storage_account_key,
        transport=AiohttpTestTransport())

    # One minimal rule plus one fully-specified rule.
    simple_rule = CorsRule(['www.xyz.com'], ['GET'])
    detailed_rule = CorsRule(
        ['www.xyz.com', "www.ab.com", "www.bc.com"],
        ['GET', 'PUT'],
        max_age_in_seconds=500,
        exposed_headers=[
            "x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc",
            "x-ms-meta-bcd"
        ],
        allowed_headers=[
            "x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz",
            "x-ms-meta-foo"
        ])
    cors = [simple_rule, detailed_rule]

    # Act
    await service.set_service_properties(cors=cors)

    # Assert: the rules round-trip intact.
    received_props = await service.get_service_properties()
    self._assert_cors_equal(received_props['cors'], cors)