Example #1
    async def test_list_blobs(self, resource_group, location, storage_account,
                              storage_account_key):
        # Arrange
        bsc = BlobServiceClient(self.account_url(storage_account, "blob"),
                                credential=storage_account_key,
                                connection_data_block_size=1024,
                                max_single_put_size=1024,
                                min_large_block_upload_threshold=1024,
                                max_block_size=1024,
                                max_page_size=1024)
        await self._setup(bsc)
        blob_client, _ = await self._create_block_blob(
            bsc,
            blob_name="blockblob",
            data=b'AAABBBCCC',
            encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        await self._create_append_blob(
            bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)

        container_client = bsc.get_container_client(self.container_name)

        generator = container_client.list_blobs(include="metadata")
        async for blob in generator:
            self.assertIsNotNone(blob)
            # Assert: every listed blob has encryption_scope
            self.assertEqual(blob.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)

        self._teardown(bsc)
Example #2
async def return_gallery_blobs(start=0, limit=None):
    blob_service_client = BlobServiceClient(
        account_url=f"https://{os.getenv('AZURE_STORAGE_ACCOUNT')}.blob.core.windows.net/",
        credential=os.getenv("AZURE_STORAGE_KEY"),
    )
    container_client = blob_service_client.get_container_client(
        os.getenv("AZURE_STORAGE_VIDEO_CONTAINER")
    )
    blobs_list = []
    async for blob in container_client.list_blobs(  # pylint: disable=E1133
        include=["metadata"]
    ):
        metadata = blob.metadata
        created_at = blob.creation_time
        blobs_list.append(
            {
                "uuid": metadata["uuid"],
                "image_url": f"/get_thumbnail?video_uuid={metadata['uuid']}",
                "uploader_username": metadata["uploader_username"],
                "uploader_id": metadata["uploader_id"],
                "title": metadata["title"],
                "badge": metadata["badge"],
                "elapsed_time": date_funcs.elapsed_time_str(created_at),
            }
        )
    await container_client.close()
    await blob_service_client.close()
    return blobs_list
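
A minimal caller sketch for the coroutine above, assuming the same environment variables are set and that the azure.storage.blob.aio client is in use; the driver below is hypothetical and only prints a couple of fields from each returned entry.

import asyncio

# Hypothetical driver for return_gallery_blobs(): fetch the gallery entries
# and print one summary line per video. Assumes AZURE_STORAGE_ACCOUNT,
# AZURE_STORAGE_KEY and AZURE_STORAGE_VIDEO_CONTAINER are set.
async def main():
    blobs = await return_gallery_blobs()
    for entry in blobs:
        print(entry["title"], "-", entry["elapsed_time"])

if __name__ == "__main__":
    asyncio.run(main())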
Example #3
    def test_service_client_api_version_property(self):
        service_client = BlobServiceClient(
            "https://foo.blob.core.windows.net/account", credential="fake_key")
        self.assertEqual(service_client.api_version, self.api_version_2)
        self.assertEqual(service_client._client._config.version,
                         self.api_version_2)

        with pytest.raises(AttributeError):
            service_client.api_version = "foo"

        service_client = BlobServiceClient(
            "https://foo.blob.core.windows.net/account",
            credential="fake_key",
            api_version=self.api_version_1)
        self.assertEqual(service_client.api_version, self.api_version_1)
        self.assertEqual(service_client._client._config.version,
                         self.api_version_1)

        container_client = service_client.get_container_client("foo")
        self.assertEqual(container_client.api_version, self.api_version_1)
        self.assertEqual(container_client._client._config.version,
                         self.api_version_1)

        blob_client = service_client.get_blob_client("foo", "bar")
        self.assertEqual(blob_client.api_version, self.api_version_1)
        self.assertEqual(blob_client._client._config.version,
                         self.api_version_1)
Example #4
    async def test_sas_signature_is_scrubbed_off(self, storage_account_name,
                                                 storage_account_key):
        # Test can only run live

        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"),
                                storage_account_key)
        await self._setup(bsc)
        # Arrange
        container = bsc.get_container_client(self.container_name)
        token = generate_container_sas(
            container.account_name,
            container.container_name,
            account_key=container.credential.account_key,
            permission=ContainerSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        # parse out the signed signature
        token_components = parse_qs(token)
        signed_signature = quote(
            token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

        sas_service = ContainerClient.from_container_url(container.url,
                                                         credential=token)

        # Act
        with LogCaptured(self) as log_captured:
            await sas_service.get_account_information(logging_enable=True)
            log_as_str = log_captured.getvalue()

            # Assert
            # make sure the query parameter 'sig' is logged, but its value is not
            self.assertTrue(
                QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
            self.assertFalse(signed_signature in log_as_str)
Example #5
    async def test_logging_request_and_response_body(self,
                                                     storage_account_name,
                                                     storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"),
                                storage_account_key,
                                transport=AiohttpTestTransport(),
                                logging_enable=True)
        await self._setup(bsc)
        # Arrange
        container = bsc.get_container_client(self.container_name)
        request_body = 'testloggingbody'
        blob_name = self.get_resource_name("testloggingblob")
        blob_client = container.get_blob_client(blob_name)
        await blob_client.upload_blob(request_body, overwrite=True)
        # Act
        with LogCaptured(self) as log_captured:
            await blob_client.download_blob()
            log_as_str = log_captured.getvalue()
            self.assertFalse(request_body in log_as_str)

        with LogCaptured(self) as log_captured:
            await blob_client.upload_blob(request_body,
                                          overwrite=True,
                                          logging_body=True)
            log_as_str = log_captured.getvalue()
            self.assertTrue(request_body in log_as_str)
            self.assertEqual(log_as_str.count(request_body), 1)
Example #6
async def validate_container(client: BlobServiceClient,
                             container: str) -> None:
    """ This makes sure that the given container exists and we have access to it.
    Will raise an error which is caught further out. """
    container_client = client.get_container_client(container)
    props = await container_client.get_container_properties()
    logging.info(props)
    return None
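
A hedged usage sketch for validate_container, assuming an async BlobServiceClient built from a connection string; the environment variable and container name below are placeholders, not part of the original example.

import asyncio
import os

from azure.storage.blob.aio import BlobServiceClient

# Placeholder driver: build an async client and validate access to a
# container named "videos" before doing any real work. The connection
# string variable name is an assumption for illustration only.
async def main():
    client = BlobServiceClient.from_connection_string(
        os.environ["AZURE_STORAGE_CONNECTION_STRING"])
    async with client:
        await validate_container(client, "videos")

if __name__ == "__main__":
    asyncio.run(main())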
Example #7
    async def test_standard_blob_tier_set_tier_api(self, resource_group,
                                                   location, storage_account,
                                                   storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account, "blob"),
                                credential=storage_account_key,
                                transport=AiohttpTestTransport())

        await self._setup(bsc)
        container = bsc.get_container_client(self.container_name)
        tiers = [
            StandardBlobTier.Archive, StandardBlobTier.Cool,
            StandardBlobTier.Hot
        ]

        for tier in tiers:
            blob = self._get_blob_reference(bsc)
            data = b'hello world'
            await blob.upload_blob(data)

            blob_ref = await blob.get_blob_properties()
            self.assertIsNotNone(blob_ref.blob_tier)
            self.assertTrue(blob_ref.blob_tier_inferred)
            self.assertIsNone(blob_ref.blob_tier_change_time)

            blobs = []
            async for b in container.list_blobs():
                blobs.append(b)

            # Assert
            self.assertIsNotNone(blobs)
            self.assertGreaterEqual(len(blobs), 1)
            self.assertIsNotNone(blobs[0])
            self.assertNamedItemInContainer(blobs, blob.blob_name)
            self.assertIsNotNone(blobs[0].blob_tier)
            self.assertTrue(blobs[0].blob_tier_inferred)
            self.assertIsNone(blobs[0].blob_tier_change_time)

            await blob.set_standard_blob_tier(tier)

            blob_ref2 = await blob.get_blob_properties()
            self.assertEqual(tier, blob_ref2.blob_tier)
            self.assertFalse(blob_ref2.blob_tier_inferred)
            self.assertIsNotNone(blob_ref2.blob_tier_change_time)

            blobs = []
            async for b in container.list_blobs():
                blobs.append(b)

            # Assert
            self.assertIsNotNone(blobs)
            self.assertGreaterEqual(len(blobs), 1)
            self.assertIsNotNone(blobs[0])
            self.assertNamedItemInContainer(blobs, blob.blob_name)
            self.assertEqual(blobs[0].blob_tier, tier)
            self.assertFalse(blobs[0].blob_tier_inferred)
            self.assertIsNotNone(blobs[0].blob_tier_change_time)

            await blob.delete_blob()
Example #8
    async def test_rehydration_status(self, storage_account_name,
                                      storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"),
                                credential=storage_account_key,
                                transport=AiohttpTestTransport())
        await self._setup(bsc)
        blob_name = 'rehydration_test_blob_1'
        blob_name2 = 'rehydration_test_blob_2'
        container = bsc.get_container_client(self.container_name)

        data = b'hello world'
        blob = await container.upload_blob(blob_name, data)
        await blob.set_standard_blob_tier(StandardBlobTier.Archive)
        await blob.set_standard_blob_tier(StandardBlobTier.Cool)

        blob_ref = await blob.get_blob_properties()
        self.assertEqual(StandardBlobTier.Archive, blob_ref.blob_tier)
        self.assertEqual("rehydrate-pending-to-cool", blob_ref.archive_status)
        self.assertFalse(blob_ref.blob_tier_inferred)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        await blob.delete_blob()

        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob.blob_name)
        self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
        self.assertEqual("rehydrate-pending-to-cool", blobs[0].archive_status)
        self.assertFalse(blobs[0].blob_tier_inferred)

        blob2 = await container.upload_blob(blob_name2, data)
        await blob2.set_standard_blob_tier(StandardBlobTier.Archive)
        await blob2.set_standard_blob_tier(StandardBlobTier.Hot)

        blob_ref2 = await blob2.get_blob_properties()
        self.assertEqual(StandardBlobTier.Archive, blob_ref2.blob_tier)
        self.assertEqual("rehydrate-pending-to-hot", blob_ref2.archive_status)
        self.assertFalse(blob_ref2.blob_tier_inferred)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob2.blob_name)
        self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
        self.assertEqual("rehydrate-pending-to-hot", blobs[0].archive_status)
        self.assertFalse(blobs[0].blob_tier_inferred)
Example #9
    async def test_response_callback_async(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        service = BlobServiceClient(self.account_url(storage_account, "blob"), credential=storage_account_key, transport=AiohttpTestTransport())
        name = self.get_resource_name('cont')
        container = service.get_container_client(name)

        # Act
        def callback(response):
            response.http_response.status_code = 200
            response.http_response.headers = {}

        # Assert
        exists = await container.get_container_properties(raw_response_hook=callback)
        self.assertTrue(exists)
Example #10
    async def test_authorization_is_scrubbed_off(self, storage_account_name,
                                                 storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"),
                                storage_account_key,
                                transport=AiohttpTestTransport())
        await self._setup(bsc)
        # Arrange
        container = bsc.get_container_client(self.container_name)
        # Act
        with LogCaptured(self) as log_captured:
            await container.get_container_properties(logging_enable=True)
            log_as_str = log_captured.getvalue()
            # Assert
            # make sure the Authorization header is logged, but its value is not
            # (the keyword "SharedKey" appears in the Authorization header's value)
            self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str)
            self.assertFalse('SharedKey' in log_as_str)
Example #11
class StorageContainerTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageContainerTestAsync, self).setUp()
        url = self._get_account_url()
        credential = self._get_shared_key_credential()
        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     transport=AiohttpTestTransport())
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.bsc.__aenter__())
        self.test_containers = []

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            for container_name in self.test_containers:
                try:
                    container = self.bsc.get_container_client(container_name)
                    loop.run_until_complete(container.delete_container())
                except HttpResponseError:
                    try:
                        lease = LeaseClient(container)
                        loop.run_until_complete(lease.break_lease(0))
                        loop.run_until_complete(container.delete_container())
                    except:
                        pass
                except:
                    pass
            loop.run_until_complete(self.bsc.__aexit__())
        return super(StorageContainerTestAsync, self).tearDown()

    #--Helpers-----------------------------------------------------------------
    def _get_container_reference(self, prefix=TEST_CONTAINER_PREFIX):
        container_name = self.get_resource_name(prefix)
        self.test_containers.append(container_name)
        return container_name

    async def _create_container(self, prefix=TEST_CONTAINER_PREFIX):
        container_name = self._get_container_reference(prefix)
        container = self.bsc.get_container_client(container_name)
        try:
            await container.create_container()
        except ResourceExistsError:
            pass
        return container

    #--Test cases for containers -----------------------------------------

    async def _test_create_container(self):
        # Arrange
        container_name = self._get_container_reference()

        # Act
        container = self.bsc.get_container_client(container_name)
        created = await container.create_container()

        # Assert
        self.assertTrue(created)

    @record
    def test_create_container(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_container())

    async def _test_create_container_with_already_existing_container_fail_on_exist(
            self):
        # Arrange
        container_name = self._get_container_reference()

        # Act
        container = self.bsc.get_container_client(container_name)
        created = await container.create_container()
        with self.assertRaises(HttpResponseError):
            await container.create_container()

        # Assert
        self.assertTrue(created)

    @record
    def test_create_container_with_already_existing_container_fail_on_exist(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_container_with_already_existing_container_fail_on_exist())

    async def _test_create_container_with_public_access_container(self):
        # Arrange
        container_name = self._get_container_reference()

        # Act
        container = self.bsc.get_container_client(container_name)
        created = await container.create_container(public_access='container')

        # Assert
        self.assertTrue(created)

    @record
    def test_create_container_with_public_access_container(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_container_with_public_access_container())

    async def _test_create_container_with_public_access_blob(self):
        # Arrange
        container_name = self._get_container_reference()

        # Act
        container = self.bsc.get_container_client(container_name)
        created = await container.create_container(public_access='blob')

        blob = container.get_blob_client("blob1")
        await blob.upload_blob(u'xyz')

        anonymous_service = BlobClient(self._get_account_url(),
                                       container=container_name,
                                       blob="blob1")

        # Assert
        self.assertTrue(created)
        await anonymous_service.download_blob()

    @record
    def test_create_container_with_public_access_blob(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_container_with_public_access_blob())

    async def _test_create_container_with_metadata(self):
        # Arrange
        container_name = self._get_container_reference()
        metadata = {'hello': 'world', 'number': '42'}

        # Act
        container = self.bsc.get_container_client(container_name)
        created = await container.create_container(metadata)

        # Assert
        self.assertTrue(created)
        md_cr = await container.get_container_properties()
        md = md_cr.metadata
        self.assertDictEqual(md, metadata)

    @record
    def test_create_container_with_metadata(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_container_with_metadata())

    async def _test_container_exists_with_lease(self):
        # Arrange
        container = await self._create_container()
        await container.acquire_lease()

        # Act
        exists = await container.get_container_properties()

        # Assert
        self.assertTrue(exists)

    @record
    def test_container_exists_with_lease(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_container_exists_with_lease())

    async def _test_unicode_create_container_unicode_name(self):
        # Arrange
        container_name = u'啊齄丂狛狜'

        container = self.bsc.get_container_client(container_name)
        # Act
        with self.assertRaises(HttpResponseError):
            # not supported - container name must be alphanumeric, lowercase
            await container.create_container()

        # Assert

    @record
    def test_unicode_create_container_unicode_name(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_unicode_create_container_unicode_name())

    async def _test_list_containers(self):
        # Arrange
        container = await self._create_container()

        # Act
        containers = []
        async for c in self.bsc.list_containers():
            containers.append(c)

        # Assert
        self.assertIsNotNone(containers)
        self.assertGreaterEqual(len(containers), 1)
        self.assertIsNotNone(containers[0])
        self.assertNamedItemInContainer(containers, container.container_name)
        self.assertIsNotNone(containers[0].has_immutability_policy)
        self.assertIsNotNone(containers[0].has_legal_hold)

    @record
    def test_list_containers(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_containers())

    async def _test_list_containers_with_prefix(self):
        # Arrange
        container = await self._create_container()

        # Act
        containers = []
        async for c in self.bsc.list_containers(
                name_starts_with=container.container_name):
            containers.append(c)

        # Assert
        self.assertIsNotNone(containers)
        self.assertEqual(len(containers), 1)
        self.assertIsNotNone(containers[0])
        self.assertEqual(containers[0].name, container.container_name)
        self.assertIsNone(containers[0].metadata)

    @record
    def test_list_containers_with_prefix(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_containers_with_prefix())

    async def _test_list_containers_with_include_metadata(self):
        # Arrange
        container = await self._create_container()
        metadata = {'hello': 'world', 'number': '42'}
        resp = await container.set_container_metadata(metadata)

        # Act
        containers = []
        async for c in self.bsc.list_containers(
                name_starts_with=container.container_name,
                include_metadata=True):
            containers.append(c)

        # Assert
        self.assertIsNotNone(containers)
        self.assertGreaterEqual(len(containers), 1)
        self.assertIsNotNone(containers[0])
        self.assertNamedItemInContainer(containers, container.container_name)
        self.assertDictEqual(containers[0].metadata, metadata)

    @record
    def test_list_containers_with_include_metadata(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_list_containers_with_include_metadata())

    async def _test_list_containers_with_public_access(self):
        # Arrange
        container = await self._create_container()
        resp = await container.set_container_access_policy(
            public_access=PublicAccess.Blob)

        # Act
        containers = []
        async for c in self.bsc.list_containers(
                name_starts_with=container.container_name):
            containers.append(c)

        # Assert
        self.assertIsNotNone(containers)
        self.assertGreaterEqual(len(containers), 1)
        self.assertIsNotNone(containers[0])
        self.assertNamedItemInContainer(containers, container.container_name)
        self.assertEqual(containers[0].public_access, PublicAccess.Blob)

    @record
    def test_list_containers_with_public_access(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_list_containers_with_public_access())

    async def _test_list_containers_with_num_results_and_marker(self):
        # Arrange
        prefix = 'listcontainer'
        container_names = []
        for i in range(0, 4):
            cr = await self._create_container(prefix + str(i))
            container_names.append(cr.container_name)

        container_names.sort()

        # Act
        generator1 = self.bsc.list_containers(name_starts_with=prefix,
                                              results_per_page=2).by_page()
        containers1 = []
        async for c in await generator1.__anext__():
            containers1.append(c)

        generator2 = self.bsc.list_containers(
            name_starts_with=prefix,
            results_per_page=2).by_page(generator1.continuation_token)
        containers2 = []
        async for c in await generator2.__anext__():
            containers2.append(c)

        # Assert
        self.assertIsNotNone(containers1)
        self.assertEqual(len(containers1), 2)
        self.assertNamedItemInContainer(containers1, container_names[0])
        self.assertNamedItemInContainer(containers1, container_names[1])
        self.assertIsNotNone(containers2)
        self.assertEqual(len(containers2), 2)
        self.assertNamedItemInContainer(containers2, container_names[2])
        self.assertNamedItemInContainer(containers2, container_names[3])

    @record
    def test_list_containers_with_num_results_and_marker(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_list_containers_with_num_results_and_marker())

    async def _test_set_container_metadata(self):
        # Arrange
        metadata = {'hello': 'world', 'number': '43'}
        container = await self._create_container()

        # Act
        await container.set_container_metadata(metadata)
        md = await container.get_container_properties()
        metadata_from_response = md.metadata
        # Assert
        self.assertDictEqual(metadata_from_response, metadata)

    @record
    def test_set_container_metadata(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_set_container_metadata())

    async def _test_set_container_metadata_with_lease_id(self):
        # Arrange
        metadata = {'hello': 'world', 'number': '43'}
        container = await self._create_container()
        lease_id = await container.acquire_lease()

        # Act
        await container.set_container_metadata(metadata, lease_id)

        # Assert
        md = await container.get_container_properties()
        md = md.metadata
        self.assertDictEqual(md, metadata)

    @record
    def test_set_container_metadata_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_metadata_with_lease_id())

    async def _test_set_container_metadata_with_non_existing_container(self):
        # Arrange
        container_name = self._get_container_reference()
        container = self.bsc.get_container_client(container_name)

        # Act
        with self.assertRaises(ResourceNotFoundError):
            await container.set_container_metadata({
                'hello': 'world',
                'number': '43'
            })

        # Assert

    @record
    def test_set_container_metadata_with_non_existing_container(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_metadata_with_non_existing_container())

    async def _test_get_container_metadata(self):
        # Arrange
        metadata = {'hello': 'world', 'number': '42'}
        container = await self._create_container()
        await container.set_container_metadata(metadata)

        # Act
        md_cr = await container.get_container_properties()
        md = md_cr.metadata

        # Assert
        self.assertDictEqual(md, metadata)

    @record
    def test_get_container_metadata(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_container_metadata())

    async def _test_get_container_metadata_with_lease_id(self):
        # Arrange
        metadata = {'hello': 'world', 'number': '42'}
        container = await self._create_container()
        await container.set_container_metadata(metadata)
        lease_id = await container.acquire_lease()

        # Act
        md = await container.get_container_properties(lease_id)
        md = md.metadata

        # Assert
        self.assertDictEqual(md, metadata)

    @record
    def test_get_container_metadata_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_container_metadata_with_lease_id())

    async def _test_get_container_properties(self):
        # Arrange
        metadata = {'hello': 'world', 'number': '42'}
        container = await self._create_container()
        await container.set_container_metadata(metadata)

        # Act
        props = await container.get_container_properties()

        # Assert
        self.assertIsNotNone(props)
        self.assertDictEqual(props.metadata, metadata)
        # self.assertEqual(props.lease.duration, 'infinite')
        # self.assertEqual(props.lease.state, 'leased')
        # self.assertEqual(props.lease.status, 'locked')
        # self.assertEqual(props.public_access, 'container')
        self.assertIsNotNone(props.has_immutability_policy)
        self.assertIsNotNone(props.has_legal_hold)

    @record
    def test_get_container_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_container_properties())

    async def _test_get_container_properties_with_lease_id(self):
        # Arrange
        metadata = {'hello': 'world', 'number': '42'}
        container = await self._create_container()
        await container.set_container_metadata(metadata)
        lease_id = await container.acquire_lease()

        # Act
        props = await container.get_container_properties(lease_id)
        await lease_id.break_lease()

        # Assert
        self.assertIsNotNone(props)
        self.assertDictEqual(props.metadata, metadata)
        self.assertEqual(props.lease.duration, 'infinite')
        self.assertEqual(props.lease.state, 'leased')
        self.assertEqual(props.lease.status, 'locked')

    @record
    def test_get_container_properties_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_container_properties_with_lease_id())

    async def _test_get_container_acl(self):
        # Arrange
        container = await self._create_container()

        # Act
        acl = await container.get_container_access_policy()

        # Assert
        self.assertIsNotNone(acl)
        self.assertIsNone(acl.get('public_access'))
        self.assertEqual(len(acl.get('signed_identifiers')), 0)

    @record
    def test_get_container_acl(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_container_acl())

    async def _test_get_container_acl_with_lease_id(self):
        # Arrange
        container = await self._create_container()
        lease_id = await container.acquire_lease()

        # Act
        acl = await container.get_container_access_policy(lease_id)

        # Assert
        self.assertIsNotNone(acl)
        self.assertIsNone(acl.get('public_access'))

    @record
    def test_get_container_acl_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_container_acl_with_lease_id())

    async def _test_set_container_acl(self):
        # Arrange
        container = await self._create_container()

        # Act
        response = await container.set_container_access_policy()

        self.assertIsNotNone(response.get('etag'))
        self.assertIsNotNone(response.get('last_modified'))

        # Assert
        acl = await container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual(len(acl.get('signed_identifiers')), 0)
        self.assertIsNone(acl.get('public_access'))

    @record
    def test_set_container_acl(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_set_container_acl())

    async def _test_set_container_acl_with_one_signed_identifier(self):
        # Arrange
        from dateutil.tz import tzutc
        container = await self._create_container()

        # Act
        access_policy = AccessPolicy(permission=ContainerPermissions.READ,
                                     expiry=datetime.utcnow() +
                                     timedelta(hours=1),
                                     start=datetime.utcnow())
        signed_identifier = {'testid': access_policy}

        response = await container.set_container_access_policy(
            signed_identifier)

        # Assert
        self.assertIsNotNone(response.get('etag'))
        self.assertIsNotNone(response.get('last_modified'))

    @record
    def test_set_container_acl_with_one_signed_identifier(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_acl_with_one_signed_identifier())

    async def _test_set_container_acl_with_lease_id(self):
        # Arrange
        container = await self._create_container()
        lease_id = await container.acquire_lease()

        # Act
        await container.set_container_access_policy(lease=lease_id)

        # Assert
        acl = await container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertIsNone(acl.get('public_access'))

    @record
    def test_set_container_acl_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_set_container_acl_with_lease_id())

    async def _test_set_container_acl_with_public_access(self):
        # Arrange
        container = await self._create_container()

        # Act
        await container.set_container_access_policy(public_access='container')

        # Assert
        acl = await container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual('container', acl.get('public_access'))

    @record
    def test_set_container_acl_with_public_access(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_acl_with_public_access())

    async def _test_set_container_acl_with_empty_signed_identifiers(self):
        # Arrange
        container = await self._create_container()

        # Act
        await container.set_container_access_policy(signed_identifiers=dict())

        # Assert
        acl = await container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual(len(acl.get('signed_identifiers')), 0)
        self.assertIsNone(acl.get('public_access'))

    @record
    def test_set_container_acl_with_empty_signed_identifiers(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_acl_with_empty_signed_identifiers())

    async def _test_set_container_acl_with_signed_identifiers(self):
        # Arrange
        container = await self._create_container()

        # Act
        access_policy = AccessPolicy(
            permission=ContainerPermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1),
            start=datetime.utcnow() - timedelta(minutes=1))
        identifiers = {'testid': access_policy}
        await container.set_container_access_policy(identifiers)

        # Assert
        acl = await container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual('testid', acl.get('signed_identifiers')[0].id)
        self.assertIsNone(acl.get('public_access'))

    @record
    def test_set_container_acl_with_signed_identifiers(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_acl_with_signed_identifiers())

    async def _test_set_container_acl_with_empty_identifiers(self):
        # Arrange
        container = await self._create_container()
        identifiers = {i: None for i in range(0, 3)}

        # Act
        await container.set_container_access_policy(identifiers)

        # Assert
        acl = await container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual(len(acl.get('signed_identifiers')), 3)
        self.assertEqual('0', acl.get('signed_identifiers')[0].id)
        self.assertIsNone(acl.get('signed_identifiers')[0].access_policy)
        self.assertIsNone(acl.get('public_access'))

    @record
    def test_set_container_acl_with_empty_identifiers(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_acl_with_empty_identifiers())

    async def _test_set_container_acl_with_three_identifiers(self):
        # Arrange
        container = await self._create_container()
        access_policy = AccessPolicy(
            permission=ContainerPermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1),
            start=datetime.utcnow() - timedelta(minutes=1))
        identifiers = {i: access_policy for i in range(2)}

        # Act
        await container.set_container_access_policy(identifiers)

        # Assert
        acl = await container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual(len(acl.get('signed_identifiers')), 2)
        self.assertEqual('0', acl.get('signed_identifiers')[0].id)
        self.assertIsNotNone(acl.get('signed_identifiers')[0].access_policy)
        self.assertIsNone(acl.get('public_access'))

    @record
    def test_set_container_acl_with_three_identifiers(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_set_container_acl_with_three_identifiers())

    async def _test_set_container_acl_too_many_ids(self):
        # Arrange
        container_name = await self._create_container()

        # Act
        identifiers = dict()
        for i in range(0, 6):
            identifiers['id{}'.format(i)] = AccessPolicy()

        # Assert
        with self.assertRaises(ValueError) as e:
            await container_name.set_container_access_policy(identifiers)
        self.assertEqual(
            str(e.exception),
            'Too many access policies provided. The server does not support setting more than 5 access policies on a single resource.'
        )

    @record
    def test_set_container_acl_too_many_ids(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_set_container_acl_too_many_ids())

    async def _test_lease_container_acquire_and_release(self):
        # Arrange
        container = await self._create_container()

        # Act
        lease = await container.acquire_lease()
        await lease.release()

        # Assert

    @record
    def test_lease_container_acquire_and_release(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_lease_container_acquire_and_release())

    async def _test_lease_container_renew(self):
        # Arrange
        container = await self._create_container()
        lease = await container.acquire_lease(lease_duration=15)
        self.sleep(10)
        lease_id_start = lease.id

        # Act
        await lease.renew()

        # Assert
        self.assertEqual(lease.id, lease_id_start)
        self.sleep(5)
        with self.assertRaises(HttpResponseError):
            await container.delete_container()
        self.sleep(10)
        await container.delete_container()

    @record
    def test_lease_container_renew(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_lease_container_renew())

    async def _test_lease_container_break_period(self):
        # Arrange
        container = await self._create_container()

        # Act
        lease = await container.acquire_lease(lease_duration=15)

        # Assert
        await lease.break_lease(lease_break_period=5)
        self.sleep(6)
        with self.assertRaises(HttpResponseError):
            await container.delete_container(lease=lease)

    @record
    def test_lease_container_break_period(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_lease_container_break_period())

    async def _test_lease_container_break_released_lease_fails(self):
        # Arrange
        container = await self._create_container()
        lease = await container.acquire_lease()
        await lease.release()

        # Act
        with self.assertRaises(HttpResponseError):
            await lease.break_lease()

        # Assert

    @record
    def test_lease_container_break_released_lease_fails(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_lease_container_break_released_lease_fails())

    async def _test_lease_container_with_duration(self):
        # Arrange
        container = await self._create_container()

        # Act
        lease = await container.acquire_lease(lease_duration=15)

        # Assert
        with self.assertRaises(HttpResponseError):
            await container.acquire_lease()
        self.sleep(15)
        await container.acquire_lease()

    @record
    def test_lease_container_with_duration(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_lease_container_with_duration())

    async def _test_lease_container_twice(self):
        # Arrange
        container = await self._create_container()

        # Act
        lease = await container.acquire_lease(lease_duration=15)

        # Assert
        lease2 = await container.acquire_lease(lease_id=lease.id)
        self.assertEqual(lease.id, lease2.id)

    @record
    def test_lease_container_twice(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_lease_container_twice())

    async def _test_lease_container_with_proposed_lease_id(self):
        # Arrange
        container = await self._create_container()

        # Act
        proposed_lease_id = '55e97f64-73e8-4390-838d-d9e84a374321'
        lease = await container.acquire_lease(lease_id=proposed_lease_id)

        # Assert
        self.assertEqual(proposed_lease_id, lease.id)

    @record
    def test_lease_container_with_proposed_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_lease_container_with_proposed_lease_id())

    async def _test_lease_container_change_lease_id(self):
        # Arrange
        container = await self._create_container()

        # Act
        lease_id = '29e0b239-ecda-4f69-bfa3-95f6af91464c'
        lease = await container.acquire_lease()
        lease_id1 = lease.id
        await lease.change(proposed_lease_id=lease_id)
        await lease.renew()
        lease_id2 = lease.id

        # Assert
        self.assertIsNotNone(lease_id1)
        self.assertIsNotNone(lease_id2)
        self.assertNotEqual(lease_id1, lease_id)
        self.assertEqual(lease_id2, lease_id)

    @record
    def test_lease_container_change_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_lease_container_change_lease_id())

    async def _test_delete_container_with_existing_container(self):
        # Arrange
        container = await self._create_container()

        # Act
        deleted = await container.delete_container()

        # Assert
        self.assertIsNone(deleted)

    @record
    def test_delete_container_with_existing_container(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_delete_container_with_existing_container())

    async def _test_delete_container_with_non_existing_container_fail_not_exist(
            self):
        # Arrange
        container_name = self._get_container_reference()
        container = self.bsc.get_container_client(container_name)

        # Act
        with LogCaptured(self) as log_captured:
            with self.assertRaises(ResourceNotFoundError):
                await container.delete_container()

            log_as_str = log_captured.getvalue()
            #self.assertTrue('ERROR' in log_as_str)

    @record
    def test_delete_container_with_non_existing_container_fail_not_exist(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_delete_container_with_non_existing_container_fail_not_exist())

    async def _test_delete_container_with_lease_id(self):
        # Arrange
        container = await self._create_container()
        lease = await container.acquire_lease(lease_duration=15)

        # Act
        deleted = await container.delete_container(lease=lease)

        # Assert
        self.assertIsNone(deleted)
        with self.assertRaises(ResourceNotFoundError):
            await container.get_container_properties()

    @record
    def test_delete_container_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_delete_container_with_lease_id())

    async def _test_list_names(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'

        await (container.get_blob_client('blob1')).upload_blob(data)
        await (container.get_blob_client('blob2')).upload_blob(data)

        # Act
        blobs = []
        async for b in container.list_blobs():
            blobs.append(b.name)

        self.assertEqual(blobs, ['blob1', 'blob2'])

    @record
    def test_list_names(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_names())

    async def _test_list_blobs(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'
        cr0 = container.get_blob_client('blob1')
        await cr0.upload_blob(data)
        cr1 = container.get_blob_client('blob2')
        await cr1.upload_blob(data)

        # Act
        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 2)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, 'blob1')
        self.assertNamedItemInContainer(blobs, 'blob2')
        self.assertEqual(blobs[0].size, 11)
        self.assertEqual(blobs[1].content_settings.content_type,
                         'application/octet-stream')
        self.assertIsNotNone(blobs[0].creation_time)

    @record
    def test_list_blobs(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs())

    async def _test_list_blobs_leased_blob(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'
        blob1 = container.get_blob_client('blob1')
        await blob1.upload_blob(data)
        lease = await blob1.acquire_lease()

        # Act
        resp = []
        async for b in container.list_blobs():
            resp.append(b)
        # Assert
        self.assertIsNotNone(resp)
        self.assertGreaterEqual(len(resp), 1)
        self.assertIsNotNone(resp[0])
        self.assertNamedItemInContainer(resp, 'blob1')
        self.assertEqual(resp[0].size, 11)
        self.assertEqual(resp[0].lease.duration, 'infinite')
        self.assertEqual(resp[0].lease.status, 'locked')
        self.assertEqual(resp[0].lease.state, 'leased')

    @record
    def test_list_blobs_leased_blob(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_leased_blob())

    async def _test_list_blobs_with_prefix(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'
        c0 = container.get_blob_client('blob_a1')
        await c0.upload_blob(data)
        c1 = container.get_blob_client('blob_a2')
        await c1.upload_blob(data)
        c2 = container.get_blob_client('blob_b1')
        await c2.upload_blob(data)

        # Act
        resp = []
        async for b in container.list_blobs(name_starts_with='blob_a'):
            resp.append(b)

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), 2)
        self.assertNamedItemInContainer(resp, 'blob_a1')
        self.assertNamedItemInContainer(resp, 'blob_a2')

    @record
    def test_list_blobs_with_prefix(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_with_prefix())

    async def _test_list_blobs_with_num_results(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'
        c0 = container.get_blob_client('blob_a1')
        await c0.upload_blob(data)
        c1 = container.get_blob_client('blob_a2')
        await c1.upload_blob(data)
        c2 = container.get_blob_client('blob_a3')
        await c2.upload_blob(data)
        c3 = container.get_blob_client('blob_b1')
        await c3.upload_blob(data)

        # Act
        generator = container.list_blobs(results_per_page=2).by_page()
        blobs = []
        async for b in await generator.__anext__():
            blobs.append(b)

        # Assert
        self.assertIsNotNone(blobs)
        self.assertEqual(len(blobs), 2)
        self.assertNamedItemInContainer(generator.current_page, 'blob_a1')
        self.assertNamedItemInContainer(generator.current_page, 'blob_a2')

    @record
    def test_list_blobs_with_num_results(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_with_num_results())

    async def _test_list_blobs_with_include_snapshots(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'
        blob1 = container.get_blob_client('blob1')
        await blob1.upload_blob(data)
        await blob1.create_snapshot()
        await (container.get_blob_client('blob2')).upload_blob(data)

        # Act
        blobs = []
        async for b in container.list_blobs(include="snapshots"):
            blobs.append(b)

        # Assert
        self.assertEqual(len(blobs), 3)
        self.assertEqual(blobs[0].name, 'blob1')
        self.assertIsNotNone(blobs[0].snapshot)
        self.assertEqual(blobs[1].name, 'blob1')
        self.assertIsNone(blobs[1].snapshot)
        self.assertEqual(blobs[2].name, 'blob2')
        self.assertIsNone(blobs[2].snapshot)

    @record
    def test_list_blobs_with_include_snapshots(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_with_include_snapshots())

    async def _test_list_blobs_with_include_metadata(self):
        # Arrange
        pytest.skip("Waiting on metadata XML fix in msrest")
        container = await self._create_container()
        data = b'hello world'
        blob1 = container.get_blob_client('blob1')
        await blob1.upload_blob(data, metadata={'number': '1', 'name': 'bob'})
        await blob1.create_snapshot()
        cr = container.get_blob_client('blob2')
        await cr.upload_blob(data, metadata={'number': '2', 'name': 'car'})

        # Act
        blobs = []
        async for b in container.list_blobs(include="metadata"):
            blobs.append(b)

        # Assert
        self.assertEqual(len(blobs), 2)
        self.assertEqual(blobs[0].name, 'blob1')
        self.assertEqual(blobs[0].metadata['number'], '1')
        self.assertEqual(blobs[0].metadata['name'], 'bob')
        self.assertEqual(blobs[1].name, 'blob2')
        self.assertEqual(blobs[1].metadata['number'], '2')
        self.assertEqual(blobs[1].metadata['name'], 'car')

    @record
    def test_list_blobs_with_include_metadata(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_with_include_metadata())

    async def _test_list_blobs_with_include_uncommittedblobs(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'
        blob1 = container.get_blob_client('blob1')
        await blob1.stage_block('1', b'AAA')
        await blob1.stage_block('2', b'BBB')
        await blob1.stage_block('3', b'CCC')

        blob2 = container.get_blob_client('blob2')
        await blob2.upload_blob(data, metadata={'number': '2', 'name': 'car'})

        # Act
        blobs = []
        async for b in container.list_blobs(include="uncommittedblobs"):
            blobs.append(b)

        # Assert
        self.assertEqual(len(blobs), 2)
        self.assertEqual(blobs[0].name, 'blob1')
        self.assertEqual(blobs[1].name, 'blob2')

    @record
    def test_list_blobs_with_include_uncommittedblobs(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_list_blobs_with_include_uncommittedblobs())

    async def _test_list_blobs_with_include_copy(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'
        await (container.get_blob_client('blob1')).upload_blob(
            data, metadata={'status': 'original'})
        sourceblob = 'https://{0}.blob.core.windows.net/{1}/blob1'.format(
            self.settings.STORAGE_ACCOUNT_NAME, container.container_name)

        blobcopy = container.get_blob_client('blob1copy')
        await blobcopy.start_copy_from_url(sourceblob,
                                           metadata={'status': 'copy'})

        # Act
        blobs = []
        async for b in container.list_blobs(include="copy"):
            blobs.append(b)

        # Assert
        self.assertEqual(len(blobs), 2)
        self.assertEqual(blobs[0].name, 'blob1')
        self.assertEqual(blobs[1].name, 'blob1copy')
        self.assertEqual(blobs[1].blob_type, blobs[0].blob_type)
        self.assertEqual(blobs[1].size, 11)
        self.assertEqual(blobs[1].content_settings.content_type,
                         'application/octet-stream')
        self.assertEqual(blobs[1].content_settings.cache_control, None)
        self.assertEqual(blobs[1].content_settings.content_encoding, None)
        self.assertEqual(blobs[1].content_settings.content_language, None)
        self.assertEqual(blobs[1].content_settings.content_disposition, None)
        self.assertNotEqual(blobs[1].content_settings.content_md5, None)
        self.assertEqual(blobs[1].lease.status, 'unlocked')
        self.assertEqual(blobs[1].lease.state, 'available')
        self.assertNotEqual(blobs[1].copy.id, None)
        self.assertEqual(blobs[1].copy.source, sourceblob)
        self.assertEqual(blobs[1].copy.status, 'success')
        self.assertEqual(blobs[1].copy.progress, '11/11')
        self.assertNotEqual(blobs[1].copy.completion_time, None)

    @record
    def test_list_blobs_with_include_copy(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_with_include_copy())

    async def _test_list_blobs_with_delimiter(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'

        cr0 = container.get_blob_client('a/blob1')
        await cr0.upload_blob(data)
        cr1 = container.get_blob_client('a/blob2')
        await cr1.upload_blob(data)
        cr2 = container.get_blob_client('b/blob3')
        await cr2.upload_blob(data)
        cr4 = container.get_blob_client('blob4')
        await cr4.upload_blob(data)

        # Act
        resp = []
        async for w in container.walk_blobs():
            resp.append(w)

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), 3)
        self.assertNamedItemInContainer(resp, 'a/')
        self.assertNamedItemInContainer(resp, 'b/')
        self.assertNamedItemInContainer(resp, 'blob4')

    @record
    def test_list_blobs_with_delimiter(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_with_delimiter())

    async def _test_walk_blobs_with_delimiter(self):
        # Arrange
        container = await self._create_container()
        data = b'hello world'

        cr0 = container.get_blob_client('a/blob1')
        await cr0.upload_blob(data)
        cr1 = container.get_blob_client('a/blob2')
        await cr1.upload_blob(data)
        cr2 = container.get_blob_client('b/c/blob3')
        await cr2.upload_blob(data)
        cr3 = container.get_blob_client('blob4')
        await cr3.upload_blob(data)

        blob_list = []

        async def recursive_walk(prefix):
            # walk_blobs() on the async client returns an async paged
            # iterator, so both the top-level listing and nested prefixes
            # must be iterated with "async for" and awaited recursively.
            async for b in prefix:
                if b.get('prefix'):
                    await recursive_walk(b)
                else:
                    blob_list.append(b.name)

        # Act
        await recursive_walk(container.walk_blobs())

        # Assert
        self.assertEqual(len(blob_list), 4)
        self.assertEqual(blob_list,
                         ['a/blob1', 'a/blob2', 'b/c/blob3', 'blob4'])

    @pytest.mark.skip
    def test_walk_blobs_with_delimiter(self):
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_walk_blobs_with_delimiter())

    async def _test_list_blobs_with_include_multiple(self):
        # Arrange
        pytest.skip("Waiting on metadata XML fix in msrest")
        container = await self._create_container()
        data = b'hello world'
        blob1 = container.get_blob_client('blob1')
        await blob1.upload_blob(data, metadata={'number': '1', 'name': 'bob'})
        await blob1.create_snapshot()

        client = container.get_blob_client('blob2')
        await client.upload_blob(data, metadata={'number': '2', 'name': 'car'})

        # Act
        blobs = []
        async for b in container.list_blobs(include=["snapshots", "metadata"]):
            blobs.append(b)

        # Assert
        self.assertEqual(len(blobs), 3)
        self.assertEqual(blobs[0].name, 'blob1')
        self.assertIsNotNone(blobs[0].snapshot)
        self.assertEqual(blobs[0].metadata['number'], '1')
        self.assertEqual(blobs[0].metadata['name'], 'bob')
        self.assertEqual(blobs[1].name, 'blob1')
        self.assertIsNone(blobs[1].snapshot)
        self.assertEqual(blobs[1].metadata['number'], '1')
        self.assertEqual(blobs[1].metadata['name'], 'bob')
        self.assertEqual(blobs[2].name, 'blob2')
        self.assertIsNone(blobs[2].snapshot)
        self.assertEqual(blobs[2].metadata['number'], '2')
        self.assertEqual(blobs[2].metadata['name'], 'car')

    @record
    def test_list_blobs_with_include_multiple(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_list_blobs_with_include_multiple())

    async def _test_shared_access_container(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        container = await self._create_container()
        blob_name = 'blob1'
        data = b'hello world'

        blob = container.get_blob_client(blob_name)
        await blob.upload_blob(data)

        token = container.generate_shared_access_signature(
            expiry=datetime.utcnow() + timedelta(hours=1),
            permission=ContainerPermissions.READ,
        )
        blob = BlobClient(blob.url, credential=token)

        # Act
        response = requests.get(blob.url)

        # Assert
        self.assertTrue(response.ok)
        self.assertEqual(data, response.content)

    @record
    def test_shared_access_container(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_shared_access_container())

    async def _test_web_container_normal_operations_working(self):
        web_container = "web"

        # create the web container in case it does not exist yet
        container = self.bsc.get_container_client(web_container)
        try:
            try:
                created = await container.create_container()
                self.assertIsNotNone(created)
            except ResourceExistsError:
                pass

            # test if web container exists
            exist = await container.get_container_properties()
            self.assertTrue(exist)

            # create a blob
            blob_name = self.get_resource_name("blob")
            blob_content = self.get_random_text_data(1024)
            blob = container.get_blob_client(blob_name)
            await blob.upload_blob(blob_content)

            # get a blob
            blob_data = await (await blob.download_blob()).content_as_bytes()
            self.assertIsNotNone(blob)
            self.assertEqual(blob_data.decode('utf-8'), blob_content)

        finally:
            # delete container
            await container.delete_container()

    @record
    def test_web_container_normal_operations_working(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_web_container_normal_operations_working())
Ejemplo n.º 12
0
class AzBlobManagerAsync:
    """A utility class to help working with Azure Blob Storage.
        This class implements asynchronous methods based on the
        Microsoft Python SDK azure.storage.blob.aio
    See:
        https://docs.microsoft.com/en-us/python/api/azure-storage-blob/azure.storage.blob.aio?view=azure-python

    Available:
        - Basic methods to work with containers and blobs

    """
    @classmethod
    def create(cls, connection_string=None, account_url=None, credential=None):
        """Instantiate an asynchronous AzBlobManagerAsync object.

        Args:
            connection_string (str): A connection string to an Azure Storage account.
            account_url (str): The URL to the blob storage account. Any other entities included in
                the URL path (e.g. container or blob) will be discarded. This URL can be
                optionally authenticated with a SAS token.
            credential (str):  The credentials with which to authenticate. This is optional
                if the account URL already has a SAS token, or the connection string already
                has shared access key values. The value can be a SAS token string, an account
                shared access key, or an instance of a TokenCredentials class from azure.identity.
                Credentials provided here will take precedence over those in the connection string.

        Returns:
            AzBlobManagerAsync object

        Examples:
            Creating the AzBlobManagerAsync with an account URL and a shared access key:
            azStorageManager = AzBlobManagerAsync.create(account_url=url, credential=shared_access_key)

            Creating the AzBlobManagerAsync with a connection string that has the shared access key:
            azStorageManager = AzBlobManagerAsync.create(connection_string='DefaultEndpointsProtocol=http;...')

        """
        self = cls()
        self.connection_string = connection_string
        self.account_url = account_url
        self.credential = credential

        from azure.storage.blob.aio import BlobServiceClient
        if self.connection_string is not None:
            # Create the BlobServiceClient from a connection string.
            self.blob_service_client = BlobServiceClient.from_connection_string(
                conn_str=self.connection_string, credential=self.credential)
        else:
            # Create the BlobServiceClient with an account URL and credential.
            self.blob_service_client = BlobServiceClient(
                account_url=self.account_url, credential=self.credential)

        return self

    def _logAzureError(self, err):
        msg = err.message.split('\n')[0]
        logger.error(f'AzureError error: {msg}')

    async def create_container(self, container_name):
        """Creates a new container.

        Args:
            container_name (str): The name of the container.
            See https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata
                for naming convention

        Returns:
            bool: The return value. True for success, False otherwise.
        """
        success = False
        try:
            new_container = await self.blob_service_client.create_container(
                container_name)
            properties = await new_container.get_container_properties()
            success = properties is not None and properties.name == container_name
        except ResourceExistsError:
            logger.info(f'Container \"{container_name}\" already exists.')
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return success

    async def delete_container(self, container_name):
        """Deletes a container.

        Args:
            container_name (str): The name of the container.

        Returns:
            bool: The return value. True for success, False otherwise.
        """
        success = False
        try:
            await self.blob_service_client.delete_container(container_name)
            success = True
        except ResourceNotFoundError:
            logger.info(f'Container \"{container_name}\" doesn not exist.')
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return success

    async def _list_containers(self,
                               name_starts_with=None,
                               include_metadata=False):
        """Lists containers.

        Args:
            name_starts_with (str): Filters the results to return only containers whose names
                begin with the specified prefix.
            include_metadata (bool): Specifies that container metadata be returned in the response.

        Returns:
            list: A list of ContainerProperties objects, or None if an error occurred.
        """

        try:
            containers = []
            async for container in self.blob_service_client.list_containers(
                    name_starts_with=name_starts_with,
                    include_metadata=include_metadata):
                containers.append(container)
            return containers
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return None

    async def list_containers_name(self, name_starts_with=None):
        """Lists containers' name.

        Args:
           name_starts_with (str): Filters the results to return only containers whose names
               begin with the specified prefix.

        Returns:
           list: A list of strings representing the container names.
        """

        containers_list = []
        containers = await self._list_containers(
            name_starts_with=name_starts_with, include_metadata=False)
        if (containers is None):
            return containers_list
        for container in containers:
            containers_list.append(container['name'])
        return containers_list

    async def create_append_blob(self,
                                 container_name,
                                 blob_name,
                                 replace_blob=False):
        """Creates an append blob in an existing container.

        Args:
            container_name (str): The name of the container.
            blob_name (str): The name of the blob.
            replace_blob (bool): If True, recreates an existing blob with the same name

        Returns:
            bool: The return value. True for success, False otherwise.
        """
        success = False
        try:
            blob_client = self.blob_service_client.get_blob_client(
                container_name, blob_name)
            # raise ResourceNotFoundError if blob does not exist
            await blob_client.get_blob_properties()
            # blob exists already
            if (replace_blob is True):
                await blob_client.create_append_blob()
            success = True
        except ResourceNotFoundError:
            await blob_client.create_append_blob()
            success = True
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return success

    async def create_page_blob(self,
                               container_name,
                               blob_name,
                               size=1024,
                               content_settings=None,
                               metadata=None,
                               premium_page_blob_tier=None):
        """Creates a page blob in an existing container.

        Args:
            container_name (str): The name of the container.
            blob_name (str): The name of the blob.
            size (int): This specifies the maximum size for the page blob, up to 1 TB.
                The page blob size must be aligned to a 512-byte boundary
            content_settings (ContentSettings): ContentSettings object used to set blob properties.
                Used to set content type, encoding, language, disposition, md5, and cache control.
            metadata (dict(str, str)): Name-value pairs associated with the blob as metadata
            premium_page_blob_tier (PremiumPageBlobTier): A page blob tier value to set the blob to
        Returns:
            bool: The return value. True for success, False otherwise.
        """
        success = False
        try:
            blob_client = self.blob_service_client.get_blob_client(
                container_name, blob_name)
            await blob_client.create_page_blob(size, content_settings,
                                               metadata,
                                               premium_page_blob_tier)
            success = True
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return success

    async def delete_blob(self, container_name, blob_name):
        """Deletes a blob.

        Args:
            container_name (str): The name of the container.
            blob_name (str): The name of the blob.

        Returns:
            bool: The return value. True for success, False otherwise.
        """
        success = False
        try:
            blob_client = self.blob_service_client.get_blob_client(
                container_name, blob_name)
            await blob_client.delete_blob()
            success = True
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return success

    async def list_blobs(self, container_name):
        """Lists the blobs in the specified container.

        Args:
            container_name (str): The name of the container.

        Returns:
            list: A list of BlobProperties objects for the blobs in the container.
        """

        blobs_list = []
        try:
            container_client = self.blob_service_client.get_container_client(
                container_name)
            async for blob in container_client.list_blobs():
                blobs_list.append(blob)
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('Fatal error')
        return blobs_list

    async def upload_data(self,
                          data,
                          container_name,
                          blob_name,
                          blob_type='BlockBlob'):
        """Creates a new blob from a data source with automatic chunking.

        Args:
            data: The blob data to upload.
            container_name (str): The name of the container.
            blob_name (str): The name of the blob.
            blob_type (str): The type of the blob. This can be either BlockBlob, PageBlob or AppendBlob.

        Returns:
            bool: The return value. True for success, False otherwise.
        """

        success = False
        try:
            blob_client = self.blob_service_client.get_blob_client(
                container_name, blob_name)
            await blob_client.upload_blob(data, blob_type=blob_type)
            success = True
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return success

    async def append_block(self, data, container_name, blob_name):
        """Commits a new block of data to the end of the existing append blob.

        Args:
            data: Content of the block.
            container_name (str): The name of the container.
            blob_name (str): The name of the blob.

        Returns:
            bool: The return value. True for success, False otherwise.
        """

        success = False
        try:
            blob_client = self.blob_service_client.get_blob_client(
                container_name, blob_name)
            await blob_client.append_block(data)
            success = True
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
        return success

    async def download_data(self, container_name, blob_name):
        """Downloads a blob.

        Args:
            container_name (str): The name of the container.
            blob_name (str): The name of the blob.
        Returns:
            bytes: The blob content, or None if an error occurred
        """
        try:
            blob_client = self.blob_service_client.get_blob_client(
                container_name, blob_name)
            stream = await blob_client.download_blob()
            return await stream.readall()
        except AzureError as err:
            self._logAzureError(err=err)
        except Exception:
            logger.exception('')
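
# Usage sketch (not part of the original example): a minimal, hypothetical driver
# showing how the AzBlobManagerAsync wrapper above could be exercised end to end.
# The environment variable names and the "demo-container" / blob names below are
# assumptions for illustration only.
import asyncio
import os


async def _azblob_manager_demo():
    manager = AzBlobManagerAsync.create(
        account_url=os.getenv("AZURE_STORAGE_ACCOUNT_URL"),
        credential=os.getenv("AZURE_STORAGE_KEY"),
    )
    try:
        # Container lifecycle
        await manager.create_container("demo-container")
        print(await manager.list_containers_name(name_starts_with="demo"))

        # Block blob upload / download round trip
        await manager.upload_data(b"hello world", "demo-container", "hello.txt")
        print(await manager.download_data("demo-container", "hello.txt"))

        # Append blob flow: create the blob once, then append blocks to it
        await manager.create_append_blob("demo-container", "log.txt")
        await manager.append_block(b"first line\n", "demo-container", "log.txt")
        print(await manager.list_blobs("demo-container"))

        # Clean up
        await manager.delete_blob("demo-container", "hello.txt")
        await manager.delete_container("demo-container")
    finally:
        # The aio client holds an aiohttp session that should be closed explicitly.
        await manager.blob_service_client.close()


# asyncio.run(_azblob_manager_demo())
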
class StorageBlobEncryptionTestAsync(AsyncStorageTestCase):
    # --Helpers-----------------------------------------------------------------

    async def _setup(self, storage_account, key):
        # test chunking functionality by reducing the threshold
        # for chunking and the size of each chunk, otherwise
        # the tests would take too long to execute
        self.bsc = BlobServiceClient(self.account_url(storage_account, "blob"),
                                     credential=key,
                                     max_single_put_size=32 * 1024,
                                     max_block_size=4 * 1024,
                                     max_page_size=4 * 1024,
                                     max_single_get_size=4 * 1024,
                                     transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')
        self.blob_types = (BlobType.BlockBlob, BlobType.PageBlob,
                           BlobType.AppendBlob)
        self.bytes = b'Foo'

        if self.is_live:
            container = self.bsc.get_container_client(self.container_name)
            try:
                await container.create_container()
            except:
                pass

    def _teardown(self, file_name):
        if path.isfile(file_name):
            try:
                remove(file_name)
            except:
                pass

    def _get_container_reference(self):
        return self.get_resource_name(TEST_CONTAINER_PREFIX)

    def _get_blob_reference(self, blob_type):
        return self.get_resource_name(TEST_BLOB_PREFIXES[blob_type.value])

    async def _create_small_blob(self, blob_type):
        blob_name = self._get_blob_reference(blob_type)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(self.bytes, blob_type=blob_type)
        return blob

    # --Test cases for blob encryption ----------------------------------------

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_missing_attribute_kek_wrap_async(self, resource_group,
                                                    location, storage_account,
                                                    storage_account_key):
        # In the shared method _generate_blob_encryption_key
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        valid_key = KeyWrapper('key1')

        # Act
        invalid_key_1 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_1.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
        invalid_key_1.get_kid = valid_key.get_kid
        # No attribute wrap_key
        self.bsc.key_encryption_key = invalid_key_1
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

        invalid_key_2 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_2.wrap_key = valid_key.wrap_key
        invalid_key_2.get_kid = valid_key.get_kid
        # No attribute get_key_wrap_algorithm
        self.bsc.key_encryption_key = invalid_key_2
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

        invalid_key_3 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_3.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
        invalid_key_3.wrap_key = valid_key.wrap_key
        # No attribute get_kid
        self.bsc.key_encryption_key = invalid_key_3
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_invalid_value_kek_wrap_async(self, resource_group, location,
                                                storage_account,
                                                storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')

        self.bsc.key_encryption_key.get_key_wrap_algorithm = None
        try:
            await self._create_small_blob(BlobType.BlockBlob)
            self.fail()
        except AttributeError as e:
            self.assertEqual(
                str(e),
                _ERROR_OBJECT_INVALID.format('key encryption key',
                                             'get_key_wrap_algorithm'))

        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.key_encryption_key.get_kid = None
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.key_encryption_key.wrap_key = None
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_missing_attribute_kek_unwrap_async(self, resource_group,
                                                      location,
                                                      storage_account,
                                                      storage_account_key):
        # Shared between all services in decrypt_blob
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        valid_key = KeyWrapper('key1')
        self.bsc.key_encryption_key = valid_key
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        # Note that KeyWrapper has a default value for key_id, so these Exceptions
        # are not due to non_matching kids.
        invalid_key_1 = lambda: None  #functions are objects, so this effectively creates an empty object
        invalid_key_1.get_kid = valid_key.get_kid
        #No attribute unwrap_key
        blob.key_encryption_key = invalid_key_1
        with self.assertRaises(HttpResponseError):
            await (await blob.download_blob()).content_as_bytes()

        invalid_key_2 = lambda: None  #functions are objects, so this effectively creates an empty object
        invalid_key_2.unwrap_key = valid_key.unwrap_key
        blob.key_encryption_key = invalid_key_2
        #No attribute get_kid
        with self.assertRaises(HttpResponseError):
            await (await blob.download_blob()).content_as_bytes()

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_invalid_value_kek_unwrap_async(self, resource_group,
                                                  location, storage_account,
                                                  storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.key_encryption_key = KeyWrapper('key1')
        blob.key_encryption_key.unwrap_key = None

        with self.assertRaises(HttpResponseError) as e:
            await (await blob.download_blob()).content_as_bytes()
        self.assertEqual(str(e.exception), 'Decryption failed.')

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_kek_async(self, resource_group, location,
                                      storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        content = await (await blob.download_blob()).content_as_bytes()

        # Assert
        self.assertEqual(content, self.bytes)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_resolver_async(self, resource_group, location,
                                           storage_account,
                                           storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        key_resolver = KeyResolver()
        key_resolver.put_key(self.bsc.key_encryption_key)
        self.bsc.key_resolver_function = key_resolver.resolve_key
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        self.bsc.key_encryption_key = None
        content = await (await blob.download_blob()).content_as_bytes()

        # Assert
        self.assertEqual(content, self.bytes)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_kek_RSA_async(self, resource_group, location,
                                          storage_account,
                                          storage_account_key):
        # We can only generate random RSA keys, so this must be run live or
        # the playback test will fail due to a change in kek values.

        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = RSAKeyWrapper('key2')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        content = await blob.download_blob()
        data = b""
        async for d in content.chunks():
            data += d

        # Assert
        self.assertEqual(data, self.bytes)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_nonmatching_kid_async(self, resource_group,
                                                  location, storage_account,
                                                  storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        self.bsc.key_encryption_key.kid = 'Invalid'

        # Assert
        with self.assertRaises(HttpResponseError) as e:
            await (await blob.download_blob()).content_as_bytes()
        self.assertEqual(str(e.exception), 'Decryption failed.')

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_invalid_stream_type_async(self, resource_group,
                                                      location,
                                                      storage_account,
                                                      storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        small_stream = StringIO(u'small')
        large_stream = StringIO(u'large' * self.config.max_single_put_size)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        # Block blob specific single shot
        with self.assertRaises(TypeError) as e:
            await blob.upload_blob(small_stream, length=5)
        self.assertTrue(
            'Blob data should be of type bytes.' in str(e.exception))

        # Generic blob chunked
        with self.assertRaises(TypeError) as e:
            await blob.upload_blob(large_stream)
        self.assertTrue(
            'Blob data should be of type bytes.' in str(e.exception))

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_chnking_reqd_mult_of_block_size_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(self.config.max_single_put_size +
                                        self.config.max_block_size)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=3)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=3)

        # Assert
        self.assertEqual(content, blob_content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_chnking_reqd_non_mult_of_block_size_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = urandom(self.config.max_single_put_size + 1)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=3)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=3)

        # Assert
        self.assertEqual(content, blob_content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_chunking_required_range_specified_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(self.config.max_single_put_size * 2)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content,
                               length=self.config.max_single_put_size + 53,
                               max_concurrency=3)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=3)

        # Assert
        self.assertEqual(content[:self.config.max_single_put_size + 53],
                         blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_block_blob_single_shot_async(self, resource_group,
                                                    location, storage_account,
                                                    storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = b'small'
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob()).content_as_bytes()

        # Assert
        self.assertEqual(content, blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_range_async(self, resource_group, location,
                                        storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        content = b'Random repeats' * self.config.max_single_put_size * 5

        # All page blob uploads call _upload_chunks, so this will test the ability
        # of that function to handle ranges even though it's a small blob
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content[2:],
                               length=self.config.max_single_put_size + 5,
                               max_concurrency=1)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=1)

        # Assert
        self.assertEqual(content[2:2 + self.config.max_single_put_size + 5],
                         blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_empty_async(self, resource_group, location,
                                        storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = b''
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=2)

        # Assert
        self.assertEqual(content, blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_serial_upload_chunking_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(self.config.max_single_put_size + 1)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=1)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=1)

        # Assert
        self.assertEqual(content, blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_beginning_to_middle_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=1)
        blob_content = await (await blob.download_blob(
            offset=0, length=50)).content_as_bytes(max_concurrency=1)

        # Assert
        self.assertEqual(content[:50], blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_middle_to_end_async(self, resource_group,
                                                      location,
                                                      storage_account,
                                                      storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=1)
        blob_content = await (await blob.download_blob(
            offset=100, length=28)).content_as_bytes()
        blob_content2 = await (await blob.download_blob(offset=100
                                                        )).content_as_bytes()

        # Assert
        self.assertEqual(content[100:], blob_content)
        self.assertEqual(content[100:], blob_content2)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_middle_to_middle_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=5, length=93)).content_as_bytes()

        # Assert
        self.assertEqual(content[5:98], blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_aligns_on_16_byte_block_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=48, length=16)).content_as_bytes()

        # Assert
        self.assertEqual(content[48:64], blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_expnded_to_begin_bloc_align_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=5, length=50)).content_as_bytes()

        # Assert
        self.assertEqual(content[5:55], blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_expanded_to_beginning_iv_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=22, length=20)).content_as_bytes()

        # Assert
        self.assertEqual(content[22:42], blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_put_blob_strict_mode_async(self, resource_group, location,
                                              storage_account,
                                              storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        content = urandom(512)

        # Assert
        for service in self.blob_types:
            blob_name = self._get_blob_reference(service)
            blob = self.bsc.get_blob_client(self.container_name, blob_name)

            with self.assertRaises(ValueError):
                await blob.upload_blob(content, blob_type=service)

            stream = BytesIO(content)
            with self.assertRaises(ValueError):
                await blob.upload_blob(stream, length=512, blob_type=service)

            file_name = 'strict_mode_async.temp.dat'
            with open(file_name, 'wb') as stream:
                stream.write(content)
            with open(file_name, 'rb') as stream:
                with self.assertRaises(ValueError):
                    await blob.upload_blob(stream, blob_type=service)

            with self.assertRaises(ValueError):
                await blob.upload_blob('To encrypt', blob_type=service)
            self._teardown(file_name)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_strict_mode_no_policy_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.key_encryption_key = None

        # Assert
        with self.assertRaises(ValueError):
            await (await blob.download_blob()).content_as_bytes()

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_strict_mode_unencrypted_blob_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account, storage_account_key)
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.require_encryption = True
        blob.key_encryption_key = KeyWrapper('key1')

        # Assert
        with self.assertRaises(HttpResponseError):
            await (await blob.download_blob()).content_as_bytes()

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_invalid_methods_fail_block_async(self, resource_group,
                                                    location, storage_account,
                                                    storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        with self.assertRaises(ValueError) as e:
            await blob.stage_block('block1', urandom(32))
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        with self.assertRaises(ValueError) as e:
            await blob.commit_block_list(['block1'])
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_invalid_methods_fail_append_async(self, resource_group,
                                                     location, storage_account,
                                                     storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob_name = self._get_blob_reference(BlobType.AppendBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        with self.assertRaises(ValueError) as e:
            await blob.append_block(urandom(32))
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        with self.assertRaises(ValueError) as e:
            await blob.create_append_blob()
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        # All append_from operations funnel into append_from_stream, so testing one is sufficient
        with self.assertRaises(ValueError) as e:
            await blob.upload_blob(b'To encrypt',
                                   blob_type=BlobType.AppendBlob)
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_invalid_methods_fail_page_async(self, resource_group,
                                                   location, storage_account,
                                                   storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob_name = self._get_blob_reference(BlobType.PageBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        with self.assertRaises(ValueError) as e:
            await blob.upload_page(urandom(512),
                                   offset=0,
                                   length=512,
                                   blob_type=BlobType.PageBlob)
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        with self.assertRaises(ValueError) as e:
            await blob.create_page_blob(512)
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_validate_encryption_async(self, resource_group, location,
                                             storage_account,
                                             storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        kek = KeyWrapper('key1')
        self.bsc.key_encryption_key = kek
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.require_encryption = False
        blob.key_encryption_key = None
        content = await blob.download_blob()
        data = await content.content_as_bytes()

        encryption_data = _dict_to_encryption_data(
            loads(content.properties.metadata['encryptiondata']))
        iv = encryption_data.content_encryption_IV
        content_encryption_key = _validate_and_unwrap_cek(
            encryption_data, kek, None)
        cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
        decryptor = cipher.decryptor()
        unpadder = PKCS7(128).unpadder()

        content = decryptor.update(data) + decryptor.finalize()
        content = unpadder.update(content) + unpadder.finalize()

        self.assertEqual(self.bytes, content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_block_blob_from_star_async(self, resource_group,
                                                     location, storage_account,
                                                     storage_account_key):
        await self._setup(storage_account, storage_account_key)
        await self._create_blob_from_star(BlobType.BlockBlob, "blob1",
                                          self.bytes, self.bytes)

        stream = BytesIO(self.bytes)
        await self._create_blob_from_star(BlobType.BlockBlob, "blob2",
                                          self.bytes, stream)

        file_name = 'block_star_async.temp.dat'
        with open(file_name, 'wb') as stream:
            stream.write(self.bytes)
        with open(file_name, 'rb') as stream:
            await self._create_blob_from_star(BlobType.BlockBlob, "blob3",
                                              self.bytes, stream)

        await self._create_blob_from_star(BlobType.BlockBlob, "blob4",
                                          b'To encrypt', 'To encrypt')
        self._teardown(file_name)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_page_blob_from_star_async(self, resource_group,
                                                    location, storage_account,
                                                    storage_account_key):
        await self._setup(storage_account, storage_account_key)
        content = self.get_random_bytes(512)
        await self._create_blob_from_star(BlobType.PageBlob, "blob1", content,
                                          content)

        stream = BytesIO(content)
        await self._create_blob_from_star(BlobType.PageBlob,
                                          "blob2",
                                          content,
                                          stream,
                                          length=512)

        file_name = 'page_star_async.temp.dat'
        with open(file_name, 'wb') as stream:
            stream.write(content)

        with open(file_name, 'rb') as stream:
            await self._create_blob_from_star(BlobType.PageBlob, "blob3",
                                              content, stream)
        self._teardown(file_name)

    async def _create_blob_from_star(self, blob_type, blob_name, content, data,
                                     **kwargs):
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        blob.key_encryption_key = KeyWrapper('key1')
        blob.require_encryption = True
        await blob.upload_blob(data, blob_type=blob_type, **kwargs)

        blob_content = await (await blob.download_blob()).content_as_bytes()
        self.assertEqual(content, blob_content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_star_async(self, resource_group, location,
                                          storage_account,
                                          storage_account_key):
        await self._setup(storage_account, storage_account_key)
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        content = await blob.download_blob()
        iter_blob = b""
        async for data in content.chunks():
            iter_blob += data
        bytes_blob = await (await blob.download_blob()).content_as_bytes()
        stream_blob = BytesIO()
        await (await blob.download_blob()).download_to_stream(stream_blob)
        stream_blob.seek(0)
        text_blob = await (await blob.download_blob()).content_as_text()

        # Assert
        self.assertEqual(self.bytes, iter_blob)
        self.assertEqual(self.bytes, bytes_blob)
        self.assertEqual(self.bytes, stream_blob.read())
        self.assertEqual(self.bytes.decode(), text_blob)
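
# Client-side encryption sketch (not part of the original tests): the tests above rely
# on a KeyWrapper helper that is not shown here; as the missing-attribute tests
# demonstrate, any key-encryption-key object only needs wrap_key / unwrap_key /
# get_key_wrap_algorithm / get_kid. The class below is a toy, hypothetical stand-in
# that "wraps" the key with an identity transform, purely to illustrate the interface.
class _DemoKeyEncryptionKey:
    def __init__(self, kid='local:demo-key'):
        self.kid = kid

    def wrap_key(self, key, algorithm='none'):
        # A real KEK would wrap the content-encryption key here (e.g. AES key wrap or RSA-OAEP).
        return key

    def unwrap_key(self, wrapped_key, algorithm):
        return wrapped_key

    def get_key_wrap_algorithm(self):
        return 'none'

    def get_kid(self):
        return self.kid


# Hypothetical usage with an aio client (account_url / credential assumed to exist):
#
#     bsc = BlobServiceClient(account_url, credential=credential)
#     bsc.require_encryption = True
#     bsc.key_encryption_key = _DemoKeyEncryptionKey()
#     blob = bsc.get_blob_client('container', 'blob')
#     await blob.upload_blob(b'secret data')                  # content is encrypted client-side
#     data = await (await blob.download_blob()).readall()     # transparently decrypted
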
class StorageGetBlobTestAsync(AsyncStorageTestCase):

    # --Helpers-----------------------------------------------------------------

    async def _setup(self, name, key):
        self.bsc = BlobServiceClient(self.account_url(name, "blob"),
                                     credential=key,
                                     max_single_get_size=32 * 1024,
                                     max_chunk_get_size=4 * 1024,
                                     transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')
        self.byte_blob = self.get_resource_name('byteblob')
        self.byte_data = self.get_random_bytes(64 * 1024 + 5)
        if self.is_live:
            container = self.bsc.get_container_client(self.container_name)
            try:
                await container.create_container()
            except:
                pass

            blob = self.bsc.get_blob_client(self.container_name,
                                            self.byte_blob)
            await blob.upload_blob(self.byte_data, overwrite=True)

    def _teardown(self, file_name):
        if path.isfile(file_name):
            try:
                remove(file_name)
            except:
                pass

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    class NonSeekableFile(object):
        def __init__(self, wrapped_file):
            self.wrapped_file = wrapped_file

        def write(self, data):
            self.wrapped_file.write(data)

        def read(self, count):
            return self.wrapped_file.read(count)

        def seekable(self):
            return False

    # -- Get test cases for blobs ----------------------------------------------

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_unicode_get_blob_unicode_data_async(self, resource_group,
                                                       location,
                                                       storage_account,
                                                       storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = u'hello world啊齄丂狛狜'.encode('utf-8')
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertIsInstance(content.properties, BlobProperties)
        self.assertEqual(await content.readall(), blob_data)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_unicode_get_blob_binary_data_async(self, resource_group,
                                                      location,
                                                      storage_account,
                                                      storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        base64_data = 'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w=='
        binary_data = base64.b64decode(base64_data)

        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(binary_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertIsInstance(content.properties, BlobProperties)
        self.assertEqual(await content.readall(), binary_data)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_no_content_async(self, resource_group, location,
                                             storage_account,
                                             storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = b''
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertEqual(blob_data, await content.readall())
        self.assertEqual(0, content.properties.size)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_async(self, resource_group, location,
                                           storage_account,
                                           storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await blob.download_blob(max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_bytes_with_single_byte_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await blob.download_blob(offset=0,
                                                  length=1)).readall()

        # Assert
        self.assertEqual(1, len(content))
        self.assertEqual(self.byte_data[0], content[0])

        # Act
        content = await (await blob.download_blob(offset=5,
                                                  length=1)).readall()

        # Assert
        self.assertEqual(1, len(content))
        self.assertEqual(self.byte_data[5], content[0])

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_bytes_with_zero_byte_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account.name, storage_account_key)
        blob_data = b''
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        # the get request should fail in this case since the blob is empty and yet there is a range specified
        with self.assertRaises(HttpResponseError) as e:
            await blob.download_blob(offset=0, length=5)
        self.assertEqual(StorageErrorCode.invalid_range,
                         e.exception.error_code)

        with self.assertRaises(HttpResponseError) as e:
            await blob.download_blob(offset=3, length=5)
        self.assertEqual(StorageErrorCode.invalid_range,
                         e.exception.error_code)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_with_missing_start_range_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        await self._setup(storage_account.name, storage_account_key)
        blob_data = b'foobar'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        # the get request should fail fast in this case since start_range is missing while end_range is specified
        with self.assertRaises(ValueError):
            await blob.download_blob(length=3)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_snapshot_async(self, resource_group,
                                                    location, storage_account,
                                                    storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        snapshot_ref = await blob.create_snapshot()
        snapshot = self.bsc.get_blob_client(self.container_name,
                                            self.byte_blob,
                                            snapshot=snapshot_ref)

        await blob.upload_blob(
            self.byte_data,
            overwrite=True)  # Modify the blob so the Etag no longer matches

        # Act
        content = await (await
                         snapshot.download_blob(max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_with_progress_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
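            # raw_response_hook: record (bytes downloaded so far, total blob size) for each pipeline response.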
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await
                         blob.download_blob(raw_response_hook=callback,
                                            max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_non_parallel_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await
                         blob.download_blob(raw_response_hook=callback,
                                            max_concurrency=1)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_bytes_small_async(self, resource_group,
                                                 location, storage_account,
                                                 storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = self.get_random_bytes(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (
            await blob.download_blob(raw_response_hook=callback)).readall()

        # Assert
        self.assertEqual(blob_data, content)
        self.assert_download_progress(len(blob_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_async(self, resource_group, location,
                                            storage_account,
                                            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'get_blob_to_stream_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(max_concurrency=2)
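            # readinto() streams the blob into the open file and returns the number of bytes written.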
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_with_progress_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'blob_to_stream_with_progress_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)
        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_non_parallel_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'blob_to_stream_non_parallel_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=1)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_small_async(self, resource_group,
                                                  location, storage_account,
                                                  storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = self.get_random_bytes(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'blob_to_stream_small_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 1024)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data, actual)
        self.assert_download_progress(len(blob_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_async(self, resource_group,
                                                 location, storage_account,
                                                 storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        end_range = self.config.max_single_get_size
        FILE_PATH = 'ranged_get_blob_to_path_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=end_range - 1,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, end_range - 1)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:end_range], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_with_progress_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        start_range = 3
        end_range = self.config.max_single_get_size + 1024
        FILE_PATH = 'get_blob_to_path_with_progress_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=start_range,
                                                  length=end_range,
                                                  raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, self.config.max_single_get_size + 1024)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(
                self.byte_data[start_range:end_range + start_range], actual)
        self.assert_download_progress(end_range,
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_small_async(self, resource_group,
                                                       location,
                                                       storage_account,
                                                       storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'get_blob_to_path_small_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=4,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 4)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:5], actual)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_non_parallel_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'ranged_get_blob_to_path_non_parallel_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=3,
                                                  max_concurrency=1)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 3)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:4], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_invalid_range_parallel_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_size = self.config.max_single_get_size + 1
        blob_data = self.get_random_bytes(blob_size)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        FILE_PATH = 'path_invalid_range_parallel_async.temp.dat'
        end_range = 2 * self.config.max_single_get_size
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=end_range,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, blob_size)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data[1:blob_size], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_ranged_get_blob_to_path_invalid_range_non_parallel_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_size = 1024
        blob_data = self.get_random_bytes(blob_size)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        end_range = 2 * self.config.max_single_get_size
        FILE_PATH = 'path_invalid_range_non_parallel_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=end_range,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, blob_size)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data[1:blob_size], actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_async(self, resource_group, location,
                                          storage_account,
                                          storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text_blob = self.get_resource_name('textblob')
        text_data = self.get_random_text_data(self.config.max_single_get_size +
                                              1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        # Act
        stream = await blob.download_blob(max_concurrency=2, encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_with_progress_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text_blob = self.get_resource_name('textblob')
        text_data = self.get_random_text_data(self.config.max_single_get_size +
                                              1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback,
                                          max_concurrency=2,
                                          encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)
        self.assert_download_progress(len(text_data.encode('utf-8')),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_non_parallel_async(self, resource_group,
                                                       location,
                                                       storage_account,
                                                       storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text_blob = self._get_blob_reference()
        text_data = self.get_random_text_data(self.config.max_single_get_size +
                                              1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback,
                                          max_concurrency=1,
                                          encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)
        self.assert_download_progress(len(text_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_small_async(self, resource_group, location,
                                                storage_account,
                                                storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_data = self.get_random_text_data(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback,
                                          encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(blob_data, content)
        self.assert_download_progress(len(blob_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_with_encoding_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text = u'hello 啊齄丂狛狜 world'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(text, encoding='utf-16')

        # Act
        stream = await blob.download_blob(encoding='utf-16')
        content = await stream.readall()

        # Assert
        self.assertEqual(text, content)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_text_with_encoding_and_progress_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        text = u'hello 啊齄丂狛狜 world'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(text, encoding='utf-16')

        # Act
        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        stream = await blob.download_blob(raw_response_hook=callback,
                                          encoding='utf-16')
        content = await stream.readall()

        # Assert
        self.assertEqual(text, content)
        self.assert_download_progress(len(text.encode('utf-8')),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_non_seekable_async(self, resource_group, location,
                                               storage_account,
                                               storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'get_blob_non_seekable_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            non_seekable_stream = StorageGetBlobTestAsync.NonSeekableFile(
                stream)
            downloader = await blob.download_blob(max_concurrency=1)
            read_bytes = await downloader.readinto(non_seekable_stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_non_seekable_parallel_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'get_blob_non_seekable_parallel_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            non_seekable_stream = StorageGetBlobTestAsync.NonSeekableFile(
                stream)

            with self.assertRaises(ValueError):
                downloader = await blob.download_blob(max_concurrency=2)
                await downloader.readinto(non_seekable_stream)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_exact_get_size_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        FILE_PATH = 'stream_exact_get_size_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(byte_data, actual)
        self.assert_download_progress(len(byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)
        self._teardown(FILE_PATH)

    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_exact_get_size_async(self, resource_group,
                                                 location, storage_account,
                                                 storage_account_key):
        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (
            await blob.download_blob(raw_response_hook=callback)).readall()

        # Assert
        self.assertEqual(byte_data, content)
        self.assert_download_progress(len(byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_exact_chunk_size_async(self, resource_group,
                                                   location, storage_account,
                                                   storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size +
                                          self.config.max_chunk_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (
            await blob.download_blob(raw_response_hook=callback)).readall()

        # Assert
        self.assertEqual(byte_data, content)
        self.assert_download_progress(len(byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_to_stream_with_md5_async(self, resource_group,
                                                     location, storage_account,
                                                     storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        FILE_PATH = 'blob_to_stream_with_md5_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(validate_content=True,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_with_md5_async(self, resource_group, location,
                                           storage_account,
                                           storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await
                         blob.download_blob(validate_content=True,
                                            max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_to_stream_with_overall_md5_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        # Arrange
        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        props = await blob.get_blob_properties()
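        # Stamp a known (fake) MD5 on the blob's content settings so the ranged download below can
        # assert that the overall MD5 is surfaced unchanged on the downloader properties.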
        props.content_settings.content_md5 = b'MDAwMDAwMDA='
        await blob.set_http_headers(props.content_settings)

        # Act
        FILE_PATH = 'range_to_stream_with_overall_md5_async.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=0,
                                                  length=1024,
                                                  validate_content=True,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 1024)
        self.assertEqual(b'MDAwMDAwMDA=',
                         downloader.properties.content_settings.content_md5)
        self.assertEqual(downloader.size, 1024)
        self._teardown(FILE_PATH)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_with_overall_md5_async(
            self, resource_group, location, storage_account,
            storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Arrange
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = b'MDAwMDAwMDA='
        await blob.set_http_headers(props.content_settings)

        # Act
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Assert
        self.assertEqual(b'MDAwMDAwMDA=',
                         content.properties.content_settings.content_md5)

    @pytest.mark.live_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_range_with_range_md5_async(self, resource_group,
                                                       location,
                                                       storage_account,
                                                       storage_account_key):
        # parallel tests introduce random order of requests, can only run live

        await self._setup(storage_account.name, storage_account_key)
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Arrange
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = None
        await blob.set_http_headers(props.content_settings)

        # Act
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Assert
        self.assertIsNotNone(content.properties.content_settings.content_type)
        self.assertIsNone(content.properties.content_settings.content_md5)
        self.assertEqual(content.properties.size, 1024)


# ------------------------------------------------------------------------------
class StorageBlobTagsTest(AsyncStorageTestCase):
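    # Tests for blob index tags: setting/getting tags, supplying tags on upload, commit and copy,
    # and filtering blobs across containers by a tag expression.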

    async def _setup(self, storage_account, key):
        self.bsc = BlobServiceClient(self.account_url(storage_account, "blob"), credential=key)
        self.container_name = self.get_resource_name("container")
        if self.is_live:
            container = self.bsc.get_container_client(self.container_name)
            try:
                await container.create_container(timeout=5)
            except ResourceExistsError:
                pass
        self.byte_data = self.get_random_bytes(1024)

    #--Helpers-----------------------------------------------------------------
    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    async def _create_block_blob(self, tags=None, container_name=None, blob_name=None):
        blob_name = blob_name or self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(container_name or self.container_name, blob_name)
        resp = await blob_client.upload_blob(self.byte_data, length=len(self.byte_data), overwrite=True, tags=tags)
        return blob_client, resp

    async def _create_empty_block_blob(self):
        blob_name = self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        resp = await blob_client.upload_blob(b'', length=0, overwrite=True)
        return blob_client, resp

    async def _create_append_blob(self, tags=None):
        blob_name = self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        resp = await blob_client.create_append_blob(tags=tags)
        return blob_client, resp

    async def _create_page_blob(self, tags=None):
        blob_name = self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        resp = await blob_client.create_page_blob(tags=tags, size=512)
        return blob_client, resp

    async def _create_container(self, prefix="container"):
        container_name = self.get_resource_name(prefix)
        try:
            await self.bsc.create_container(container_name)
        except ResourceExistsError:
            pass
        return container_name

    #-- test cases for blob tags ----------------------------------------------

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_set_blob_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        blob_client, _ = await self._create_block_blob()

        # Act
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        resp = await blob_client.set_blob_tags(tags)

        # Assert
        self.assertIsNotNone(resp)

    @pytest.mark.playback_test_only
    @GlobalStorageAccountPreparer()
    @AsyncStorageTestCase.await_prepared_test
    async def test_set_blob_tags_for_a_version(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        # use this version to set tag
        blob_client, resp = await self._create_block_blob()
        await self._create_block_blob()
        # TODO: enable versionid for this account and test set tag for a version

        # Act
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        resp = await blob_client.set_blob_tags(tags, version_id=resp['version_id'])

        # Assert
        self.assertIsNotNone(resp)

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        blob_client, resp = await self._create_block_blob()

        # Act
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        await blob_client.set_blob_tags(tags)

        resp = await blob_client.get_blob_tags()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), 3)
        for key, value in resp.items():
            self.assertEqual(tags[key], value)

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_tags_for_a_snapshot(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"+-./:=_ ": "firsttag", "tag2": "+-./:=_", "+-./:=_1": "+-./:=_"}
        blob_client, resp = await self._create_block_blob(tags=tags)

        snapshot = await blob_client.create_snapshot()
        snapshot_client = self.bsc.get_blob_client(self.container_name, blob_client.blob_name, snapshot=snapshot)

        resp = await snapshot_client.get_blob_tags()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), 3)
        for key, value in resp.items():
            self.assertEqual(tags[key], value)

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_upload_block_blob_with_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        blob_client, resp = await self._create_block_blob(tags=tags)

        resp = await blob_client.get_blob_tags()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), 3)

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_get_blob_properties_returns_tags_num(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        blob_client, resp = await self._create_block_blob(tags=tags)

        resp = await blob_client.get_blob_properties()
        downloaded = await blob_client.download_blob()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(resp.tag_count, len(tags))
        self.assertEqual(downloaded.properties.tag_count, len(tags))

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_append_blob_with_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"+-./:=_ ": "firsttag", "tag2": "+-./:=_", "+-./:=_1": "+-./:=_"}
        blob_client, resp = await self._create_append_blob(tags=tags)

        resp = await blob_client.get_blob_tags()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), 3)

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_create_page_blob_with_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        blob_client, resp = await self._create_page_blob(tags=tags)

        resp = await blob_client.get_blob_tags()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), 3)

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_commit_block_list_with_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        blob_client, resp = await self._create_empty_block_blob()

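        # Stage three blocks, then commit them as the blob body while applying tags in the same call.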
        await blob_client.stage_block('1', b'AAA')
        await blob_client.stage_block('2', b'BBB')
        await blob_client.stage_block('3', b'CCC')

        # Act
        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
        await blob_client.commit_block_list(block_list, tags=tags)

        resp = await blob_client.get_blob_tags()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), len(tags))

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_start_copy_from_url_with_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        blob_client, resp = await self._create_block_blob()

        # Act
        sourceblob = '{0}/{1}/{2}'.format(
            self.account_url(storage_account, "blob"), self.container_name, blob_client.blob_name)

        copyblob = self.bsc.get_blob_client(self.container_name, 'blob1copy')
        copy = await copyblob.start_copy_from_url(sourceblob, tags=tags)

        # Assert
        self.assertIsNotNone(copy)
        self.assertEqual(copy['copy_status'], 'success')
        self.assertFalse(isinstance(copy['copy_status'], Enum))
        self.assertIsNotNone(copy['copy_id'])

        copy_content = await (await copyblob.download_blob()).readall()
        self.assertEqual(copy_content, self.byte_data)

        resp = await copyblob.get_blob_tags()

        # Assert
        self.assertIsNotNone(resp)
        self.assertEqual(len(resp), len(tags))

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_list_blobs_returns_tags(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        await self._create_block_blob(tags=tags)
        container = self.bsc.get_container_client(self.container_name)
        blob_list = container.list_blobs(include="tags")

        #Assert
        async for blob in blob_list:
            self.assertEqual(blob.tag_count, len(tags))
            for key, value in blob.tags.items():
                self.assertEqual(tags[key], value)

    @GlobalResourceGroupPreparer()
    @StorageAccountPreparer(random_name_enabled=True, location="canadacentral", name_prefix='pytagstorage')
    @AsyncStorageTestCase.await_prepared_test
    async def test_filter_blobs(self, resource_group, location, storage_account, storage_account_key):
        await self._setup(storage_account, storage_account_key)
        container_name1 = await self._create_container(prefix="container1")
        container_name2 = await self._create_container(prefix="container2")
        container_name3 = await self._create_container(prefix="container3")

        tags = {"tag1": "firsttag", "tag2": "secondtag", "tag3": "thirdtag"}
        await self._create_block_blob(tags=tags, blob_name="blob1")
        await self._create_block_blob(tags=tags, blob_name="blob2", container_name=container_name1)
        await self._create_block_blob(tags=tags, blob_name="blob3", container_name=container_name2)
        await self._create_block_blob(tags=tags, blob_name="blob4", container_name=container_name3)

        if self.is_live:
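            # give the service time to index the newly written tags before filtering on them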
            sleep(10)

        where = "tag1='firsttag'"
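        # find_blobs_by_tags takes an SQL-like filter expression; request two results per page and walk the pages.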
        blob_list = self.bsc.find_blobs_by_tags(filter_expression=where, results_per_page=2).by_page()
        first_page = await blob_list.__anext__()
        items_on_page1 = list()
        async for item in first_page:
            items_on_page1.append(item)
        second_page = await blob_list.__anext__()
        items_on_page2 = list()
        async for item in second_page:
            items_on_page2.append(item)

        self.assertEqual(2, len(items_on_page1))
        self.assertEqual(2, len(items_on_page2))
Ejemplo n.º 16
0
class StorageLoggingTestAsync(StorageTestCase):
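    # Verifies that secrets (SharedKey authorization values and SAS signatures) never appear in HTTP logs.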
    def setUp(self):
        super(StorageLoggingTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     transport=AiohttpTestTransport())
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except Exception:
                pass

        return super(StorageLoggingTestAsync, self).tearDown()

    async def _setup(self):
        if not self.is_playback():
            try:
                # create source blob to be copied from
                self.source_blob_name = self.get_resource_name('srcblob')
                self.source_blob_data = self.get_random_bytes(4 * 1024)
                source_blob = self.bsc.get_blob_client(self.container_name,
                                                       self.source_blob_name)

                await self.bsc.create_container(self.container_name)
                await source_blob.upload_blob(self.source_blob_data)

                # generate a SAS so that it is accessible with a URL
                sas_token = source_blob.generate_shared_access_signature(
                    permission=BlobPermissions.READ,
                    expiry=datetime.utcnow() + timedelta(hours=1),
                )
                sas_source = BlobClient(source_blob.url, credential=sas_token)
                self.source_blob_url = sas_source.url
            except:
                pass

    async def _test_authorization_is_scrubbed_off(self):
        await self._setup()
        # Arrange
        container = self.bsc.get_container_client(self.container_name)
        # Act
        with LogCaptured(self) as log_captured:
            await container.get_container_properties(logging_enable=True)
            log_as_str = log_captured.getvalue()
            # Assert
            # make sure the Authorization header name is logged, but its value is not:
            # the keyword 'SharedKey' only appears inside the header's value, so it
            # must be absent from the captured log
            self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str)
            self.assertFalse('SharedKey' in log_as_str)

    @record
    def test_authorization_is_scrubbed_off(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_authorization_is_scrubbed_off())

    async def _test_sas_signature_is_scrubbed_off(self):
        # Test can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        await self._setup()
        # Arrange
        container = self.bsc.get_container_client(self.container_name)
        token = container.generate_shared_access_signature(
            permission=ContainerPermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        # parse out the signed signature
        token_components = parse_qs(token)
        signed_signature = quote(
            token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

        sas_service = ContainerClient(container.url, credential=token)

        # Act
        with LogCaptured(self) as log_captured:
            await sas_service.get_account_information(logging_enable=True)
            log_as_str = log_captured.getvalue()

            # Assert
            # make sure the query parameter 'sig' is logged, but its value is not
            self.assertTrue(
                QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
            self.assertFalse(signed_signature in log_as_str)

    @record
    def test_sas_signature_is_scrubbed_off(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_sas_signature_is_scrubbed_off())

    async def _test_copy_source_sas_is_scrubbed_off(self):
        # Test can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        await self._setup()
        # Arrange
        dest_blob_name = self.get_resource_name('destblob')
        dest_blob = self.bsc.get_blob_client(self.container_name,
                                             dest_blob_name)

        # parse out the signed signature
        token_components = parse_qs(self.source_blob_url)
        signed_signature = quote(
            token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

        # Act
        with LogCaptured(self) as log_captured:
            await dest_blob.start_copy_from_url(self.source_blob_url,
                                                requires_sync=True,
                                                logging_enable=True)
            log_as_str = log_captured.getvalue()

            # Assert
            # make sure the query parameter 'sig' is logged, but its value is not
            self.assertTrue(
                QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
            self.assertFalse(signed_signature in log_as_str)

            # make sure the Authorization header name is logged, but its value is not:
            # the keyword 'SharedKey' only appears inside the header's value, so it
            # must be absent from the captured log
            self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str)
            self.assertFalse('SharedKey' in log_as_str)

    @record
    def test_copy_source_sas_is_scrubbed_off(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_copy_source_sas_is_scrubbed_off())
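
The logging tests above rely on the SDK redacting the Authorization header and the SAS `sig` query parameter before anything is written to the log. A rough sketch of switching that redacted HTTP logging on in ordinary code (account URL and key are placeholders); `logging_enable` can be passed at client construction, as here, or per call, as the tests do.

import asyncio
import logging
import sys
from azure.storage.blob.aio import BlobServiceClient

ACCOUNT_URL = "https://<account>.blob.core.windows.net"  # placeholder
ACCOUNT_KEY = "<key>"                                     # placeholder

# The SDK emits its (already redacted) HTTP request/response lines under the
# 'azure' logger namespace at DEBUG level.
logger = logging.getLogger("azure")
logger.setLevel(logging.DEBUG)
logger.addHandler(logging.StreamHandler(stream=sys.stdout))


async def main():
    # logging_enable=True at construction turns HTTP logging on for every
    # operation made through this client.
    async with BlobServiceClient(ACCOUNT_URL, credential=ACCOUNT_KEY,
                                 logging_enable=True) as bsc:
        await bsc.get_service_properties()


asyncio.run(main())
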
Ejemplo n.º 17
0
class BlobStorageAccountTestAsync(StorageTestCase):
    def setUp(self):
        super(BlobStorageAccountTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()
        self.bsc = BlobServiceClient(url, credential=credential, transport=AiohttpTestTransport())
        self.container_name = self.get_resource_name('utcontainer')

        # if not self.is_playback():
        #     self.bsc.create_container(self.container_name)

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.bsc.delete_container(self.container_name))
            except:
                pass

        return super(BlobStorageAccountTestAsync, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except:
                pass

    def _get_blob_reference(self):
        blob_name = self.get_resource_name(TEST_BLOB_PREFIX)
        return self.bsc.get_blob_client(self.container_name, blob_name)

    async def _create_blob(self):
        blob = self._get_blob_reference()
        await blob.upload_blob(b'')
        return blob

    async def assertBlobEqual(self, container_name, blob_name, expected_data):
        blob = self.bsc.get_blob_client(container_name, blob_name)
        stream = await blob.download_blob()
        actual_data = await stream.content_as_bytes()
        self.assertEqual(actual_data, expected_data)

    # --Tests specific to Blob Storage Accounts (not general purpose)------------

    async def _test_standard_blob_tier_set_tier_api(self):
        await self._setup()
        container = self.bsc.get_container_client(self.container_name)
        tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]

        for tier in tiers:
            blob = self._get_blob_reference()
            data = b'hello world'
            await blob.upload_blob(data)

            blob_ref = await blob.get_blob_properties()
            self.assertIsNotNone(blob_ref.blob_tier)
            self.assertTrue(blob_ref.blob_tier_inferred)
            self.assertIsNone(blob_ref.blob_tier_change_time)

            blobs = []
            async for b in container.list_blobs():
                blobs.append(b)

            # Assert
            self.assertIsNotNone(blobs)
            self.assertGreaterEqual(len(blobs), 1)
            self.assertIsNotNone(blobs[0])
            self.assertNamedItemInContainer(blobs, blob.blob_name)
            self.assertIsNotNone(blobs[0].blob_tier)
            self.assertTrue(blobs[0].blob_tier_inferred)
            self.assertIsNone(blobs[0].blob_tier_change_time)

            await blob.set_standard_blob_tier(tier)

            blob_ref2 = await blob.get_blob_properties()
            self.assertEqual(tier, blob_ref2.blob_tier)
            self.assertFalse(blob_ref2.blob_tier_inferred)
            self.assertIsNotNone(blob_ref2.blob_tier_change_time)

            blobs = []
            async for b in container.list_blobs():
                blobs.append(b)

            # Assert
            self.assertIsNotNone(blobs)
            self.assertGreaterEqual(len(blobs), 1)
            self.assertIsNotNone(blobs[0])
            self.assertNamedItemInContainer(blobs, blob.blob_name)
            self.assertEqual(blobs[0].blob_tier, tier)
            self.assertFalse(blobs[0].blob_tier_inferred)
            self.assertIsNotNone(blobs[0].blob_tier_change_time)

            await blob.delete_blob()

    @record
    def test_standard_blob_tier_set_tier_api(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_standard_blob_tier_set_tier_api())

    async def _test_rehydration_status(self):
        await self._setup()
        blob_name = 'rehydration_test_blob_1'
        blob_name2 = 'rehydration_test_blob_2'
        container = self.bsc.get_container_client(self.container_name)

        data = b'hello world'
        blob = await container.upload_blob(blob_name, data)
        # setting a non-archive tier on an archived blob only queues rehydration;
        # the blob stays in Archive until the copy completes
        await blob.set_standard_blob_tier(StandardBlobTier.Archive)
        await blob.set_standard_blob_tier(StandardBlobTier.Cool)

        blob_ref = await blob.get_blob_properties()
        self.assertEqual(StandardBlobTier.Archive, blob_ref.blob_tier)
        self.assertEqual("rehydrate-pending-to-cool", blob_ref.archive_status)
        self.assertFalse(blob_ref.blob_tier_inferred)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        await blob.delete_blob()

        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob.blob_name)
        self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
        self.assertEqual("rehydrate-pending-to-cool", blobs[0].archive_status)
        self.assertFalse(blobs[0].blob_tier_inferred)

        blob2 = await container.upload_blob(blob_name2, data)
        await blob2.set_standard_blob_tier(StandardBlobTier.Archive)
        await blob2.set_standard_blob_tier(StandardBlobTier.Hot)

        blob_ref2 = await blob2.get_blob_properties()
        self.assertEqual(StandardBlobTier.Archive, blob_ref2.blob_tier)
        self.assertEqual("rehydrate-pending-to-hot", blob_ref2.archive_status)
        self.assertFalse(blob_ref2.blob_tier_inferred)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob2.blob_name)
        self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
        self.assertEqual("rehydrate-pending-to-hot", blobs[0].archive_status)
        self.assertFalse(blobs[0].blob_tier_inferred)

    @record
    def test_rehydration_status(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_rehydration_status())
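
For completeness, a small sketch of the tiering calls the last two tests rely on, written against the azure.storage.blob.aio client directly rather than the test base class; the account URL, key, and names are placeholders.

import asyncio
from azure.storage.blob import StandardBlobTier
from azure.storage.blob.aio import BlobServiceClient

ACCOUNT_URL = "https://<account>.blob.core.windows.net"  # placeholder
ACCOUNT_KEY = "<key>"                                     # placeholder


async def archive_and_rehydrate():
    async with BlobServiceClient(ACCOUNT_URL, credential=ACCOUNT_KEY) as bsc:
        blob = bsc.get_blob_client("mycontainer", "archive-demo")
        await blob.upload_blob(b"hello world", overwrite=True)

        # Move the blob to Archive, then request rehydration back to Hot.
        await blob.set_standard_blob_tier(StandardBlobTier.Archive)
        await blob.set_standard_blob_tier(StandardBlobTier.Hot)

        props = await blob.get_blob_properties()
        # While the copy is still frozen, the tier stays Archive and
        # archive_status reports the pending rehydration target.
        print(props.blob_tier, props.archive_status)  # Archive rehydrate-pending-to-hot


asyncio.run(archive_and_rehydrate())
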