# NOTE: the imports and constants below are assumed for readability; they are
# not part of the original listing. StorageTestCase, TestMode, record and
# AiohttpTestTransport come from the azure-sdk-for-python test scaffolding and
# are not reproduced here.
import asyncio
import os

from azure.core.exceptions import HttpResponseError, ResourceExistsError
from azure.storage.blob import (
    BlobBlock,
    BlobType,
    ContentSettings,
    StandardBlobTier,
)
from azure.storage.blob.aio import BlobServiceClient

TEST_BLOB_PREFIX = 'blob'          # assumed prefix for generated blob names
FILE_PATH = 'blob_input.temp.dat'  # assumed scratch file used by the file-based tests
LARGE_BLOB_SIZE = 64 * 1024        # assumed size large enough to force chunked uploads


class StorageAppendBlobTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageAppendBlobTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     max_block_size=4 * 1024,
                                     transport=AiohttpTestTransport())
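        # NOTE: max_block_size is deliberately small (4 KB) so the chunked
        # upload code paths are exercised without large payloads (the block
        # blob tests below state the same rationale).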
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except:
                pass

        if os.path.isfile(FILE_PATH):
            try:
                os.remove(FILE_PATH)
            except:
                pass

        return super(StorageAppendBlobTestAsync, self).tearDown()

    #--Helpers-----------------------------------------------------------------

    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except:
                pass

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    async def _create_blob(self):
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.create_append_blob()
        return blob

    async def assertBlobEqual(self, blob, expected_data):
        stream = await blob.download_blob()
        actual_data = await stream.content_as_bytes()
        self.assertEqual(actual_data, expected_data)

    class NonSeekableFile(object):
        def __init__(self, wrapped_file):
            self.wrapped_file = wrapped_file

        def write(self, data):
            self.wrapped_file.write(data)

        def read(self, count):
            return self.wrapped_file.read(count)
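
    # NonSeekableFile exposes only read()/write() and hides seek()/tell(), so
    # the "non seekable" upload tests below exercise the code path where the
    # stream position and length cannot be queried.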

    #--Test cases for append blobs --------------------------------------------

    async def _test_create_blob_async(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()

        # Act
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        create_resp = await blob.create_append_blob()

        # Assert
        blob_properties = await blob.get_blob_properties()
        self.assertIsNotNone(blob_properties)
        self.assertEqual(blob_properties.etag, create_resp.get('etag'))
        self.assertEqual(blob_properties.last_modified,
                         create_resp.get('last_modified'))

    @record
    def test_create_blob_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_async())
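
    # Every synchronous test_* wrapper in this listing follows this pattern:
    # @record handles HTTP recording/playback while the event loop drives the
    # corresponding async _test_* coroutine to completion.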

    async def _test_create_blob_with_lease_id_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        lease = await blob.acquire_lease()
        create_resp = await blob.create_append_blob(lease=lease)

        # Assert
        blob_properties = await blob.get_blob_properties()
        self.assertIsNotNone(blob_properties)
        self.assertEqual(blob_properties.etag, create_resp.get('etag'))
        self.assertEqual(blob_properties.last_modified,
                         create_resp.get('last_modified'))

    @record
    def test_create_blob_with_lease_id_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_with_lease_id_async())

    async def _test_create_blob_with_metadata_async(self):
        # Arrange
        await self._setup()
        metadata = {'hello': 'world', 'number': '42'}
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.create_append_blob(metadata=metadata)

        # Assert
        md = await blob.get_blob_properties()
        self.assertDictEqual(md.metadata, metadata)

    @record
    def test_create_blob_with_metadata_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_with_metadata_async())

    async def _test_append_block_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
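        # Each appended chunk b'block 0' .. b'block 4' is 7 bytes, so the
        # reported append offset should advance by 7 per call.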
        for i in range(5):
            resp = await blob.append_block(
                u'block {0}'.format(i).encode('utf-8'))
            self.assertEqual(int(resp['blob_append_offset']), 7 * i)
            self.assertEqual(resp['blob_committed_block_count'], i + 1)
            self.assertIsNotNone(resp['etag'])
            self.assertIsNotNone(resp['last_modified'])

        # Assert
        await self.assertBlobEqual(blob,
                                   b'block 0block 1block 2block 3block 4')

    @record
    def test_append_block_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_block_async())

    async def _test_append_block_unicode_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        resp = await blob.append_block(u'啊齄丂狛狜', encoding='utf-16')
        self.assertEqual(int(resp['blob_append_offset']), 0)
        self.assertEqual(resp['blob_committed_block_count'], 1)
        self.assertIsNotNone(resp['etag'])
        self.assertIsNotNone(resp['last_modified'])

        # Assert

    @record
    def test_append_block_unicode_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_block_unicode_async())

    async def _test_append_block_with_md5_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        resp = await blob.append_block(b'block', validate_content=True)
        self.assertEqual(int(resp['blob_append_offset']), 0)
        self.assertEqual(resp['blob_committed_block_count'], 1)
        self.assertIsNotNone(resp['etag'])
        self.assertIsNotNone(resp['last_modified'])

        # Assert

    @record
    def test_append_block_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_block_with_md5_async())

    async def _test_create_append_blob_with_no_overwrite_async(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = self.get_random_bytes(LARGE_BLOB_SIZE)
        data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512)

        # Act
        create_resp = await blob.upload_blob(data1,
                                             overwrite=True,
                                             blob_type=BlobType.AppendBlob,
                                             metadata={'BlobData': 'Data1'})

        update_resp = await blob.upload_blob(data2,
                                             overwrite=False,
                                             blob_type=BlobType.AppendBlob,
                                             metadata={'BlobData': 'Data2'})

        props = await blob.get_blob_properties()

        # Assert
        appended_data = data1 + data2
        await self.assertBlobEqual(blob, appended_data)
        self.assertEqual(props.etag, update_resp.get('etag'))
        self.assertEqual(props.blob_type, BlobType.AppendBlob)
        self.assertEqual(props.last_modified, update_resp.get('last_modified'))
        self.assertEqual(props.metadata, {'BlobData': 'Data1'})
        self.assertEqual(props.size, LARGE_BLOB_SIZE + LARGE_BLOB_SIZE + 512)

    @record
    def test_create_append_blob_with_no_overwrite_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_append_blob_with_no_overwrite_async())

    async def _test_create_append_blob_with_overwrite_async(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = self.get_random_bytes(LARGE_BLOB_SIZE)
        data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512)

        # Act
        create_resp = await blob.upload_blob(data1,
                                             overwrite=True,
                                             blob_type=BlobType.AppendBlob,
                                             metadata={'BlobData': 'Data1'})
        update_resp = await blob.upload_blob(data2,
                                             overwrite=True,
                                             blob_type=BlobType.AppendBlob,
                                             metadata={'BlobData': 'Data2'})

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(blob, data2)
        self.assertEqual(props.etag, update_resp.get('etag'))
        self.assertEqual(props.last_modified, update_resp.get('last_modified'))
        self.assertEqual(props.metadata, {'BlobData': 'Data2'})
        self.assertEqual(props.blob_type, BlobType.AppendBlob)
        self.assertEqual(props.size, LARGE_BLOB_SIZE + 512)

    @record
    def test_create_append_blob_with_overwrite_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_append_blob_with_overwrite_async())

    async def _test_append_blob_from_bytes_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        data = b'abcdefghijklmnopqrstuvwxyz'
        append_resp = await blob.upload_blob(data,
                                             blob_type=BlobType.AppendBlob)
        blob_properties = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assertEqual(blob_properties.etag, append_resp['etag'])
        self.assertEqual(blob_properties.last_modified,
                         append_resp['last_modified'])

    @record
    def test_append_blob_from_bytes_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_blob_from_bytes_async())

    async def _test_append_blob_from_0_bytes_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        data = b''
        append_resp = await blob.upload_blob(data,
                                             blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data)
        # appending nothing should not make any network call
        self.assertIsNone(append_resp.get('etag'))
        self.assertIsNone(append_resp.get('last_modified'))

    @record
    def test_append_blob_from_0_bytes_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_blob_from_0_bytes_async())

    async def _test_append_blob_from_bytes_with_progress_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = b'abcdefghijklmnopqrstuvwxyz'

        # Act
        progress = []

        def progress_gen(upload):
            progress.append((0, len(upload)))
            yield upload

        upload_data = progress_gen(data)
        await blob.upload_blob(upload_data, blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_append_blob_from_bytes_with_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_bytes_with_progress_async())

    async def _test_append_blob_from_bytes_with_index_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        data = b'abcdefghijklmnopqrstuvwxyz'
        await blob.upload_blob(data[3:], blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data[3:])

    @record
    def test_append_blob_from_bytes_with_index_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_bytes_with_index_async())

    async def _test_append_blob_from_bytes_with_index_and_count_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        data = b'abcdefghijklmnopqrstuvwxyz'
        await blob.upload_blob(data[3:],
                               length=5,
                               blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data[3:8])

    @record
    def test_append_blob_from_bytes_with_index_and_count_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_bytes_with_index_and_count_async())

    async def _test_append_blob_from_bytes_chunked_upload_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        append_resp = await blob.upload_blob(data,
                                             blob_type=BlobType.AppendBlob)
        blob_properties = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assertEqual(blob_properties.etag, append_resp['etag'])
        self.assertEqual(blob_properties.last_modified,
                         append_resp.get('last_modified'))

    @record
    def test_append_blob_from_bytes_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_bytes_chunked_upload_async())

    async def _test_append_blob_from_bytes_with_progress_chunked_upload_async(
            self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        progress = []

        def progress_gen(upload):
            n = self.config.max_block_size
            total = len(upload)
            current = 0
            while upload:
                progress.append((current, total))
                yield upload[:n]
                current += len(upload[:n])
                upload = upload[n:]

        upload_data = progress_gen(data)
        await blob.upload_blob(upload_data, blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_append_blob_from_bytes_with_progress_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_append_blob_from_bytes_with_progress_chunked_upload_async())

    async def _test_append_blob_from_bytes_chunked_upload_with_index_and_count_async(
            self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        index = 33
        blob_size = len(data) - 66

        # Act
        await blob.upload_blob(data[index:],
                               length=blob_size,
                               blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data[index:index + blob_size])

    @record
    def test_append_blob_from_bytes_chunked_upload_with_index_and_count_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_append_blob_from_bytes_chunked_upload_with_index_and_count_async(
            ))

    async def _test_append_blob_from_path_chunked_upload_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            append_resp = await blob.upload_blob(stream,
                                                 blob_type=BlobType.AppendBlob)

        blob_properties = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assertEqual(blob_properties.etag, append_resp.get('etag'))
        self.assertEqual(blob_properties.last_modified,
                         append_resp.get('last_modified'))

    @record
    def test_append_blob_from_path_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_path_chunked_upload_async())

    async def _test_append_blob_from_path_with_progress_chunked_upload_async(
            self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def progress_gen(upload):
            n = self.config.max_block_size
            total = LARGE_BLOB_SIZE
            current = 0
            while upload:
                chunk = upload.read(n)
                if not chunk:
                    break
                progress.append((current, total))
                yield chunk
                current += len(chunk)

        with open(FILE_PATH, 'rb') as stream:
            upload_data = progress_gen(stream)
            await blob.upload_blob(upload_data, blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_append_blob_from_path_with_progress_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_append_blob_from_path_with_progress_chunked_upload_async())

    async def _test_append_blob_from_stream_chunked_upload_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            append_resp = await blob.upload_blob(stream,
                                                 blob_type=BlobType.AppendBlob)
        blob_properties = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assertEqual(blob_properties.etag, append_resp.get('etag'))
        self.assertEqual(blob_properties.last_modified,
                         append_resp.get('last_modified'))

    @record
    def test_append_blob_from_stream_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_stream_chunked_upload_async())

    async def _test_append_blob_from_stream_non_seekable_chunked_upload_known_size_async(
            self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)
        blob_size = len(data) - 66

        # Act
        with open(FILE_PATH, 'rb') as stream:
            non_seekable_file = StorageAppendBlobTestAsync.NonSeekableFile(
                stream)
            await blob.upload_blob(non_seekable_file,
                                   length=blob_size,
                                   blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data[:blob_size])

    @record
    def test_append_blob_from_stream_non_seekable_chunked_upload_known_size_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_append_blob_from_stream_non_seekable_chunked_upload_known_size_async(
            ))

    async def _test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size_async(
            self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            non_seekable_file = StorageAppendBlobTestAsync.NonSeekableFile(
                stream)
            await blob.upload_blob(non_seekable_file,
                                   blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data)

    @record
    def test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_append_blob_from_stream_non_seekable_chunked_upload_unknown_size_async(
            ))

    async def _test_append_blob_from_stream_with_multiple_appends_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream1:
            stream1.write(data)
        with open(FILE_PATH, 'wb') as stream2:
            stream2.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream1:
            await blob.upload_blob(stream1, blob_type=BlobType.AppendBlob)
        with open(FILE_PATH, 'rb') as stream2:
            await blob.upload_blob(stream2, blob_type=BlobType.AppendBlob)

        # Assert
        data = data * 2
        await self.assertBlobEqual(blob, data)

    @record
    def test_append_blob_from_stream_with_multiple_appends_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_stream_with_multiple_appends_async())

    async def _test_append_blob_from_stream_chunked_upload_with_count_async(
            self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   length=blob_size,
                                   blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data[:blob_size])

    @record
    def test_append_blob_from_stream_chunked_upload_with_count_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_stream_chunked_upload_with_count_async(
            ))

    async def _test_append_blob_from_stream_chunked_upload_with_count_parallel_async(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return
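        # (Recorded playback replays requests in a fixed order, so uploads
        # that send chunks concurrently cannot be matched deterministically.)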

        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            append_resp = await blob.upload_blob(stream,
                                                 length=blob_size,
                                                 blob_type=BlobType.AppendBlob)
        blob_properties = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(blob, data[:blob_size])
        self.assertEqual(blob_properties.etag, append_resp.get('etag'))
        self.assertEqual(blob_properties.last_modified,
                         append_resp.get('last_modified'))

    @record
    def test_append_blob_from_stream_chunked_upload_with_count_parallel_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_append_blob_from_stream_chunked_upload_with_count_parallel_async(
            ))

    async def _test_append_blob_from_text_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-8')

        # Act
        append_resp = await blob.upload_blob(text,
                                             blob_type=BlobType.AppendBlob)
        blob_properties = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(blob, data)
        self.assertEqual(blob_properties.etag, append_resp.get('etag'))
        self.assertEqual(blob_properties.last_modified,
                         append_resp.get('last_modified'))

    @record
    def test_append_blob_from_text_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_blob_from_text_async())

    async def _test_append_blob_from_text_with_encoding_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-16')

        # Act
        await blob.upload_blob(text,
                               encoding='utf-16',
                               blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, data)

    @record
    def test_append_blob_from_text_with_encoding_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_text_with_encoding_async())

    async def _test_append_blob_from_text_with_encoding_and_progress_async(
            self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-16')

        # Act
        progress = []

        def progress_gen(upload):
            progress.append((0, len(data)))
            yield upload

        upload_data = progress_gen(text)
        await blob.upload_blob(upload_data,
                               encoding='utf-16',
                               blob_type=BlobType.AppendBlob)

        # Assert
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_append_blob_from_text_with_encoding_and_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_text_with_encoding_and_progress_async(
            ))

    async def _test_append_blob_from_text_chunked_upload_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = self.get_random_text_data(LARGE_BLOB_SIZE)
        encoded_data = data.encode('utf-8')

        # Act
        await blob.upload_blob(data, blob_type=BlobType.AppendBlob)

        # Assert
        await self.assertBlobEqual(blob, encoded_data)

    @record
    def test_append_blob_from_text_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_append_blob_from_text_chunked_upload_async())

    async def _test_append_blob_with_md5_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()
        data = b'hello world'

        # Act
        await blob.append_block(data, validate_content=True)

        # Assert

    @record
    def test_append_blob_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_blob_with_md5_async())


# Example #2
class BlobStorageAccountTestAsync(StorageTestCase):
    def setUp(self):
        super(BlobStorageAccountTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()
        self.bsc = BlobServiceClient(url, credential=credential, transport=AiohttpTestTransport())
        self.container_name = self.get_resource_name('utcontainer')

        # if not self.is_playback():
        #     self.bsc.create_container(self.container_name)

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.bsc.delete_container(self.container_name))
            except:
                pass

        return super(BlobStorageAccountTestAsync, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except:
                pass

    def _get_blob_reference(self):
        blob_name = self.get_resource_name(TEST_BLOB_PREFIX)
        return self.bsc.get_blob_client(self.container_name, blob_name)

    async def _create_blob(self):
        blob = self._get_blob_reference()
        await blob.upload_blob(b'')
        return blob

    async def assertBlobEqual(self, container_name, blob_name, expected_data):
        blob = self.bsc.get_blob_client(container_name, blob_name)
        # download_blob() must be awaited to get the downloader, and
        # content_as_bytes() must be awaited as well (as in the other
        # assertBlobEqual helpers in this listing).
        stream = await blob.download_blob()
        actual_data = await stream.content_as_bytes()
        self.assertEqual(actual_data, expected_data)

    # --Tests specific to Blob Storage Accounts (not general purpose)------------

    async def _test_standard_blob_tier_set_tier_api(self):
        await self._setup()
        container = self.bsc.get_container_client(self.container_name)
        tiers = [StandardBlobTier.Archive, StandardBlobTier.Cool, StandardBlobTier.Hot]

        for tier in tiers:
            blob = self._get_blob_reference()
            data = b'hello world'
            await blob.upload_blob(data)

            blob_ref = await blob.get_blob_properties()
            self.assertIsNotNone(blob_ref.blob_tier)
            self.assertTrue(blob_ref.blob_tier_inferred)
            self.assertIsNone(blob_ref.blob_tier_change_time)
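            # Until a tier is set explicitly, the service reports the tier as
            # inferred and no tier-change time.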

            blobs = []
            async for b in container.list_blobs():
                blobs.append(b)

            # Assert
            self.assertIsNotNone(blobs)
            self.assertGreaterEqual(len(blobs), 1)
            self.assertIsNotNone(blobs[0])
            self.assertNamedItemInContainer(blobs, blob.blob_name)
            self.assertIsNotNone(blobs[0].blob_tier)
            self.assertTrue(blobs[0].blob_tier_inferred)
            self.assertIsNone(blobs[0].blob_tier_change_time)

            await blob.set_standard_blob_tier(tier)

            blob_ref2 = await blob.get_blob_properties()
            self.assertEqual(tier, blob_ref2.blob_tier)
            self.assertFalse(blob_ref2.blob_tier_inferred)
            self.assertIsNotNone(blob_ref2.blob_tier_change_time)

            blobs = []
            async for b in container.list_blobs():
                blobs.append(b)

            # Assert
            self.assertIsNotNone(blobs)
            self.assertGreaterEqual(len(blobs), 1)
            self.assertIsNotNone(blobs[0])
            self.assertNamedItemInContainer(blobs, blob.blob_name)
            self.assertEqual(blobs[0].blob_tier, tier)
            self.assertFalse(blobs[0].blob_tier_inferred)
            self.assertIsNotNone(blobs[0].blob_tier_change_time)

            await blob.delete_blob()

    @record
    def test_standard_blob_tier_set_tier_api(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_standard_blob_tier_set_tier_api())

    async def _test_rehydration_status(self):
        await self._setup()
        blob_name = 'rehydration_test_blob_1'
        blob_name2 = 'rehydration_test_blob_2'
        container = self.bsc.get_container_client(self.container_name)

        data = b'hello world'
        blob = await container.upload_blob(blob_name, data)
        await blob.set_standard_blob_tier(StandardBlobTier.Archive)
        await blob.set_standard_blob_tier(StandardBlobTier.Cool)
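        # Setting a non-archive tier on an archived blob starts rehydration:
        # the blob stays in the Archive tier with a rehydrate-pending status.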

        blob_ref = await blob.get_blob_properties()
        self.assertEqual(StandardBlobTier.Archive, blob_ref.blob_tier)
        self.assertEqual("rehydrate-pending-to-cool", blob_ref.archive_status)
        self.assertFalse(blob_ref.blob_tier_inferred)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        await blob.delete_blob()

        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob.blob_name)
        self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
        self.assertEqual("rehydrate-pending-to-cool", blobs[0].archive_status)
        self.assertFalse(blobs[0].blob_tier_inferred)

        blob2 = await container.upload_blob(blob_name2, data)
        await blob2.set_standard_blob_tier(StandardBlobTier.Archive)
        await blob2.set_standard_blob_tier(StandardBlobTier.Hot)

        blob_ref2 = await blob2.get_blob_properties()
        self.assertEqual(StandardBlobTier.Archive, blob_ref2.blob_tier)
        self.assertEqual("rehydrate-pending-to-hot", blob_ref2.archive_status)
        self.assertFalse(blob_ref2.blob_tier_inferred)

        blobs = []
        async for b in container.list_blobs():
            blobs.append(b)

        # Assert
        self.assertIsNotNone(blobs)
        self.assertGreaterEqual(len(blobs), 1)
        self.assertIsNotNone(blobs[0])
        self.assertNamedItemInContainer(blobs, blob2.blob_name)
        self.assertEqual(StandardBlobTier.Archive, blobs[0].blob_tier)
        self.assertEqual("rehydrate-pending-to-hot", blobs[0].archive_status)
        self.assertFalse(blobs[0].blob_tier_inferred)

    @record
    def test_rehydration_status(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_rehydration_status())


# Example #3
class StorageBlockBlobTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageBlockBlobTestAsync, self).setUp()

        url = self._get_account_url()

        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        self.bsc = BlobServiceClient(
            url,
            credential=self.settings.STORAGE_ACCOUNT_KEY,
            connection_data_block_size=4 * 1024,
            max_single_put_size=32 * 1024,
            max_block_size=4 * 1024,
            transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except:
                pass

        if os.path.isfile(FILE_PATH):
            try:
                os.remove(FILE_PATH)
            except:
                pass

        return super(StorageBlockBlobTestAsync, self).tearDown()

    #--Helpers-----------------------------------------------------------------
    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except ResourceExistsError:
                pass

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    async def _create_blob(self):
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(b'')
        return blob

    async def assertBlobEqual(self, container_name, blob_name, expected_data):
        await self._setup()
        blob = self.bsc.get_blob_client(container_name, blob_name)
        stream = await blob.download_blob()
        actual_data = await stream.content_as_bytes()
        self.assertEqual(actual_data, expected_data)

    class NonSeekableFile(object):
        def __init__(self, wrapped_file):
            self.wrapped_file = wrapped_file

        def write(self, data):
            self.wrapped_file.write(data)

        def read(self, count):
            return self.wrapped_file.read(count)

    #--Test cases for block blobs --------------------------------------------

    async def _test_put_block(self):
        await self._setup()
        # Arrange
        blob = await self._create_blob()

        # Act
        for i in range(5):
            resp = await blob.stage_block(
                i, 'block {0}'.format(i).encode('utf-8'))
            self.assertIsNone(resp)
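        # Staged blocks remain uncommitted until commit_block_list is called
        # (see the put_block_list and get_block_list tests below).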

        # Assert

    @record
    def test_put_block(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block())

    async def _test_put_block_unicode(self):
        await self._setup()
        # Arrange
        blob = await self._create_blob()

        # Act
        resp = await blob.stage_block('1', u'啊齄丂狛狜')
        self.assertIsNone(resp)

        # Assert

    @record
    def test_put_block_unicode(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_unicode())

    async def _test_put_block_with_md5(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        await blob.stage_block(1, b'block', validate_content=True)

        # Assert

    @record
    def test_put_block_with_md5(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_with_md5())

    async def _test_put_block_list(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        block_list = [
            BlobBlock(block_id='1'),
            BlobBlock(block_id='2'),
            BlobBlock(block_id='3')
        ]
        put_block_list_resp = await blob.commit_block_list(block_list)

        # Assert
        content = await blob.download_blob()
        actual = await content.content_as_bytes()
        self.assertEqual(actual, b'AAABBBCCC')
        self.assertEqual(content.properties.etag,
                         put_block_list_resp.get('etag'))
        self.assertEqual(content.properties.last_modified,
                         put_block_list_resp.get('last_modified'))

    @record
    def test_put_block_list(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_list())

    async def _test_put_block_list_invalid_block_id(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        try:
            block_list = [
                BlobBlock(block_id='1'),
                BlobBlock(block_id='2'),
                BlobBlock(block_id='4')
            ]
            await blob.commit_block_list(block_list)
            self.fail()
        except HttpResponseError as e:
            self.assertGreaterEqual(
                str(e).find('specified block list is invalid'), 0)

        # Assert

    @record
    def test_put_block_list_invalid_block_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_list_invalid_block_id())

    async def _test_put_block_list_with_md5(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        block_list = [
            BlobBlock(block_id='1'),
            BlobBlock(block_id='2'),
            BlobBlock(block_id='3')
        ]
        await blob.commit_block_list(block_list, validate_content=True)

        # Assert

    @record
    def test_put_block_list_with_md5(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_list_with_md5())

    async def _test_put_block_list_with_blob_tier_specified(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob_client.stage_block('1', b'AAA')
        await blob_client.stage_block('2', b'BBB')
        await blob_client.stage_block('3', b'CCC')
        blob_tier = StandardBlobTier.Cool

        # Act
        block_list = [
            BlobBlock(block_id='1'),
            BlobBlock(block_id='2'),
            BlobBlock(block_id='3')
        ]
        await blob_client.commit_block_list(block_list,
                                            standard_blob_tier=blob_tier)

        # Assert
        blob_properties = await blob_client.get_blob_properties()
        self.assertEqual(blob_properties.blob_tier, blob_tier)

    @record
    def test_put_block_list_with_blob_tier_specified_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_block_list_with_blob_tier_specified())

    async def _test_get_block_list_no_blocks(self):
        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        block_list = await blob.get_block_list('all')

        # Assert
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 0)

    @record
    def test_get_block_list_no_blocks(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_block_list_no_blocks())

    async def _test_get_block_list_uncommitted_blocks(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        # Act
        block_list = await blob.get_block_list('uncommitted')
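        # get_block_list returns a (committed_blocks, uncommitted_blocks) pair;
        # everything staged above is still uncommitted at this point.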

        # Assert
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 3)
        self.assertEqual(len(block_list[0]), 0)
        self.assertEqual(block_list[1][0].id, '1')
        self.assertEqual(block_list[1][0].size, 3)
        self.assertEqual(block_list[1][1].id, '2')
        self.assertEqual(block_list[1][1].size, 3)
        self.assertEqual(block_list[1][2].id, '3')
        self.assertEqual(block_list[1][2].size, 3)

    @record
    def test_get_block_list_uncommitted_blocks(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_block_list_uncommitted_blocks())

    async def _test_get_block_list_committed_blocks(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.stage_block('1', b'AAA')
        await blob.stage_block('2', b'BBB')
        await blob.stage_block('3', b'CCC')

        block_list = [
            BlobBlock(block_id='1'),
            BlobBlock(block_id='2'),
            BlobBlock(block_id='3')
        ]
        await blob.commit_block_list(block_list)

        # Act
        block_list = await blob.get_block_list('committed')

        # Assert
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 3)
        self.assertEqual(block_list[0][0].id, '1')
        self.assertEqual(block_list[0][0].size, 3)
        self.assertEqual(block_list[0][1].id, '2')
        self.assertEqual(block_list[0][1].size, 3)
        self.assertEqual(block_list[0][2].id, '3')
        self.assertEqual(block_list[0][2].size, 3)

    @record
    def test_get_block_list_committed_blocks(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_block_list_committed_blocks())

    async def _test_create_small_block_blob_with_no_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = b'hello world'
        data2 = b'hello second world'

        # Act
        create_resp = await blob.upload_blob(data1, overwrite=True)

        with self.assertRaises(ResourceExistsError):
            await blob.upload_blob(data2, overwrite=False)

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data1)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)

    @record
    def test_create_small_block_blob_with_no_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_small_block_blob_with_no_overwrite())

    async def _test_create_small_block_blob_with_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = b'hello world'
        data2 = b'hello second world'

        # Act
        create_resp = await blob.upload_blob(data1, overwrite=True)
        update_resp = await blob.upload_blob(data2, overwrite=True)

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data2)
        self.assertEqual(props.etag, update_resp.get('etag'))
        self.assertEqual(props.last_modified, update_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)

    @record
    def test_create_small_block_blob_with_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_small_block_blob_with_overwrite())

    async def _test_create_large_block_blob_with_no_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = self.get_random_bytes(LARGE_BLOB_SIZE)
        data2 = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        create_resp = await blob.upload_blob(data1,
                                             overwrite=True,
                                             metadata={'BlobData': 'Data1'})

        with self.assertRaises(ResourceExistsError):
            await blob.upload_blob(data2,
                                   overwrite=False,
                                   metadata={'BlobData': 'Data2'})

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data1)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)
        self.assertEqual(props.metadata, {'BlobData': 'Data1'})
        self.assertEqual(props.size, LARGE_BLOB_SIZE)

    @record
    def test_create_large_block_blob_with_no_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_large_block_blob_with_no_overwrite())

    async def _test_create_large_block_blob_with_overwrite(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data1 = self.get_random_bytes(LARGE_BLOB_SIZE)
        data2 = self.get_random_bytes(LARGE_BLOB_SIZE + 512)

        # Act
        create_resp = await blob.upload_blob(data1,
                                             overwrite=True,
                                             metadata={'BlobData': 'Data1'})
        update_resp = await blob.upload_blob(data2,
                                             overwrite=True,
                                             metadata={'BlobData': 'Data2'})

        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data2)
        self.assertEqual(props.etag, update_resp.get('etag'))
        self.assertEqual(props.last_modified, update_resp.get('last_modified'))
        self.assertEqual(props.blob_type, BlobType.BlockBlob)
        self.assertEqual(props.metadata, {'BlobData': 'Data2'})
        self.assertEqual(props.size, LARGE_BLOB_SIZE + 512)

    @record
    def test_create_large_block_blob_with_overwrite(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_large_block_blob_with_overwrite())

    async def _test_create_blob_from_bytes_single_put(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'

        # Act
        create_resp = await blob.upload_blob(data)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_bytes_single_put(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_single_put())

    async def _test_create_blob_from_0_bytes(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b''

        # Act
        create_resp = await blob.upload_blob(data)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_0_bytes(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_0_bytes())

    async def _test_create_from_bytes_blob_unicode(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'

        # Act
        create_resp = await blob.upload_blob(data)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_from_bytes_blob_unicode(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_from_bytes_blob_unicode())

    async def _test_create_from_bytes_blob_with_lease_id(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob = await self._create_blob()
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        lease = await blob.acquire_lease()

        # Act
        create_resp = await blob.upload_blob(data, lease=lease)

        # Assert
        output = await blob.download_blob(lease=lease)
        actual = await output.content_as_bytes()
        self.assertEqual(actual, data)
        self.assertEqual(output.properties.etag, create_resp.get('etag'))
        self.assertEqual(output.properties.last_modified,
                         create_resp.get('last_modified'))

    @record
    def test_create_from_bytes_blob_with_lease_id(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_from_bytes_blob_with_lease_id())

    async def _test_create_blob_from_bytes_with_metadata(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        metadata = {'hello': 'world', 'number': '42'}

        # Act
        await blob.upload_blob(data, metadata=metadata)

        # Assert
        md = await blob.get_blob_properties()
        md = md.metadata
        self.assertDictEqual(md, metadata)

    @record
    def test_create_blob_from_bytes_with_metadata(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_bytes_with_metadata())

    async def _test_create_blob_from_bytes_with_properties(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        await blob.upload_blob(data, content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_blob_from_bytes_with_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_bytes_with_properties())

    async def _test_create_blob_from_bytes_with_progress(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))
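        # The callback above is passed as raw_response_hook below; it runs for
        # each pipeline response and reads progress counters from its context.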

        create_resp = await blob.upload_blob(data, raw_response_hook=callback)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_bytes_with_progress(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_bytes_with_progress())

    async def _test_create_blob_from_bytes_with_index(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data[3:])

        # Assert
        db = await blob.download_blob()
        output = await db.content_as_bytes()
        self.assertEqual(data[3:], output)

    @record
    def test_create_blob_from_bytes_with_index(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_bytes_with_index())

    async def _test_create_blob_from_bytes_with_index_and_count(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data[3:], length=5)

        # Assert
        db = await blob.download_blob()
        output = await db.content_as_bytes()
        self.assertEqual(data[3:8], output)

    @record
    def test_create_blob_from_bytes_with_index_and_count(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_bytes_with_index_and_count())

    async def _test_create_blob_from_bytes_with_index_and_count_and_properties(
            self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        await blob.upload_blob(data[3:],
                               length=5,
                               content_settings=content_settings)

        # Assert
        db = await blob.download_blob()
        output = await db.content_as_bytes()
        self.assertEqual(data[3:8], output)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_blob_from_bytes_with_index_and_count_and_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_blob_from_bytes_with_index_and_count_and_properties())

    async def _test_create_blob_from_bytes_non_parallel(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data, length=LARGE_BLOB_SIZE, max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob.blob_name, data)

    @record
    def test_create_blob_from_bytes_non_parallel(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_bytes_non_parallel())
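
    # Passing ``standard_blob_tier`` to ``upload_blob`` sets the access tier
    # (Cool here) in the same request that writes the data; the test reads it
    # back through ``get_blob_properties().blob_tier``.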

    async def _test_create_blob_from_bytes_with_blob_tier_specified(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'
        blob_tier = StandardBlobTier.Cool

        # Act
        await blob_client.upload_blob(data, standard_blob_tier=blob_tier)
        blob_properties = await blob_client.get_blob_properties()

        # Assert
        self.assertEqual(blob_properties.blob_tier, blob_tier)

    @record
    def test_create_blob_from_bytes_with_blob_tier_specified_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_bytes_with_blob_tier_specified())

    async def _test_create_blob_from_path(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            create_resp = await blob.upload_blob(stream)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_path(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_path())

    async def _test_create_blob_from_path_non_parallel(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(100)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            create_resp = await blob.upload_blob(stream,
                                                 length=100,
                                                 max_concurrency=1)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_path_non_parallel(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_path_non_parallel())

    async def _test_upload_blob_from_path_non_parallel_with_standard_blob_tier(
            self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(100)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)
        blob_tier = StandardBlobTier.Cool
        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   length=100,
                                   max_concurrency=1,
                                   standard_blob_tier=blob_tier)
        props = await blob.get_blob_properties()

        # Assert
        self.assertEqual(props.blob_tier, blob_tier)

    @record
    def test_upload_blob_from_path_non_parallel_with_standard_blob_tier_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_upload_blob_from_path_non_parallel_with_standard_blob_tier())

    async def _test_create_blob_from_path_with_progress(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_create_blob_from_path_with_progress(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_path_with_progress())

    async def _test_create_blob_from_path_with_properties(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_blob_from_path_with_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_path_with_properties())

    async def _test_create_blob_from_stream_chunked_upload(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            create_resp = await blob.upload_blob(stream)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_stream_chunked_upload(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_stream_chunked_upload())
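
    # The NonSeekableFile helper (assumed to wrap the file object without
    # exposing seek()) means the upload cannot rewind the stream, so these
    # tests pin max_concurrency=1 to keep the reads strictly sequential.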

    async def _test_create_blob_from_stream_non_seekable_chunked_upload_known_size(
            self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        blob_size = len(data) - 66
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            non_seekable_file = StorageBlockBlobTestAsync.NonSeekableFile(
                stream)
            await blob.upload_blob(non_seekable_file,
                                   length=blob_size,
                                   max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name,
                                   data[:blob_size])

    @record
    def test_create_blob_from_stream_non_seekable_chunked_upload_known_size(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_blob_from_stream_non_seekable_chunked_upload_known_size(
            ))

    async def _test_create_blob_from_stream_non_seekable_chunked_upload_unknown_size(
            self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            non_seekable_file = StorageBlockBlobTestAsync.NonSeekableFile(
                stream)
            await blob.upload_blob(non_seekable_file, max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_blob_from_stream_non_seekable_chunked_upload_unknown_size(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_blob_from_stream_non_seekable_chunked_upload_unknown_size(
            ))

    async def _test_create_blob_from_stream_with_progress_chunked_upload(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_create_blob_from_stream_with_progress_chunked_upload(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_stream_with_progress_chunked_upload())

    async def _test_create_blob_from_stream_chunked_upload_with_count(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, length=blob_size)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name,
                                   data[:blob_size])

    @record
    def test_create_blob_from_stream_chunked_upload_with_count(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_stream_chunked_upload_with_count())

    async def _test_create_blob_from_stream_chunked_upload_with_count_and_properties(
            self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   length=blob_size,
                                   content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name,
                                   data[:blob_size])
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_blob_from_stream_chunked_upload_with_count_and_properties(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_blob_from_stream_chunked_upload_with_count_and_properties(
            ))

    async def _test_create_blob_from_stream_chunked_upload_with_properties(
            self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, content_settings=content_settings)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_blob_from_stream_chunked_upload_with_properties(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_stream_chunked_upload_with_properties(
            ))

    async def _test_create_blob_from_stream_chunked_upload_with_properties_and_tier(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)
        blob_tier = StandardBlobTier.Cool

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   content_settings=content_settings,
                                   max_concurrency=2,
                                   standard_blob_tier=blob_tier)

        properties = await blob.get_blob_properties()

        # Assert
        self.assertEqual(properties.blob_tier, blob_tier)

    @record
    def test_create_blob_from_stream_chunked_upload_with_properties_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_blob_from_stream_chunked_upload_with_properties_and_tier(
            ))
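
    # upload_blob also accepts text: the string is encoded before upload,
    # UTF-8 by default, or with the codec given via ``encoding`` as in the
    # tests below.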

    async def _test_create_blob_from_text(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-8')

        # Act
        create_resp = await blob.upload_blob(text)
        props = await blob.get_blob_properties()

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assertEqual(props.etag, create_resp.get('etag'))
        self.assertEqual(props.last_modified, create_resp.get('last_modified'))

    @record
    def test_create_blob_from_text(self):
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_from_text())

    async def _test_create_blob_from_text_with_encoding(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-16')

        # Act
        await blob.upload_blob(text, encoding='utf-16')

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_blob_from_text_with_encoding(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_text_with_encoding())

    async def _test_create_blob_from_text_with_encoding_and_progress(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        text = u'hello 啊齄丂狛狜 world'
        data = text.encode('utf-16')

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        await blob.upload_blob(text,
                               encoding='utf-16',
                               raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_create_blob_from_text_with_encoding_and_progress(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_text_with_encoding_and_progress())

    async def _test_create_blob_from_text_chunked_upload(self):
        # parallel tests introduce random order of requests, can only run live
        await self._setup()
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_text_data(LARGE_BLOB_SIZE)
        encoded_data = data.encode('utf-8')

        # Act
        await blob.upload_blob(data)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name,
                                   encoded_data)

    @record
    def test_create_blob_from_text_chunked_upload(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_blob_from_text_chunked_upload())
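
    # With validate_content=True the client computes an MD5 of each request
    # body so the service can verify it on receipt; success here is simply
    # that the upload completes without raising, hence the empty Assert
    # sections.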

    async def _test_create_blob_with_md5(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = b'hello world'

        # Act
        await blob.upload_blob(data, validate_content=True)

        # Assert

    @record
    def test_create_blob_with_md5(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_with_md5())

    async def _test_create_blob_with_md5_chunked(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = self.get_random_bytes(LARGE_BLOB_SIZE)

        # Act
        await blob.upload_blob(data, validate_content=True)

        # Assert

    @record
    def test_create_blob_with_md5_chunked(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_blob_with_md5_chunked())
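

# The suite below exercises customer-provided encryption keys (CPK): every
# write passes ``cpk=TEST_ENCRYPTION_KEY`` and every read, property or
# metadata call has to supply the same key again, otherwise the service
# rejects the request with HttpResponseError.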
class StorageCPKAsyncTest(StorageTestCase):
    def setUp(self):
        super(StorageCPKAsyncTest, self).setUp()

        url = self._get_account_url()

        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        self.bsc = BlobServiceClient(
            url,
            credential=self.settings.STORAGE_ACCOUNT_KEY,
            connection_data_block_size=1024,
            max_single_put_size=1024,
            min_large_block_upload_threshold=1024,
            max_block_size=1024,
            max_page_size=1024,
            transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

        # prepare some test data to be used in the upload tests
        self.byte_data = self.get_random_bytes(64 * 1024)

        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.create_container(self.container_name))
            except:
                pass

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except:
                pass

        return super(StorageCPKAsyncTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------

    def _get_blob_reference(self):
        return self.get_resource_name("cpk")

    async def _create_block_blob(self,
                                 blob_name=None,
                                 data=None,
                                 cpk=None,
                                 max_connections=1):
        blob_name = blob_name if blob_name else self._get_blob_reference()
        blob_client = self.bsc.get_blob_client(self.container_name, blob_name)
        data = data if data else b''
        resp = await blob_client.upload_blob(data,
                                             cpk=cpk,
                                             max_connections=max_connections)
        return blob_client, resp

    async def _create_append_blob(self, cpk=None):
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.create_append_blob(cpk=cpk)
        return blob

    async def _create_page_blob(self, cpk=None):
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.create_page_blob(1024 * 1024, cpk=cpk)
        return blob

    # -- Test cases for APIs supporting CPK ----------------------------------------------
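
    # NOTE: TEST_ENCRYPTION_KEY is assumed to be defined at module level as a
    # customer-provided key object exposing a ``key_hash`` attribute (for
    # example an ``azure.storage.blob.CustomerProvidedEncryptionKey``); the
    # assertions below compare the service-reported SHA-256 against it.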

    async def _test_put_block_and_put_block_list(self):
        # Arrange
        blob_client, _ = await self._create_block_blob()
        await blob_client.stage_block('1', b'AAA', cpk=TEST_ENCRYPTION_KEY)
        await blob_client.stage_block('2', b'BBB', cpk=TEST_ENCRYPTION_KEY)
        await blob_client.stage_block('3', b'CCC', cpk=TEST_ENCRYPTION_KEY)

        # Act
        block_list = [
            BlobBlock(block_id='1'),
            BlobBlock(block_id='2'),
            BlobBlock(block_id='3')
        ]
        put_block_list_resp = await blob_client.commit_block_list(
            block_list, cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(put_block_list_resp['etag'])
        self.assertIsNotNone(put_block_list_resp['last_modified'])
        self.assertTrue(put_block_list_resp['request_server_encrypted'])
        self.assertEqual(put_block_list_resp['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), b'AAABBBCCC')
        self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
        self.assertEqual(blob.properties.last_modified,
                         put_block_list_resp['last_modified'])
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_put_block_and_put_block_list_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_and_put_block_list())

    async def _test_create_block_blob_with_chunks(self):
        # parallel operation
        if TestMode.need_recording_file(self.test_mode):
            return
        # Arrange
        # force the in-memory chunks to be used
        self.config.use_byte_buffer = True

        # Act
        # create_blob_from_bytes forces the in-memory chunks to be used
        blob_client, upload_response = await self._create_block_blob(
            data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_connections=2)

        # Assert
        self.assertIsNotNone(upload_response['etag'])
        self.assertIsNotNone(upload_response['last_modified'])
        self.assertTrue(upload_response['request_server_encrypted'])
        self.assertEqual(upload_response['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), self.byte_data)
        self.assertEqual(blob.properties.etag, upload_response['etag'])
        self.assertEqual(blob.properties.last_modified,
                         upload_response['last_modified'])
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    def test_create_block_blob_with_chunks_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_block_blob_with_chunks())

    async def _test_create_block_blob_with_sub_streams(self):
        # problem with the recording framework can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Act
        # upload in parallel (max_connections=2) so the chunked sub-stream upload path is used
        blob_client, upload_response = await self._create_block_blob(
            data=self.byte_data, cpk=TEST_ENCRYPTION_KEY, max_connections=2)

        # Assert
        self.assertIsNotNone(upload_response['etag'])
        self.assertIsNotNone(upload_response['last_modified'])
        self.assertTrue(upload_response['request_server_encrypted'])
        self.assertEqual(upload_response['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), self.byte_data)
        self.assertEqual(blob.properties.etag, upload_response['etag'])
        self.assertEqual(blob.properties.last_modified,
                         upload_response['last_modified'])
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    def test_create_block_blob_with_sub_streams_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_block_blob_with_sub_streams())

    async def _test_create_block_blob_with_single_chunk(self):
        # Act
        data = b'AAABBBCCC'
        # the payload is below max_single_put_size, so a single put (no chunking) is used
        blob_client, upload_response = await self._create_block_blob(
            data=data, cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(upload_response['etag'])
        self.assertIsNotNone(upload_response['last_modified'])
        self.assertTrue(upload_response['request_server_encrypted'])
        self.assertEqual(upload_response['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), data)
        self.assertEqual(blob.properties.etag, upload_response['etag'])
        self.assertEqual(blob.properties.last_modified,
                         upload_response['last_modified'])
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_create_block_blob_with_single_chunk_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_block_blob_with_single_chunk())
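
    # stage_block_from_url copies byte ranges server-side from a source blob
    # that is readable through a SAS URL; both the staged blocks and the final
    # commit_block_list must carry the same CPK, which is why the commit
    # without the key is expected to fail below.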

    async def _test_put_block_from_url_and_commit(self):
        # Arrange
        # create source blob and get source blob url
        source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # force chunked upload so that the requests can be recorded
        source_blob_client, _ = await self._create_block_blob(
            blob_name=source_blob_name, data=self.byte_data)
        source_blob_sas = source_blob_client.generate_shared_access_signature(
            permission=BlobPermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1))
        source_blob_url = source_blob_client.url + "?" + source_blob_sas

        # create destination blob
        self.config.use_byte_buffer = False
        destination_blob_client, _ = await self._create_block_blob(
            cpk=TEST_ENCRYPTION_KEY)

        # Act part 1: make put block from url calls
        await destination_blob_client.stage_block_from_url(
            block_id=1,
            source_url=source_blob_url,
            source_offset=0,
            source_length=4 * 1024 - 1,
            cpk=TEST_ENCRYPTION_KEY)
        await destination_blob_client.stage_block_from_url(
            block_id=2,
            source_url=source_blob_url,
            source_offset=4 * 1024,
            source_length=8 * 1024,
            cpk=TEST_ENCRYPTION_KEY)

        # Assert blocks
        committed, uncommitted = await destination_blob_client.get_block_list(
            'all')
        self.assertEqual(len(uncommitted), 2)
        self.assertEqual(len(committed), 0)

        # commit the blocks without cpk should fail
        block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')]
        with self.assertRaises(HttpResponseError):
            await destination_blob_client.commit_block_list(block_list)

        # Act commit the blocks with cpk should succeed
        put_block_list_resp = await destination_blob_client.commit_block_list(
            block_list, cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(put_block_list_resp['etag'])
        self.assertIsNotNone(put_block_list_resp['last_modified'])
        self.assertTrue(put_block_list_resp['request_server_encrypted'])
        self.assertEqual(put_block_list_resp['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content
        blob = await destination_blob_client.download_blob(
            cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(),
                         self.byte_data[0:8 * 1024 + 1])
        self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
        self.assertEqual(blob.properties.last_modified,
                         put_block_list_resp['last_modified'])
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_put_block_from_url_and_commit_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_from_url_and_commit())

    async def _test_append_block(self):
        # Arrange
        blob_client = await self._create_append_blob(cpk=TEST_ENCRYPTION_KEY)

        # Act
        for content in [b'AAA', b'BBB', b'CCC']:
            append_blob_prop = await blob_client.append_block(
                content, cpk=TEST_ENCRYPTION_KEY)

            # Assert
            self.assertIsNotNone(append_blob_prop['etag'])
            self.assertIsNotNone(append_blob_prop['last_modified'])
            self.assertTrue(append_blob_prop['request_server_encrypted'])
            self.assertEqual(append_blob_prop['encryption_key_sha256'],
                             TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), b'AAABBBCCC')
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_append_block_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_block())

    async def _test_append_block_from_url(self):
        # Arrange
        source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # chunk upload
        source_blob_client, _ = await self._create_block_blob(
            blob_name=source_blob_name, data=self.byte_data)
        source_blob_sas = source_blob_client.generate_shared_access_signature(
            permission=BlobPermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1))
        source_blob_url = source_blob_client.url + "?" + source_blob_sas

        self.config.use_byte_buffer = False
        destination_blob_client = await self._create_append_blob(
            cpk=TEST_ENCRYPTION_KEY)

        # Act
        append_blob_prop = await destination_blob_client.append_block_from_url(
            source_blob_url,
            source_range_start=0,
            source_range_end=4 * 1024 - 1,
            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(append_blob_prop['etag'])
        self.assertIsNotNone(append_blob_prop['last_modified'])
        # TODO: verify that the swagger is correct, header wasn't added for the response
        # self.assertTrue(append_blob_prop['request_server_encrypted'])
        self.assertEqual(append_blob_prop['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await destination_blob_client.download_blob()

        # Act get the blob content
        blob = await destination_blob_client.download_blob(
            cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(),
                         self.byte_data[0:4 * 1024])
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_append_block_from_url_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_append_block_from_url())

    async def _test_create_append_blob_with_chunks(self):
        # Arrange
        blob_client = await self._create_append_blob(cpk=TEST_ENCRYPTION_KEY)

        # Act
        append_blob_prop = await blob_client.upload_blob(
            self.byte_data,
            blob_type=BlobType.AppendBlob,
            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(append_blob_prop['etag'])
        self.assertIsNotNone(append_blob_prop['last_modified'])
        self.assertTrue(append_blob_prop['request_server_encrypted'])
        self.assertEqual(append_blob_prop['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), self.byte_data)
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_create_append_blob_with_chunks_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_append_blob_with_chunks())
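
    # upload_page writes the inclusive byte range [start_range, end_range]
    # into the 1 MiB page blob created by the helper; as with the other CPK
    # calls, the same key has to be passed again to read the data back.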

    async def _test_update_page(self):
        # Arrange
        blob_client = await self._create_page_blob(cpk=TEST_ENCRYPTION_KEY)

        # Act
        page_blob_prop = await blob_client.upload_page(
            self.byte_data,
            start_range=0,
            end_range=len(self.byte_data) - 1,
            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(page_blob_prop['etag'])
        self.assertIsNotNone(page_blob_prop['last_modified'])
        self.assertTrue(page_blob_prop['request_server_encrypted'])
        self.assertEqual(page_blob_prop['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(
            offset=0,
            length=len(self.byte_data) - 1,
            cpk=TEST_ENCRYPTION_KEY,
        )

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), self.byte_data)
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_update_page_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_update_page())

    async def _test_update_page_from_url(self):
        # Arrange
        source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # force chunked upload so that the requests can be recorded
        source_blob_client, _ = await self._create_block_blob(
            blob_name=source_blob_name, data=self.byte_data)
        source_blob_sas = source_blob_client.generate_shared_access_signature(
            permission=BlobPermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1))
        source_blob_url = source_blob_client.url + "?" + source_blob_sas

        self.config.use_byte_buffer = False
        blob_client = await self._create_page_blob(cpk=TEST_ENCRYPTION_KEY)

        # Act
        page_blob_prop = await blob_client.upload_pages_from_url(
            source_blob_url,
            range_start=0,
            range_end=len(self.byte_data) - 1,
            source_range_start=0,
            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(page_blob_prop['etag'])
        self.assertIsNotNone(page_blob_prop['last_modified'])
        self.assertTrue(page_blob_prop['request_server_encrypted'])
        # TODO: FIX SWAGGER
        # self.assertEqual(page_blob_prop['encryption_key_sha256'], TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(
            offset=0,
            length=len(self.byte_data) - 1,
            cpk=TEST_ENCRYPTION_KEY,
        )

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), self.byte_data)
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    @record
    def test_update_page_from_url_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_update_page_from_url())

    async def _test_create_page_blob_with_chunks(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Act
        blob_client = self.bsc.get_blob_client(self.container_name,
                                               self._get_blob_reference())
        page_blob_prop = await blob_client.upload_blob(
            self.byte_data,
            blob_type=BlobType.PageBlob,
            max_connections=2,
            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(page_blob_prop['etag'])
        self.assertIsNotNone(page_blob_prop['last_modified'])
        self.assertTrue(page_blob_prop['request_server_encrypted'])
        self.assertEqual(page_blob_prop['encryption_key_sha256'],
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act get the blob content without cpk should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.download_blob()

        # Act get the blob content
        blob = await blob_client.download_blob(cpk=TEST_ENCRYPTION_KEY)

        # Assert content was retrieved with the cpk
        self.assertEqual(await blob.content_as_bytes(), self.byte_data)
        self.assertEqual(blob.properties.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

    def test_create_page_blob_with_chunks_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_page_blob_with_chunks())
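
    # With CPK even get_blob_properties and set_blob_metadata need the key;
    # the test also checks that metadata keys keep the casing they were set
    # with ('UP' is returned, 'up' is not).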

    async def _test_get_set_blob_metadata(self):
        # Arrange
        blob_client, _ = await self._create_block_blob(data=b'AAABBBCCC',
                                                       cpk=TEST_ENCRYPTION_KEY)

        # Act without the encryption key should fail
        with self.assertRaises(HttpResponseError):
            await blob_client.get_blob_properties()

        # Act
        blob_props = await blob_client.get_blob_properties(
            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertTrue(blob_props.server_encrypted)
        self.assertEqual(blob_props.encryption_key_sha256,
                         TEST_ENCRYPTION_KEY.key_hash)

        # Act set blob metadata
        metadata = {'hello': 'world', 'number': '42', 'UP': 'UPval'}
        with self.assertRaises(HttpResponseError):
            await blob_client.set_blob_metadata(metadata=metadata)

        await blob_client.set_blob_metadata(metadata=metadata,
                                            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        blob_props = await blob_client.get_blob_properties(
            cpk=TEST_ENCRYPTION_KEY)
        md = blob_props.metadata
        self.assertEqual(3, len(md))
        self.assertEqual(md['hello'], 'world')
        self.assertEqual(md['number'], '42')
        self.assertEqual(md['UP'], 'UPval')
        self.assertFalse('up' in md)

    @record
    def test_get_set_blob_metadata_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_set_blob_metadata())

    async def _test_snapshot_blob(self):
        # Arrange
        blob_client, _ = await self._create_block_blob(data=b'AAABBBCCC',
                                                       cpk=TEST_ENCRYPTION_KEY)

        # Act without cpk should not work
        with self.assertRaises(HttpResponseError):
            await blob_client.create_snapshot()

        # Act with cpk should work
        blob_snapshot = await blob_client.create_snapshot(
            cpk=TEST_ENCRYPTION_KEY)

        # Assert
        self.assertIsNotNone(blob_snapshot)

    @record
    def test_snapshot_blob_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_snapshot_blob())


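# These tests verify that secrets are scrubbed from the client logs: the
# Authorization header value (which contains the SharedKey signature) and the
# 'sig' query parameter of SAS URLs must never appear in the captured output.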
class StorageLoggingTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageLoggingTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     transport=AiohttpTestTransport())
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except:
                pass

        return super(StorageLoggingTestAsync, self).tearDown()

    async def _setup(self):
        if not self.is_playback():
            try:
                # create source blob to be copied from
                self.source_blob_name = self.get_resource_name('srcblob')
                self.source_blob_data = self.get_random_bytes(4 * 1024)
                source_blob = self.bsc.get_blob_client(self.container_name,
                                                       self.source_blob_name)

                await self.bsc.create_container(self.container_name)
                await source_blob.upload_blob(self.source_blob_data)

                # generate a SAS so that it is accessible with a URL
                sas_token = source_blob.generate_shared_access_signature(
                    permission=BlobPermissions.READ,
                    expiry=datetime.utcnow() + timedelta(hours=1),
                )
                sas_source = BlobClient(source_blob.url, credential=sas_token)
                self.source_blob_url = sas_source.url
            except:
                pass

    async def _test_authorization_is_scrubbed_off(self):
        await self._setup()
        # Arrange
        container = self.bsc.get_container_client(self.container_name)
        # Act
        with LogCaptured(self) as log_captured:
            await container.get_container_properties(logging_enable=True)
            log_as_str = log_captured.getvalue()
            # Assert
            # make sure the Authorization header is logged, but its value is not:
            # 'SharedKey' appears in the header's value, so it must not show up in the log
            self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str)
            self.assertFalse('SharedKey' in log_as_str)

    @record
    def test_authorization_is_scrubbed_off(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_authorization_is_scrubbed_off())

    async def _test_sas_signature_is_scrubbed_off(self):
        # Test can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        await self._setup()
        # Arrange
        container = self.bsc.get_container_client(self.container_name)
        token = container.generate_shared_access_signature(
            permission=ContainerPermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        # parse out the signed signature
        token_components = parse_qs(token)
        signed_signature = quote(
            token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

        sas_service = ContainerClient(container.url, credential=token)

        # Act
        with LogCaptured(self) as log_captured:
            await sas_service.get_account_information(logging_enable=True)
            log_as_str = log_captured.getvalue()

            # Assert
            # make sure the query parameter 'sig' is logged, but its value is not
            self.assertTrue(
                QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
            self.assertFalse(signed_signature in log_as_str)

    @record
    def test_sas_signature_is_scrubbed_off(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_sas_signature_is_scrubbed_off())

    async def _test_copy_source_sas_is_scrubbed_off(self):
        # Test can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        await self._setup()
        # Arrange
        dest_blob_name = self.get_resource_name('destblob')
        dest_blob = self.bsc.get_blob_client(self.container_name,
                                             dest_blob_name)

        # parse out the signed signature
        token_components = parse_qs(self.source_blob_url)
        signed_signature = quote(
            token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

        # Act
        with LogCaptured(self) as log_captured:
            await dest_blob.start_copy_from_url(self.source_blob_url,
                                                requires_sync=True,
                                                logging_enable=True)
            log_as_str = log_captured.getvalue()

            # Assert
            # make sure the query parameter 'sig' is logged, but its value is not
            self.assertTrue(
                QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
            self.assertFalse(signed_signature in log_as_str)

            # make sure the Authorization header is logged, but its value is not:
            # 'SharedKey' appears in the header's value, so it must not show up in the log
            self.assertTrue(_AUTHORIZATION_HEADER_NAME in log_as_str)
            self.assertFalse('SharedKey' in log_as_str)

    @record
    def test_copy_source_sas_is_scrubbed_off(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_copy_source_sas_is_scrubbed_off())


class StorageBlockBlobTestAsync(StorageTestCase):

    def setUp(self):
        super(StorageBlockBlobTestAsync, self).setUp()
        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        # test chunking functionality by reducing the size of each chunk,
        # otherwise the tests would take too long to execute
        self.bsc = BlobServiceClient(
            url,
            credential=credential,
            connection_data_block_size=4 * 1024,
            max_single_put_size=32 * 1024,
            max_block_size=4 * 1024,
            transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

        # create source blob to be copied from
        self.source_blob_name = self.get_resource_name('srcblob')
        self.source_blob_data = self.get_random_bytes(SOURCE_BLOB_SIZE)

        blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)

        # generate a SAS so that it is accessible with a URL
        sas_token = blob.generate_shared_access_signature(
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(self.bsc.delete_container(self.container_name))
            except:
                pass

        return super(StorageBlockBlobTestAsync, self).tearDown()

    async def _setup(self):
        blob = self.bsc.get_blob_client(self.container_name, self.source_blob_name)
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except:
                pass
            await blob.upload_blob(self.source_blob_data, overwrite=True)

        # generate a SAS so that it is accessible with a URL
        sas_token = blob.generate_shared_access_signature(
            permission=BlobSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        self.source_blob_url = BlobClient.from_blob_url(blob.url, credential=sas_token).url

    async def _test_put_block_from_url_and_commit_async(self):
        # Arrange
        await self._setup()
        split = 4 * 1024
        dest_blob_name = self.get_resource_name('destblob')
        dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)

        # Act part 1: make put block from url calls
        futures = [
            dest_blob.stage_block_from_url(
                block_id=1,
                source_url=self.source_blob_url,
                source_offset=0,
                source_length=split),
            dest_blob.stage_block_from_url(
                block_id=2,
                source_url=self.source_blob_url,
                source_offset=split,
                source_length=split)]
        await asyncio.gather(*futures)
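        # Staged blocks are independent, uncommitted server-side writes keyed by
        # block_id, so the two stage_block_from_url calls above can safely run
        # concurrently before a single commit makes them visible.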

        # Assert blocks
        committed, uncommitted = await dest_blob.get_block_list('all')
        self.assertEqual(len(uncommitted), 2)
        self.assertEqual(len(committed), 0)

        # Act part 2: commit the blocks
        await dest_blob.commit_block_list(['1', '2'])

        # Assert destination blob has right content
        content = await (await dest_blob.download_blob()).content_as_bytes()
        self.assertEqual(content, self.source_blob_data)
        self.assertEqual(len(content), 8 * 1024)

    @record
    def test_put_block_from_url_and_commit_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_from_url_and_commit_async())

    async def _test_put_block_from_url_and_validate_content_md5_async(self):
        # Arrange
        await self._setup()
        dest_blob_name = self.get_resource_name('destblob')
        dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)
        src_md5 = StorageContentValidation.get_content_md5(self.source_blob_data)

        # Act part 1: put block from url with md5 validation
        await dest_blob.stage_block_from_url(
            block_id=1,
            source_url=self.source_blob_url,
            source_content_md5=src_md5,
            source_offset=0,
            source_length=8 * 1024)

        # Assert block was staged
        committed, uncommitted = await dest_blob.get_block_list('all')
        self.assertEqual(len(uncommitted), 1)
        self.assertEqual(len(committed), 0)

        # Act part 2: put block from url with wrong md5
        fake_md5 = StorageContentValidation.get_content_md5(b"POTATO")
        with self.assertRaises(HttpResponseError) as error:
            await dest_blob.stage_block_from_url(
                block_id=2,
                source_url=self.source_blob_url,
                source_content_md5=fake_md5,
                source_offset=0,
                source_length=8 * 1024)
        self.assertEqual(error.exception.error_code, StorageErrorCode.md5_mismatch)

        # Assert block was not staged
        committed, uncommitted = await dest_blob.get_block_list('all')
        self.assertEqual(len(uncommitted), 1)
        self.assertEqual(len(committed), 0)

    @record
    def test_put_block_from_url_and_validate_content_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_from_url_and_validate_content_md5_async())

    async def _test_copy_blob_sync_async(self):
        # Arrange
        await self._setup()
        dest_blob_name = self.get_resource_name('destblob')
        dest_blob = self.bsc.get_blob_client(self.container_name, dest_blob_name)

        # Act
        copy_props = await dest_blob.start_copy_from_url(self.source_blob_url, requires_sync=True)

        # Assert
        self.assertIsNotNone(copy_props)
        self.assertIsNotNone(copy_props['copy_id'])
        self.assertEqual('success', copy_props['copy_status'])

        # Verify content
        content = await (await dest_blob.download_blob()).content_as_bytes()
        self.assertEqual(self.source_blob_data, content)

    @record
    def test_copy_blob_sync_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_copy_blob_sync_async())
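

# The download tests in StorageGetBlobTestAsync below all track progress with the
# same raw_response_hook pattern. A standalone sketch of that callback (the helper
# name is illustrative only; it is not part of the shared test utilities):
def make_progress_recorder():
    progress = []

    def callback(response):
        # azure-core surfaces per-request download progress on the pipeline
        # context; each hook call records a (bytes_so_far, total_bytes) pair.
        current = response.context['download_stream_current']
        total = response.context['data_stream_total']
        progress.append((current, total))

    return progress, callback

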
class StorageGetBlobTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageGetBlobTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        # test chunking functionality by reducing the threshold
        # for chunking and the size of each chunk, otherwise
        # the tests would take too long to execute
        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     max_single_get_size=32 * 1024,
                                     max_chunk_get_size=4 * 1024,
                                     transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')
        self.byte_blob = self.get_resource_name('byteblob')
        self.byte_data = self.get_random_bytes(64 * 1024 + 5)

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except:
                pass

        if os.path.isfile(FILE_PATH):
            try:
                os.remove(FILE_PATH)
            except:
                pass

        return super(StorageGetBlobTestAsync, self).tearDown()

    # --Helpers-----------------------------------------------------------------

    async def _setup(self):
        if not self.is_playback():
            container = self.bsc.get_container_client(self.container_name)
            await container.create_container()

            blob = self.bsc.get_blob_client(self.container_name,
                                            self.byte_blob)
            await blob.upload_blob(self.byte_data)

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    class NonSeekableFile(object):
        def __init__(self, wrapped_file):
            self.wrapped_file = wrapped_file

        def write(self, data):
            self.wrapped_file.write(data)

        def read(self, count):
            return self.wrapped_file.read(count)

        def seekable(self):
            return False
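
    # readinto() with max_concurrency > 1 writes downloaded chunks at their offsets
    # and therefore needs a seekable target; because NonSeekableFile reports
    # seekable() == False, the parallel non-seekable test below expects a ValueError,
    # while the single-threaded variant streams sequentially and succeeds.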

    # -- Get test cases for blobs ----------------------------------------------

    async def _test_unicode_get_blob_unicode_data_async(self):
        # Arrange
        await self._setup()
        blob_data = u'hello world啊齄丂狛狜'.encode('utf-8')
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertIsInstance(content.properties, BlobProperties)
        self.assertEqual(await content.readall(), blob_data)

    @record
    def test_unicode_get_blob_unicode_data_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_unicode_get_blob_unicode_data_async())

    async def _test_unicode_get_blob_binary_data_async(self):
        # Arrange
        await self._setup()
        base64_data = 'AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/wABAgMEBQYHCAkKCwwNDg8QERITFBUWFxgZGhscHR4fICEiIyQlJicoKSorLC0uLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVGR0hJSktMTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gIGCg4SFhoeIiYqLjI2Oj5CRkpOUlZaXmJmam5ydnp+goaKjpKWmp6ipqqusra6vsLGys7S1tre4ubq7vL2+v8DBwsPExcbHyMnKy8zNzs/Q0dLT1NXW19jZ2tvc3d7f4OHi4+Tl5ufo6err7O3u7/Dx8vP09fb3+Pn6+/z9/v8AAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2Nzg5Ojs8PT4/QEFCQ0RFRkdISUpLTE1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4CBgoOEhYaHiImKi4yNjo+QkZKTlJWWl5iZmpucnZ6foKGio6SlpqeoqaqrrK2ur7CxsrO0tba3uLm6u7y9vr/AwcLDxMXGx8jJysvMzc7P0NHS09TV1tfY2drb3N3e3+Dh4uPk5ebn6Onq6+zt7u/w8fLz9PX29/j5+vv8/f7/AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHh8gISIjJCUmJygpKissLS4vMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUZHSElKS0xNTk9QUVJTVFVWV1hZWltcXV5fYGFiY2RlZmdoaWprbG1ub3BxcnN0dXZ3eHl6e3x9fn+AgYKDhIWGh4iJiouMjY6PkJGSk5SVlpeYmZqbnJ2en6ChoqOkpaanqKmqq6ytrq+wsbKztLW2t7i5uru8vb6/wMHCw8TFxsfIycrLzM3Oz9DR0tPU1dbX2Nna29zd3t/g4eLj5OXm5+jp6uvs7e7v8PHy8/T19vf4+fr7/P3+/w=='
        binary_data = base64.b64decode(base64_data)

        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(binary_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertIsInstance(content.properties, BlobProperties)
        self.assertEqual(await content.readall(), binary_data)

    @record
    def test_unicode_get_blob_binary_data_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_unicode_get_blob_binary_data_async())

    async def _test_get_blob_no_content_async(self):
        # Arrange
        await self._setup()
        blob_data = b''
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        content = await blob.download_blob()

        # Assert
        self.assertEqual(blob_data, await content.readall())
        self.assertEqual(0, content.properties.size)

    @record
    def test_get_blob_no_content_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_no_content_async())

    async def _test_get_blob_to_bytes_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await blob.download_blob(max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @record
    def test_get_blob_to_bytes_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_bytes_async())

    async def _test_ranged_get_blob_to_bytes_with_single_byte_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await blob.download_blob(offset=0,
                                                  length=1)).readall()

        # Assert
        self.assertEqual(1, len(content))
        self.assertEqual(self.byte_data[0], content[0])

        # Act
        content = await (await blob.download_blob(offset=5,
                                                  length=1)).readall()

        # Assert
        self.assertEqual(1, len(content))
        self.assertEqual(self.byte_data[5], content[0])

    @record
    def test_ranged_get_blob_to_bytes_with_single_byte_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_to_bytes_with_single_byte_async())

    async def _test_ranged_get_blob_to_bytes_with_zero_byte_async(self):
        await self._setup()
        blob_data = b''
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        # the get request should fail in this case since the blob is empty and yet there is a range specified
        with self.assertRaises(HttpResponseError) as e:
            await blob.download_blob(offset=0, length=5)
        self.assertEqual(StorageErrorCode.invalid_range,
                         e.exception.error_code)

        with self.assertRaises(HttpResponseError) as e:
            await blob.download_blob(offset=3, length=5)
        self.assertEqual(StorageErrorCode.invalid_range,
                         e.exception.error_code)

    @record
    def test_ranged_get_blob_to_bytes_with_zero_byte_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_to_bytes_with_zero_byte_async())

    async def _test_ranged_get_blob_with_missing_start_range_async(self):
        await self._setup()
        blob_data = b'foobar'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        # the get request should fail fast in this case since start_range is missing while end_range is specified
        with self.assertRaises(ValueError):
            await blob.download_blob(length=3)

    @record
    def test_ranged_get_blob_with_missing_start_range_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_with_missing_start_range_async())

    async def _test_get_blob_to_bytes_snapshot_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        snapshot_ref = await blob.create_snapshot()
        snapshot = self.bsc.get_blob_client(self.container_name,
                                            self.byte_blob,
                                            snapshot=snapshot_ref)

        await blob.upload_blob(
            self.byte_data,
            overwrite=True)  # Modify the blob so the Etag no longer matches
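        # The snapshot itself is immutable, so the chunked download below is not
        # affected by the base blob getting a new ETag from the re-upload above.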

        # Act
        content = await (await
                         snapshot.download_blob(max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @record
    def test_get_blob_to_bytes_snapshot_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_bytes_snapshot_async())

    async def _test_get_blob_to_bytes_with_progress_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await
                         blob.download_blob(raw_response_hook=callback,
                                            max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_bytes_with_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_bytes_with_progress_async())

    async def _test_get_blob_to_bytes_non_parallel_async(self):
        # Arrange
        await self._setup()
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await
                         blob.download_blob(raw_response_hook=callback,
                                            max_concurrency=1)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_bytes_non_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_bytes_non_parallel_async())

    async def _test_get_blob_to_bytes_small_async(self):
        # Arrange
        await self._setup()
        blob_data = self.get_random_bytes(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback
                                                  )).readall()

        # Assert
        self.assertEqual(blob_data, content)
        self.assert_download_progress(len(blob_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_bytes_small_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_bytes_small_async())

    async def _test_get_blob_to_stream_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)

    @record
    def test_get_blob_to_stream_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_stream_async())

    async def _test_get_blob_to_stream_with_progress_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)
        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_stream_with_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_stream_with_progress_async())

    async def _test_get_blob_to_stream_non_parallel_async(self):
        # Arrange
        await self._setup()
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=1)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)
        self.assert_download_progress(len(self.byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_stream_non_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_stream_non_parallel_async())

    async def _test_get_blob_to_stream_small_async(self):
        # Arrange
        await self._setup()
        blob_data = self.get_random_bytes(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 1024)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data, actual)
        self.assert_download_progress(len(blob_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_stream_small_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_stream_small_async())

    async def _test_ranged_get_blob_to_path_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        end_range = self.config.max_single_get_size
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=end_range - 1,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, end_range - 1)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:end_range], actual)

    @record
    def test_ranged_get_blob_to_path_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_ranged_get_blob_to_path_async())

    async def _test_ranged_get_blob_to_path_with_progress_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        progress = []
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        start_range = 3
        end_range = self.config.max_single_get_size + 1024
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=start_range,
                                                  length=end_range,
                                                  raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, self.config.max_single_get_size + 1024)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(
                self.byte_data[start_range:end_range + start_range], actual)
        self.assert_download_progress(end_range,
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_ranged_get_blob_to_path_with_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_to_path_with_progress_async())

    async def _test_ranged_get_blob_to_path_small_async(self):
        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=4,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 4)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:5], actual)

    @record
    def test_ranged_get_blob_to_path_small_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_to_path_small_async())

    async def _test_ranged_get_blob_to_path_non_parallel_async(self):
        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=3,
                                                  max_concurrency=1)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 3)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data[1:4], actual)

    @record
    def test_ranged_get_blob_to_path_non_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_to_path_non_parallel_async())

    async def _test_ranged_get_blob_to_path_invalid_range_parallel_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_size = self.config.max_single_get_size + 1
        blob_data = self.get_random_bytes(blob_size)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        end_range = 2 * self.config.max_single_get_size
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=end_range,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, blob_size)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data[1:blob_size], actual)

    @record
    def test_ranged_get_blob_to_path_invalid_range_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_to_path_invalid_range_parallel_async())

    async def _test_ranged_get_blob_to_path_invalid_range_non_parallel_async(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_size = 1024
        blob_data = self.get_random_bytes(blob_size)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        # Act
        end_range = 2 * self.config.max_single_get_size
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=1,
                                                  length=end_range,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, blob_size)
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(blob_data[1:blob_size], actual)

    @record
    def test_ranged_get_blob_to_path_invalid_range_non_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_ranged_get_blob_to_path_invalid_range_non_parallel_async())

    async def _test_get_blob_to_text_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        text_blob = self.get_resource_name('textblob')
        text_data = self.get_random_text_data(self.config.max_single_get_size +
                                              1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        # Act
        stream = await blob.download_blob(max_concurrency=2, encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)

    @record
    def test_get_blob_to_text_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_text_async())

    async def _test_get_blob_to_text_with_progress_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        text_blob = self.get_resource_name('textblob')
        text_data = self.get_random_text_data(self.config.max_single_get_size +
                                              1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback,
                                          max_concurrency=2,
                                          encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)
        self.assert_download_progress(len(text_data.encode('utf-8')),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_text_with_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_text_with_progress_async())

    async def _test_get_blob_to_text_non_parallel_async(self):
        # Arrange
        await self._setup()
        text_blob = self._get_blob_reference()
        text_data = self.get_random_text_data(self.config.max_single_get_size +
                                              1)
        blob = self.bsc.get_blob_client(self.container_name, text_blob)
        await blob.upload_blob(text_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback,
                                          max_concurrency=1,
                                          encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(text_data, content)
        self.assert_download_progress(len(text_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_text_non_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_text_non_parallel_async())

    async def _test_get_blob_to_text_small_async(self):
        # Arrange
        await self._setup()
        blob_data = self.get_random_text_data(1024)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(blob_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        stream = await blob.download_blob(raw_response_hook=callback,
                                          encoding='UTF-8')
        content = await stream.readall()

        # Assert
        self.assertEqual(blob_data, content)
        self.assert_download_progress(len(blob_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_text_small_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_text_small_async())

    async def _test_get_blob_to_text_with_encoding_async(self):
        # Arrange
        await self._setup()
        text = u'hello 啊齄丂狛狜 world'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(text, encoding='utf-16')

        # Act
        stream = await blob.download_blob(encoding='utf-16')
        content = await stream.readall()

        # Assert
        self.assertEqual(text, content)

    @record
    def test_get_blob_to_text_with_encoding_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_text_with_encoding_async())

    async def _test_get_blob_to_text_with_encoding_and_progress_async(self):
        # Arrange
        await self._setup()
        text = u'hello 啊齄丂狛狜 world'
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(text, encoding='utf-16')

        # Act
        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        stream = await blob.download_blob(raw_response_hook=callback,
                                          encoding='utf-16')
        content = await stream.readall()

        # Assert
        self.assertEqual(text, content)
        self.assert_download_progress(len(text.encode('utf-8')),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_text_with_encoding_and_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_text_with_encoding_and_progress_async())

    async def _test_get_blob_non_seekable_async(self):
        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        with open(FILE_PATH, 'wb') as stream:
            non_seekable_stream = StorageGetBlobTestAsync.NonSeekableFile(
                stream)
            downloader = await blob.download_blob(max_concurrency=1)
            read_bytes = await downloader.readinto(non_seekable_stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)

    @record
    def test_get_blob_non_seekable_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_non_seekable_async())

    async def _test_get_blob_non_seekable_parallel_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        with open(FILE_PATH, 'wb') as stream:
            non_seekable_stream = StorageGetBlobTestAsync.NonSeekableFile(
                stream)

            with self.assertRaises(ValueError):
                downloader = await blob.download_blob(max_concurrency=2)
                read_bytes = await downloader.readinto(non_seekable_stream)

    @record
    def test_get_blob_non_seekable_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_non_seekable_parallel_async())

    async def _test_get_blob_to_stream_exact_get_size_async(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(raw_response_hook=callback,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(byte_data, actual)
        self.assert_download_progress(len(byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_to_stream_exact_get_size_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_to_stream_exact_get_size_async())

    async def _test_get_blob_exact_get_size_async(self):
        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback
                                                  )).readall()

        # Assert
        self.assertEqual(byte_data, content)
        self.assert_download_progress(len(byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_exact_get_size_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_exact_get_size_async())

    async def _test_get_blob_exact_chunk_size_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        byte_data = self.get_random_bytes(self.config.max_single_get_size +
                                          self.config.max_chunk_get_size)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(byte_data)

        progress = []

        def callback(response):
            current = response.context['download_stream_current']
            total = response.context['data_stream_total']
            progress.append((current, total))

        # Act
        content = await (await blob.download_blob(raw_response_hook=callback
                                                  )).readall()

        # Assert
        self.assertEqual(byte_data, content)
        self.assert_download_progress(len(byte_data),
                                      self.config.max_chunk_get_size,
                                      self.config.max_single_get_size,
                                      progress)

    @record
    def test_get_blob_exact_chunk_size_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_exact_chunk_size_async())

    async def _test_get_blob_to_stream_with_md5_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(validate_content=True,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, len(self.byte_data))
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(self.byte_data, actual)

    @record
    def test_get_blob_to_stream_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_stream_with_md5_async())

    async def _test_get_blob_with_md5_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)

        # Act
        content = await (await
                         blob.download_blob(validate_content=True,
                                            max_concurrency=2)).readall()

        # Assert
        self.assertEqual(self.byte_data, content)

    @record
    def test_get_blob_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_with_md5_async())

    async def _test_get_blob_range_to_stream_with_overall_md5_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = b'MDAwMDAwMDA='
        await blob.set_http_headers(props.content_settings)
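        # The bogus whole-blob MD5 set above does not break the ranged download:
        # with validate_content=True the SDK requests a transactional MD5 for the
        # range it GETs (ranges of 4 MiB or less) and validates that instead, so
        # the bogus value only shows up on the returned properties.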

        # Act
        with open(FILE_PATH, 'wb') as stream:
            downloader = await blob.download_blob(offset=0,
                                                  length=1024,
                                                  validate_content=True,
                                                  max_concurrency=2)
            read_bytes = await downloader.readinto(stream)

        # Assert
        self.assertEqual(read_bytes, 1024)
        self.assertEqual(b'MDAwMDAwMDA=',
                         downloader.properties.content_settings.content_md5)
        self.assertEqual(downloader.size, 1024)

    @record
    def test_get_blob_range_to_stream_with_overall_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_to_stream_with_overall_md5_async())

    async def _test_get_blob_range_with_overall_md5_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Arrange
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = b'MDAwMDAwMDA='
        await blob.set_http_headers(props.content_settings)

        # Act
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Assert
        self.assertEqual(b'MDAwMDAwMDA=',
                         content.properties.content_settings.content_md5)
        self.assertEqual(content.properties.size, 1024)

    @record
    def test_get_blob_range_with_overall_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_with_overall_md5_async())

    async def _test_get_blob_range_with_range_md5_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        await self._setup()
        blob = self.bsc.get_blob_client(self.container_name, self.byte_blob)
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Arrange
        props = await blob.get_blob_properties()
        props.content_settings.content_md5 = None
        await blob.set_http_headers(props.content_settings)

        # Act
        content = await blob.download_blob(offset=0,
                                           length=1024,
                                           validate_content=True)

        # Assert
        self.assertIsNotNone(content.properties.content_settings.content_type)
        self.assertIsNone(content.properties.content_settings.content_md5)

    @record
    def test_get_blob_range_with_range_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_with_range_md5_async())
class StorageBlobRetryTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageBlobRetryTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()
        retry = ExponentialRetry(initial_backoff=1,
                                 increment_base=2,
                                 retry_total=3)

        self.bs = BlobServiceClient(url,
                                    credential=credential,
                                    retry_policy=retry)
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bs.delete_container(self.container_name))
            except:
                pass

        return super(StorageBlobRetryTestAsync, self).tearDown()

    # --Helpers-----------------------------------------------------------------

    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bs.create_container(self.container_name)
            except ResourceExistsError:
                pass

    class NonSeekableStream(IOBase):
        def __init__(self, wrapped_stream):
            self.wrapped_stream = wrapped_stream

        def write(self, data):
            self.wrapped_stream.write(data)

        def read(self, count):
            return self.wrapped_stream.read(count)

        def seek(self, *args, **kwargs):
            raise UnsupportedOperation("boom!")

        def tell(self):
            return self.wrapped_stream.tell()
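
    # The retry tests below rig the first response via a ResponseCallback helper
    # imported from the shared test utilities. A rough sketch of the idea, assuming
    # the hook simply rewrites the first matching status code so that the retry
    # policy fires exactly once (an illustrative approximation, not the real helper):
    class _ExampleResponseCallback(object):
        def __init__(self, status, new_status):
            self.status = status
            self.new_status = new_status
            self.first = True

        def override_first_status(self, response):
            # Rewrite only the first response whose status matches, e.g. turn a
            # successful 201 into a retryable 408 so the request is resent once.
            if self.first and response.http_response.status_code == self.status:
                response.http_response.status_code = self.new_status
                self.first = False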

    async def _test_retry_put_block_with_seekable_stream_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self.get_resource_name('blob')
        data = self.get_random_bytes(PUT_BLOCK_SIZE)
        data_stream = BytesIO(data)

        # rig the response so that it fails for a single time
        responder = ResponseCallback(status=201, new_status=408)

        # Act
        blob = self.bs.get_blob_client(self.container_name, blob_name)
        await blob.stage_block(
            1, data_stream, raw_response_hook=responder.override_first_status)

        # Assert
        _, uncommitted_blocks = await blob.get_block_list(
            block_list_type="uncommitted",
            raw_response_hook=responder.override_first_status)
        self.assertEqual(len(uncommitted_blocks), 1)
        self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE)

        # Commit block and verify content
        await blob.commit_block_list(
            ['1'], raw_response_hook=responder.override_first_status)

        # Assert
        content = await (await blob.download_blob()).readall()
        self.assertEqual(content, data)

    def test_retry_put_block_with_seekable_stream_async(self):
        pytest.skip("Aiohttp closes stream after request - cannot rewind.")
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_retry_put_block_with_seekable_stream_async())

    async def _test_retry_put_block_with_non_seekable_stream_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self.get_resource_name('blob')
        data = self.get_random_bytes(PUT_BLOCK_SIZE)
        data_stream = self.NonSeekableStream(BytesIO(data))

        # rig the response so that it fails for a single time
        responder = ResponseCallback(status=201, new_status=408)

        # Act
        blob = self.bs.get_blob_client(self.container_name, blob_name)
        # Note: put_block transforms non-seekable streams into byte arrays before handing it off to the executor
        await blob.stage_block(
            1, data_stream, raw_response_hook=responder.override_first_status)

        # Assert
        _, uncommitted_blocks = await blob.get_block_list(
            block_list_type="uncommitted",
            raw_response_hook=responder.override_first_status)
        self.assertEqual(len(uncommitted_blocks), 1)
        self.assertEqual(uncommitted_blocks[0].size, PUT_BLOCK_SIZE)

        # Commit block and verify content
        await blob.commit_block_list(
            ['1'], raw_response_hook=responder.override_first_status)

        # Assert
        content = await (await blob.download_blob()).readall()
        self.assertEqual(content, data)

    def test_retry_put_block_with_non_seekable_stream_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_retry_put_block_with_non_seekable_stream_async())

    async def _test_retry_put_block_with_non_seekable_stream_fail_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self.get_resource_name('blob')
        data = self.get_random_bytes(PUT_BLOCK_SIZE)
        data_stream = self.NonSeekableStream(BytesIO(data))

        # rig the response hook so the first 201 response is rewritten to 408;
        # unlike above, the 408 is expected to surface
        responder = ResponseCallback(status=201, new_status=408)

        # Act
        blob = self.bs.get_blob_client(self.container_name, blob_name)

        with self.assertRaises(HttpResponseError) as error:
            await blob.stage_block(
                1,
                data_stream,
                length=PUT_BLOCK_SIZE,
                raw_response_hook=responder.override_first_status)

        # Assert
        self.assertEqual(error.exception.response.status_code, 408)

    def test_retry_put_block_with_non_seekable_stream_fail_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_retry_put_block_with_non_seekable_stream_fail_async())
class StorageBlobEncryptionTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageBlobEncryptionTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        # test chunking functionality by reducing the threshold
        # for chunking and the size of each chunk, otherwise
        # the tests would take too long to execute
        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     max_single_put_size=32 * 1024,
                                     max_block_size=4 * 1024,
                                     max_page_size=4 * 1024,
                                     transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')
        self.blob_types = (BlobType.BlockBlob, BlobType.PageBlob,
                           BlobType.AppendBlob)

        self.bytes = b'Foo'

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except:
                pass
        if path.isfile(FILE_PATH):
            try:
                remove(FILE_PATH)
            except:
                pass

        return super(StorageBlobEncryptionTestAsync, self).tearDown()

    #--Helpers-----------------------------------------------------------------

    async def _setup(self):
        if not self.is_playback():
            container = self.bsc.get_container_client(self.container_name)
            try:
                await container.create_container()
            except:
                pass

    def _get_container_reference(self):
        return self.get_resource_name(TEST_CONTAINER_PREFIX)

    def _get_blob_reference(self, blob_type):
        return self.get_resource_name(TEST_BLOB_PREFIXES[blob_type.value])

    async def _create_small_blob(self, blob_type):
        blob_name = self._get_blob_reference(blob_type)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(self.bytes, blob_type=blob_type)
        return blob

    #--Test cases for blob encryption ----------------------------------------

    async def _test_missing_attribute_kek_wrap_async(self):
        # In the shared method _generate_blob_encryption_key
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        valid_key = KeyWrapper('key1')

        # Act
        invalid_key_1 = lambda: None  #functions are objects, so this effectively creates an empty object
        invalid_key_1.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
        invalid_key_1.get_kid = valid_key.get_kid
        # No attribute wrap_key
        self.bsc.key_encryption_key = invalid_key_1
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

        invalid_key_2 = lambda: None  #functions are objects, so this effectively creates an empty object
        invalid_key_2.wrap_key = valid_key.wrap_key
        invalid_key_2.get_kid = valid_key.get_kid
        # No attribute get_key_wrap_algorithm
        self.bsc.key_encryption_key = invalid_key_2
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

        invalid_key_3 = lambda: None  #functions are objects, so this effectively creates an empty object
        invalid_key_3.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
        invalid_key_3.wrap_key = valid_key.wrap_key
        # No attribute get_kid
        self.bsc.key_encryption_key = invalid_key_3
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

    @record
    def test_missing_attribute_kek_wrap_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_missing_attribute_kek_wrap_async())
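
    # Hedged aside on the `lambda: None` trick used above: the test only needs an
    # object exposing two of the three KEK methods (wrap_key, get_key_wrap_algorithm,
    # get_kid) while lacking the third, so that encryption fails with AttributeError.
    # A types.SimpleNamespace-based helper (an illustration, not part of the shared
    # test utilities) expresses the same idea more explicitly:
    @staticmethod
    def _kek_missing(valid_key, omitted):
        import types
        attrs = {name: getattr(valid_key, name)
                 for name in ('wrap_key', 'get_key_wrap_algorithm', 'get_kid')
                 if name != omitted}
        # The returned object deliberately lacks the `omitted` method.
        return types.SimpleNamespace(**attrs)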

    async def _test_invalid_value_kek_wrap_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')

        self.bsc.key_encryption_key.get_key_wrap_algorithm = None
        try:
            await self._create_small_blob(BlobType.BlockBlob)
            self.fail()
        except AttributeError as e:
            self.assertEqual(
                str(e),
                _ERROR_OBJECT_INVALID.format('key encryption key',
                                             'get_key_wrap_algorithm'))

        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.key_encryption_key.get_kid = None
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.key_encryption_key.wrap_key = None
        with self.assertRaises(AttributeError):
            await self._create_small_blob(BlobType.BlockBlob)

    @record
    def test_invalid_value_kek_wrap_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_invalid_value_kek_wrap_async())

    async def _test_missing_attribute_kek_unwrap_async(self):
        # Shared between all services in decrypt_blob
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        valid_key = KeyWrapper('key1')
        self.bsc.key_encryption_key = valid_key
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        # Note that KeyWrapper has a default value for key_id, so these exceptions
        # are not due to non-matching kids.
        invalid_key_1 = lambda: None  #functions are objects, so this effectively creates an empty object
        invalid_key_1.get_kid = valid_key.get_kid
        #No attribute unwrap_key
        blob.key_encryption_key = invalid_key_1
        with self.assertRaises(HttpResponseError):
            await (await blob.download_blob()).content_as_bytes()

        invalid_key_2 = lambda: None  #functions are objects, so this effectively creates an empty object
        invalid_key_2.unwrap_key = valid_key.unwrap_key
        blob.key_encryption_key = invalid_key_2
        #No attribute get_kid
        with self.assertRaises(HttpResponseError):
            await (await blob.download_blob()).content_as_bytes()

    @record
    def test_missing_attribute_kek_unwrap_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_missing_attribute_kek_unwrap_async())

    async def _test_invalid_value_kek_unwrap_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.key_encryption_key = KeyWrapper('key1')
        blob.key_encryption_key.unwrap_key = None

        with self.assertRaises(HttpResponseError) as e:
            await (await blob.download_blob()).content_as_bytes()
        self.assertEqual(str(e.exception), 'Decryption failed.')

    @record
    def test_invalid_value_kek_unwrap_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_invalid_value_kek_unwrap_async())

    async def _test_get_blob_kek_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        content = await (await blob.download_blob()).content_as_bytes()

        # Assert
        self.assertEqual(content, self.bytes)

    @record
    def test_get_blob_kek_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_kek_async())

    async def _test_get_blob_resolver_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        key_resolver = KeyResolver()
        key_resolver.put_key(self.bsc.key_encryption_key)
        self.bsc.key_resolver_function = key_resolver.resolve_key
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        self.bsc.key_encryption_key = None
        content = await (await blob.download_blob()).content_as_bytes()

        # Assert
        self.assertEqual(content, self.bytes)

    @record
    def test_get_blob_resolver_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_resolver_async())
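
    # Hedged sketch of the resolver contract exercised above: key_resolver_function
    # receives the key id recorded in the blob's encryption metadata and must return
    # a key-encryption-key object able to unwrap the content key. KeyResolver itself
    # is a shared test helper not shown in this file; a minimal dict-backed stand-in
    # (an illustration only) could look like this:
    class _DictKeyResolver(object):
        def __init__(self):
            self._keys = {}

        def put_key(self, kek):
            # Index the KEK by the id it reports via get_kid().
            self._keys[kek.get_kid()] = kek

        def resolve_key(self, kid):
            return self._keys[kid]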

    async def _test_get_blob_kek_RSA_async(self):
        # We can only generate random RSA keys, so this must be run live or
        # the playback test will fail due to a change in kek values.
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = RSAKeyWrapper('key2')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        content = await blob.download_blob()
        data = b""
        async for d in content:
            data += d

        # Assert
        self.assertEqual(data, self.bytes)

    @record
    def test_get_blob_kek_RSA_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_kek_RSA_async())

    async def _test_get_blob_nonmatching_kid_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        self.bsc.key_encryption_key.kid = 'Invalid'

        # Assert
        with self.assertRaises(HttpResponseError) as e:
            await (await blob.download_blob()).content_as_bytes()
        self.assertEqual(str(e.exception), 'Decryption failed.')

    @record
    def test_get_blob_nonmatching_kid_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_nonmatching_kid_async())

    async def _test_put_blob_invalid_stream_type_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        small_stream = StringIO(u'small')
        large_stream = StringIO(u'large' * self.config.max_single_put_size)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        # Block blob specific single shot
        with self.assertRaises(TypeError) as e:
            await blob.upload_blob(small_stream, length=5)
        self.assertTrue(
            'Blob data should be of type bytes.' in str(e.exception))

        # Generic blob chunked
        with self.assertRaises(TypeError) as e:
            await blob.upload_blob(large_stream)
        self.assertTrue(
            'Blob data should be of type bytes.' in str(e.exception))

    @record
    def test_put_blob_invalid_stream_type_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_blob_invalid_stream_type_async())

    async def _test_put_blob_chunking_required_mult_of_block_size_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(self.config.max_single_put_size +
                                        self.config.max_block_size)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=3)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=3)

        # Assert
        self.assertEqual(content, blob_content)

    @record
    def test_put_blob_chunking_required_mult_of_block_size_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_blob_chunking_required_mult_of_block_size_async())

    async def _test_put_blob_chunking_required_non_mult_of_block_size_async(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = urandom(self.config.max_single_put_size + 1)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=3)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=3)

        # Assert
        self.assertEqual(content, blob_content)

    @record
    def test_put_blob_chunking_required_non_mult_of_block_size_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_blob_chunking_required_non_mult_of_block_size_async(
            ))

    async def _test_put_blob_chunking_required_range_specified_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(self.config.max_single_put_size * 2)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content,
                               length=self.config.max_single_put_size + 53,
                               max_concurrency=3)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=3)

        # Assert
        self.assertEqual(content[:self.config.max_single_put_size + 53],
                         blob_content)

    @record
    def test_put_blob_chunking_required_range_specified_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_blob_chunking_required_range_specified_async())

    async def _test_put_block_blob_single_shot_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = b'small'
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob()).content_as_bytes()

        # Assert
        self.assertEqual(content, blob_content)

    @record
    def test_put_block_blob_single_shot_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_blob_single_shot_async())

    async def _test_put_blob_range_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        content = b'Random repeats' * self.config.max_single_put_size * 5

        # All page blob uploads call _upload_chunks, so this will test the ability
        # of that function to handle ranges even though it's a small blob
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content[2:],
                               length=self.config.max_single_put_size + 5,
                               max_concurrency=1)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=1)

        # Assert
        self.assertEqual(content[2:2 + self.config.max_single_put_size + 5],
                         blob_content)

    @record
    def test_put_blob_range_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_blob_range_async())

    async def _test_put_blob_empty_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = b''
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=2)

        # Assert
        self.assertEqual(content, blob_content)

    @record
    def test_put_blob_empty_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_blob_empty_async())

    async def _test_put_blob_serial_upload_chunking_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(self.config.max_single_put_size + 1)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=1)
        blob_content = await (await blob.download_blob()).content_as_bytes(
            max_concurrency=1)

        # Assert
        self.assertEqual(content, blob_content)

    @record
    def test_put_blob_serial_upload_chunking_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_blob_serial_upload_chunking_async())

    async def _test_get_blob_range_beginning_to_middle_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=1)
        blob_content = await (await blob.download_blob(
            offset=0, length=50)).content_as_bytes(max_concurrency=1)

        # Assert
        self.assertEqual(content[:50], blob_content)

    @record
    def test_get_blob_range_beginning_to_middle_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_beginning_to_middle_async())

    async def _test_get_blob_range_middle_to_end_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content, max_concurrency=1)
        blob_content = await (await blob.download_blob(
            offset=100, length=28)).content_as_bytes()
        blob_content2 = await (await blob.download_blob(offset=100
                                                        )).content_as_bytes()

        # Assert
        self.assertEqual(content[100:], blob_content)
        self.assertEqual(content[100:], blob_content2)

    @record
    def test_get_blob_range_middle_to_end_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_middle_to_end_async())

    async def _test_get_blob_range_middle_to_middle_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=5, length=93)).content_as_bytes()

        # Assert
        self.assertEqual(content[5:98], blob_content)

    @record
    def test_get_blob_range_middle_to_middle_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_middle_to_middle_async())

    async def _test_get_blob_range_aligns_on_16_byte_block_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=48, length=16)).content_as_bytes()

        # Assert
        self.assertEqual(content[48:64], blob_content)

    @record
    def test_get_blob_range_aligns_on_16_byte_block_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_aligns_on_16_byte_block_async())

    async def _test_get_blob_range_expanded_to_beginning_block_align_async(
            self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=5, length=50)).content_as_bytes()

        # Assert
        self.assertEqual(content[5:55], blob_content)

    @record
    def test_get_blob_range_expanded_to_beginning_block_align_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_expanded_to_beginning_block_align_async(
            ))

    async def _test_get_blob_range_expanded_to_beginning_iv_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        self.bsc.require_encryption = True
        content = self.get_random_bytes(128)
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Act
        await blob.upload_blob(content)
        blob_content = await (await blob.download_blob(
            offset=22, length=20)).content_as_bytes()

        # Assert
        self.assertEqual(content[22:42], blob_content)

    @record
    def test_get_blob_range_expanded_to_beginning_iv_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_range_expanded_to_beginning_iv_async())

    async def _test_put_blob_strict_mode_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        content = urandom(512)

        # Assert
        for service in self.blob_types:
            blob_name = self._get_blob_reference(service)
            blob = self.bsc.get_blob_client(self.container_name, blob_name)

            with self.assertRaises(ValueError):
                await blob.upload_blob(content, blob_type=service)

            stream = BytesIO(content)
            with self.assertRaises(ValueError):
                await blob.upload_blob(stream, length=512, blob_type=service)

            FILE_PATH = 'blob_input.temp.dat'
            with open(FILE_PATH, 'wb') as stream:
                stream.write(content)
            with open(FILE_PATH, 'rb') as stream:
                with self.assertRaises(ValueError):
                    await blob.upload_blob(stream, blob_type=service)

            with self.assertRaises(ValueError):
                await blob.upload_blob('To encrypt', blob_type=service)

    @record
    def test_put_blob_strict_mode_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_blob_strict_mode_async())

    async def _test_get_blob_strict_mode_no_policy_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.key_encryption_key = None

        # Assert
        with self.assertRaises(ValueError):
            await (await blob.download_blob()).content_as_bytes()

    @record
    def test_get_blob_strict_mode_no_policy_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_strict_mode_no_policy_async())

    async def _test_get_blob_strict_mode_unencrypted_blob_async(self):
        # Arrange
        await self._setup()
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.require_encryption = True
        blob.key_encryption_key = KeyWrapper('key1')

        # Assert
        with self.assertRaises(HttpResponseError):
            await (await blob.download_blob()).content_as_bytes()

    @record
    def test_get_blob_strict_mode_unencrypted_blob_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_get_blob_strict_mode_unencrypted_blob_async())

    async def _test_invalid_methods_fail_block_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob_name = self._get_blob_reference(BlobType.BlockBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        with self.assertRaises(ValueError) as e:
            await blob.stage_block('block1', urandom(32))
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        with self.assertRaises(ValueError) as e:
            await blob.commit_block_list(['block1'])
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    @record
    def test_invalid_methods_fail_block_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_invalid_methods_fail_block_async())

    async def _test_invalid_methods_fail_append_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob_name = self._get_blob_reference(BlobType.AppendBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        with self.assertRaises(ValueError) as e:
            await blob.append_block(urandom(32))
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        with self.assertRaises(ValueError) as e:
            await blob.create_append_blob()
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        # All append_from operations funnel into append_from_stream, so testing one is sufficient
        with self.assertRaises(ValueError) as e:
            await blob.upload_blob(b'To encrypt',
                                   blob_type=BlobType.AppendBlob)
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    @record
    def test_invalid_methods_fail_append_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_invalid_methods_fail_append_async())

    async def _test_invalid_methods_fail_page_async(self):
        # Arrange
        await self._setup()
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob_name = self._get_blob_reference(BlobType.PageBlob)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # Assert
        with self.assertRaises(ValueError) as e:
            await blob.upload_page(urandom(512),
                                   offset=0,
                                   length=512,
                                   blob_type=BlobType.PageBlob)
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

        with self.assertRaises(ValueError) as e:
            await blob.create_page_blob(512)
        self.assertEqual(str(e.exception),
                         _ERROR_UNSUPPORTED_METHOD_FOR_ENCRYPTION)

    @record
    def test_invalid_methods_fail_page_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_invalid_methods_fail_page_async())

    async def _test_validate_encryption_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        kek = KeyWrapper('key1')
        self.bsc.key_encryption_key = kek
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        blob.require_encryption = False
        blob.key_encryption_key = None
        content = await blob.download_blob()
        data = await content.content_as_bytes()

        # The blob's 'encryptiondata' metadata carries the wrapped content
        # encryption key (CEK) and the AES-CBC initialization vector.
        encryption_data = _dict_to_encryption_data(
            loads(content.properties.metadata['encryptiondata']))
        iv = encryption_data.content_encryption_IV
        # Unwrap the CEK with the key encryption key, then decrypt manually and
        # strip the PKCS7 padding to recover the original plaintext.
        content_encryption_key = _validate_and_unwrap_cek(
            encryption_data, kek, None)
        cipher = _generate_AES_CBC_cipher(content_encryption_key, iv)
        decryptor = cipher.decryptor()
        unpadder = PKCS7(128).unpadder()

        content = decryptor.update(data) + decryptor.finalize()
        content = unpadder.update(content) + unpadder.finalize()

        self.assertEqual(self.bytes, content)

    @record
    def test_validate_encryption_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_validate_encryption_async())

    async def _test_create_block_blob_from_star_async(self):
        await self._setup()
        await self._create_blob_from_star(BlobType.BlockBlob, self.bytes,
                                          self.bytes)

        stream = BytesIO(self.bytes)
        await self._create_blob_from_star(BlobType.BlockBlob, self.bytes,
                                          stream)

        FILE_PATH = 'blob_input.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            stream.write(self.bytes)
        with open(FILE_PATH, 'rb') as stream:
            await self._create_blob_from_star(BlobType.BlockBlob, self.bytes,
                                              stream)

        await self._create_blob_from_star(BlobType.BlockBlob, b'To encrypt',
                                          'To encrypt')

    @record
    def test_create_block_blob_from_star_async(self):
        # This test only runs live
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_block_blob_from_star_async())

    async def _test_create_page_blob_from_star_async(self):
        await self._setup()
        content = self.get_random_bytes(512)
        await self._create_blob_from_star(BlobType.PageBlob, content, content)

        stream = BytesIO(content)
        await self._create_blob_from_star(BlobType.PageBlob,
                                          content,
                                          stream,
                                          length=512)

        FILE_PATH = 'blob_input.temp.dat'
        with open(FILE_PATH, 'wb') as stream:
            stream.write(content)

        with open(FILE_PATH, 'rb') as stream:
            await self._create_blob_from_star(BlobType.PageBlob, content,
                                              stream)

    @record
    def test_create_page_blob_from_star_async(self):
        # This test only runs live
        if TestMode.need_recording_file(self.test_mode):
            return
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_page_blob_from_star_async())

    async def _create_blob_from_star(self, blob_type, content, data, **kwargs):
        blob_name = self._get_blob_reference(blob_type)
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        blob.key_encryption_key = KeyWrapper('key1')
        blob.require_encryption = True
        await blob.upload_blob(data, blob_type=blob_type, **kwargs)

        blob_content = await (await blob.download_blob()).content_as_bytes()
        self.assertEqual(content, blob_content)

    async def _test_get_blob_to_star_async(self):
        # Arrange
        await self._setup()
        self.bsc.require_encryption = True
        self.bsc.key_encryption_key = KeyWrapper('key1')
        blob = await self._create_small_blob(BlobType.BlockBlob)

        # Act
        content = await blob.download_blob()
        iter_blob = b""
        async for data in content:
            iter_blob += data
        bytes_blob = await (await blob.download_blob()).content_as_bytes()
        stream_blob = BytesIO()
        await (await blob.download_blob()).download_to_stream(stream_blob)
        stream_blob.seek(0)
        text_blob = await (await blob.download_blob()).content_as_text()

        # Assert
        self.assertEqual(self.bytes, iter_blob)
        self.assertEqual(self.bytes, bytes_blob)
        self.assertEqual(self.bytes, stream_blob.read())
        self.assertEqual(self.bytes.decode(), text_blob)

    @record
    def test_get_blob_to_star_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_get_blob_to_star_async())
class StorageLargeBlockBlobTestAsync(StorageTestCase):
    def setUp(self):
        super(StorageLargeBlockBlobTestAsync, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()

        # test chunking functionality by reducing the threshold
        # for chunking and the size of each chunk, otherwise
        # the tests would take too long to execute
        self.bsc = BlobServiceClient(url,
                                     credential=credential,
                                     max_single_put_size=32 * 1024,
                                     max_block_size=2 * 1024 * 1024,
                                     min_large_block_upload_threshold=1 *
                                     1024 * 1024,
                                     transport=AiohttpTestTransport())
        self.config = self.bsc._config
        self.container_name = self.get_resource_name('utcontainer')

    def tearDown(self):
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                loop.run_until_complete(
                    self.bsc.delete_container(self.container_name))
            except:
                pass

        if os.path.isfile(FILE_PATH):
            try:
                os.remove(FILE_PATH)
            except:
                pass

        return super(StorageLargeBlockBlobTestAsync, self).tearDown()

    # --Helpers-----------------------------------------------------------------

    async def _setup(self):
        if not self.is_playback():
            try:
                await self.bsc.create_container(self.container_name)
            except:
                pass

    def _get_blob_reference(self):
        return self.get_resource_name(TEST_BLOB_PREFIX)

    async def _create_blob(self):
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        await blob.upload_blob(b'')
        return blob

    async def assertBlobEqual(self, container_name, blob_name, expected_data):
        blob = self.bsc.get_blob_client(container_name, blob_name)
        actual_data = await blob.download_blob()
        actual_bytes = b""
        async for data in actual_data:
            actual_bytes += data
        self.assertEqual(actual_bytes, expected_data)

    # --Test cases for block blobs --------------------------------------------

    async def _test_put_block_bytes_large_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        futures = []
        for i in range(5):
            futures.append(
                blob.stage_block('block {0}'.format(i).encode('utf-8'),
                                 os.urandom(LARGE_BLOCK_SIZE)))

        await asyncio.gather(*futures)

        # Assert

    @record
    def test_put_block_bytes_large_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_bytes_large_async())

    async def _test_put_block_bytes_large_with_md5_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        for i in range(5):
            resp = await blob.stage_block(
                'block {0}'.format(i).encode('utf-8'),
                os.urandom(LARGE_BLOCK_SIZE),
                validate_content=True)
            self.assertIsNone(resp)

    @record
    def test_put_block_bytes_large_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_block_bytes_large_with_md5_async())

    async def _test_put_block_stream_large_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        for i in range(5):
            stream = BytesIO(bytearray(LARGE_BLOCK_SIZE))
            resp = await blob.stage_block(
                'block {0}'.format(i).encode('utf-8'),
                stream,
                length=LARGE_BLOCK_SIZE)
            self.assertIsNone(resp)

        # Assert

    @record
    def test_put_block_stream_large_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_put_block_stream_large_async())

    async def _test_put_block_stream_large_with_md5_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob = await self._create_blob()

        # Act
        for i in range(5):
            stream = BytesIO(bytearray(LARGE_BLOCK_SIZE))
            resp = await blob.stage_block(
                'block {0}'.format(i).encode('utf-8'),
                stream,
                length=LARGE_BLOCK_SIZE,
                validate_content=True)
            self.assertIsNone(resp)

        # Assert

    @record
    def test_put_block_stream_large_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_put_block_stream_large_with_md5_async())

    async def _test_create_large_blob_from_path_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_path_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_large_blob_from_path_async())

    async def _test_create_large_blob_from_path_with_md5_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   validate_content=True,
                                   max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_path_with_md5_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_large_blob_from_path_with_md5_async())

    async def _test_create_large_blob_from_path_non_parallel_async(self):
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(self.get_random_bytes(100))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=1)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_path_non_parallel_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_large_blob_from_path_non_parallel_async())

    async def _test_create_large_blob_from_path_with_progress_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            # upload progress is surfaced on the pipeline response context
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   max_concurrency=2,
                                   raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_create_large_blob_from_path_with_progress_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_large_blob_from_path_with_progress_async())

    async def _test_create_large_blob_from_path_with_properties_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   content_settings=content_settings,
                                   max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_large_blob_from_path_with_properties_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_large_blob_from_path_with_properties_async())

    async def _test_create_large_blob_from_stream_chunked_upload_async(self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)

    @record
    def test_create_large_blob_from_stream_chunked_upload_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self._test_create_large_blob_from_stream_chunked_upload_async())

    async def _test_create_large_blob_from_stream_with_progress_chunked_upload_async(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        progress = []

        def callback(response):
            current = response.context['upload_stream_current']
            total = response.context['data_stream_total']
            if current is not None:
                progress.append((current, total))

        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   max_concurrency=2,
                                   raw_response_hook=callback)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        self.assert_upload_progress(len(data), self.config.max_block_size,
                                    progress)

    @record
    def test_create_large_blob_from_stream_with_progress_chunked_upload_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_large_blob_from_stream_with_progress_chunked_upload_async(
            ))

    async def _test_create_large_blob_from_stream_chunked_upload_with_count_async(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, length=blob_size, max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name,
                                   data[:blob_size])

    @record
    def test_create_large_blob_from_stream_chunked_upload_with_count_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_large_blob_from_stream_chunked_upload_with_count_async(
            ))

    async def _test_create_large_blob_from_stream_chunked_upload_with_count_and_properties_async(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        blob_size = len(data) - 301
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   length=blob_size,
                                   content_settings=content_settings,
                                   max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name,
                                   data[:blob_size])
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_large_blob_from_stream_chunked_upload_with_count_and_properties_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_large_blob_from_stream_chunked_upload_with_count_and_properties_async(
            ))

    async def _test_create_large_blob_from_stream_chunked_upload_with_properties_async(
            self):
        # parallel tests introduce random order of requests, can only run live
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        await self._setup()
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        data = bytearray(os.urandom(LARGE_BLOB_SIZE))
        with open(FILE_PATH, 'wb') as stream:
            stream.write(data)

        # Act
        content_settings = ContentSettings(content_type='image/png',
                                           content_language='spanish')
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream,
                                   content_settings=content_settings,
                                   max_concurrency=2)

        # Assert
        await self.assertBlobEqual(self.container_name, blob_name, data)
        properties = await blob.get_blob_properties()
        self.assertEqual(properties.content_settings.content_type,
                         content_settings.content_type)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_create_large_blob_from_stream_chunked_upload_with_properties_async(
            self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(
            self.
            _test_create_large_blob_from_stream_chunked_upload_with_properties_async(
            ))