Example #1
    async def test_create_largest_blob_from_path_without_network(
            self, storage_account_name, storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            storage_account_name, storage_account_key)
        await self._setup(storage_account_name, storage_account_key,
                          [payload_dropping_policy, credential_policy])
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        FILE_PATH = 'largest_blob_from_path.temp.{}.dat'.format(
            str(uuid.uuid4()))
        with open(FILE_PATH, 'wb') as stream:
            # Materialize LARGEST_BLOCK_SIZE bytes of dummy data on disk,
            # 100 MiB at a time.
            large_stream = LargeStream(LARGEST_BLOCK_SIZE, 100 * 1024 * 1024)
            chunk = large_stream.read()
            while chunk:
                stream.write(chunk)
                chunk = large_stream.read()

        # Act
        with open(FILE_PATH, 'rb') as stream:
            await blob.upload_blob(stream, max_concurrency=2)

        # Assert
        self._teardown(FILE_PATH)
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0],
                         LARGEST_BLOCK_SIZE)
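
All of these examples lean on a LargeStream helper defined elsewhere in the test module. The sketch below is a plausible minimal reconstruction, assuming it is a read-only, file-like producer of dummy bytes; the chunking strategy and default chunk size are illustrative, not the SDK test suite's actual implementation.

class LargeStream:
    """File-like stream yielding `length` bytes of repeated dummy data
    in chunks of at most `chunk_size`, so the full payload is never
    held in memory at once."""

    def __init__(self, length, chunk_size=4 * 1024 * 1024):
        self._remaining = length
        self._chunk = b'a' * chunk_size

    def read(self, size=None):
        # Return the next chunk, trimmed to `size` and to the bytes
        # still outstanding; an empty result signals end-of-stream.
        if self._remaining <= 0:
            return b''
        n = len(self._chunk)
        if size is not None:
            n = min(n, size)
        n = min(n, self._remaining)
        self._remaining -= n
        return self._chunk[:n]

With such a class, Example #1's LargeStream(LARGEST_BLOCK_SIZE, 100 * 1024 * 1024) writes the full block to disk 100 MiB at a time.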
Example #2
    def setUp(self):
        super(LargeFileTest, self).setUp()
        url = self._get_account_url()
        self.payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            self.settings.STORAGE_DATA_LAKE_ACCOUNT_NAME,
            self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY)
        self.dsc = DataLakeServiceClient(
            url,
            credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
            _additional_pipeline_policies=[
                self.payload_dropping_policy, credential_policy
            ])
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(
                self.file_system_name)
            try:
                loop.run_until_complete(
                    file_system.create_file_system(timeout=5))

            except ResourceExistsError:
                pass
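
The no-network trick in all of these tests is the PayloadDroppingPolicy injected through `_additional_pipeline_policies`. Its definition is not shown on this page; the following is a minimal sketch of the idea, assuming azure-core's SansIOHTTPPolicy and recognizing Put Block requests by their `comp=block` query parameter. The counter attributes match what the assertions read, but the size bookkeeping is simplified.

from urllib.parse import parse_qs, urlparse

from azure.core.pipeline.policies import SansIOHTTPPolicy


class PayloadDroppingPolicy(SansIOHTTPPolicy):
    """Counts Put Block / Put Blob requests, records their payload
    sizes, and swaps each body for a tiny dummy one so the multi-GiB
    payloads never actually cross the wire."""

    def __init__(self):
        self.put_block_counter = 0
        self.put_block_sizes = []
        self.put_blob_counter = 0

    def on_request(self, request):
        http_request = request.http_request
        if http_request.method != 'PUT':
            return
        comp = parse_qs(urlparse(http_request.url).query).get('comp', [None])[0]
        if comp == 'block':  # Put Block
            self.put_block_counter += 1
            # Simplified: trust the declared Content-Length instead of
            # measuring a possibly non-seekable stream body.
            self.put_block_sizes.append(
                int(http_request.headers.get('Content-Length', 0)))
            self._drop_body(http_request)
        elif comp is None:  # Put Blob carries no comp parameter
            self.put_blob_counter += 1
            self._drop_body(http_request)

    @staticmethod
    def _drop_body(http_request):
        http_request.data = b'dummy'
        http_request.headers['Content-Length'] = str(len(b'dummy'))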
Example #3
    async def test_put_block_stream_largest_without_network(
            self, storage_account_name, storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            storage_account_name, storage_account_key)
        await self._setup(storage_account_name, storage_account_key,
                          [payload_dropping_policy, credential_policy])
        blob = await self._create_blob()

        # Act
        stream = LargeStream(LARGEST_BLOCK_SIZE)
        block_id = str(uuid.uuid4())
        request_id = str(uuid.uuid4())
        resp = await blob.stage_block(block_id,
                                      stream,
                                      length=LARGEST_BLOCK_SIZE,
                                      client_request_id=request_id)
        await blob.commit_block_list([BlobBlock(block_id)])
        block_list = await blob.get_block_list()

        # Assert
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 1)
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0],
                         LARGEST_BLOCK_SIZE)
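
_format_shared_key_credential is another helper from the shared test code, not shown here. Presumably it wraps the account name and key in the storage SDK's SharedKeyCredentialPolicy so that the rewritten requests are still signed; a plausible sketch under that assumption:

from azure.storage.blob._shared.authentication import SharedKeyCredentialPolicy


def _format_shared_key_credential(account_name, account_key):
    # Hypothetical reconstruction: build the pipeline policy that signs
    # each outgoing request with the account's shared key.
    return SharedKeyCredentialPolicy(account_name=account_name,
                                     account_key=account_key)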
Example #4
    async def test_put_block_bytes_largest_without_network(
            self, resource_group, location, storage_account,
            storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            storage_account.name, storage_account_key)
        await self._setup(storage_account, storage_account_key,
                          [payload_dropping_policy, credential_policy])
        blob = await self._create_blob()

        # Act
        data = urandom(LARGEST_BLOCK_SIZE)
        block_id = str(uuid.uuid4()).encode('utf-8')
        resp = await blob.stage_block(
            block_id,
            data,
            length=LARGEST_BLOCK_SIZE)
        await blob.commit_block_list([BlobBlock(block_id)])
        block_list = await blob.get_block_list()

        # Assert
        self.assertIsNotNone(resp)
        assert 'content_md5' in resp
        assert 'content_crc64' in resp
        assert 'request_id' in resp
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[1]), 0)
        self.assertEqual(len(block_list[0]), 1)
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0],
                         LARGEST_BLOCK_SIZE)
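
LARGEST_BLOCK_SIZE and LARGEST_SINGLE_UPLOAD_SIZE are module-level constants in the test file. Assuming they mirror the Blob service limits introduced with REST API version 2019-12-12, they would be defined roughly as:

# Assumed values mirroring the documented service maximums:
# 4000 MiB per staged block, 5000 MiB per single-shot Put Blob.
LARGEST_BLOCK_SIZE = 4000 * 1024 * 1024
LARGEST_SINGLE_UPLOAD_SIZE = 5000 * 1024 * 1024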
Example #5
    async def test_create_largest_blob_from_stream_single_upload_without_network(
            self, resource_group, location, storage_account,
            storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            storage_account.name, storage_account_key)
        await self._setup(storage_account, storage_account_key,
                          [payload_dropping_policy, credential_policy],
                          max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE + 1)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        stream = LargeStream(LARGEST_SINGLE_UPLOAD_SIZE)

        # Act
        await blob.upload_blob(stream, length=LARGEST_SINGLE_UPLOAD_SIZE,
                               max_concurrency=1)

        # Assert
        self.assertEqual(payload_dropping_policy.put_block_counter, 0)
        self.assertEqual(payload_dropping_policy.put_blob_counter, 1)
Example #6
    async def test_create_largest_blob_from_stream_without_network(
            self, resource_group, location, storage_account,
            storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            storage_account.name, storage_account_key)
        await self._setup(storage_account, storage_account_key,
                          [payload_dropping_policy, credential_policy])
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        number_of_blocks = 50000

        stream = LargeStream(LARGEST_BLOCK_SIZE * number_of_blocks)

        # Act
        await blob.upload_blob(stream, max_concurrency=1)

        # Assert
        self.assertEqual(payload_dropping_policy.put_block_counter,
                         number_of_blocks)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0],
                         LARGEST_BLOCK_SIZE)
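
Note that 50,000 is also the maximum number of blocks in a block blob, so with the assumed 4000 MiB block size this example commits roughly 50000 × 4000 MiB ≈ 190.7 TiB, the documented block blob size ceiling.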
Example #7
    async def _setUp(self, account_name, account_key):
        url = self._get_account_url(account_name)
        self.payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(account_name,
                                                          account_key)
        self.dsc = DataLakeServiceClient(
            url,
            credential=account_key,
            _additional_pipeline_policies=[self.payload_dropping_policy,
                                           credential_policy])

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(self.file_system_name)
            try:
                await file_system.create_file_system(timeout=5)

            except ResourceExistsError:
                pass
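
Example #2's synchronous setUp drives the same coroutines with loop.run_until_complete, while this async _setUp simply awaits them. Either way the async client should be closed when the test finishes; a hypothetical counterpart:

    async def _tearDown(self):
        # Hypothetical teardown: release the async client's underlying
        # transport session.
        await self.dsc.close()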