def setUp(self):
        super(LargeFileTest, self).setUp()
        url = self._get_account_url()
        # Two injected pipeline policies: one drops large request payloads,
        # the other signs requests with the shared key.
        self.payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = format_shared_key_credential(
            [self.settings.STORAGE_DATA_LAKE_ACCOUNT_NAME, "dummy"],
            self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY)
        self.dsc = DataLakeServiceClient(
            url,
            credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
            _additional_pipeline_policies=[
                self.payload_dropping_policy, credential_policy
            ])
        # setUp itself is synchronous, so enter the async client's context
        # manually on the event loop.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            # Only create the file system against a live service; recorded
            # playback sessions already contain it.
            file_system = self.dsc.get_file_system_client(
                self.file_system_name)
            try:
                loop = asyncio.get_event_loop()
                loop.run_until_complete(
                    file_system.create_file_system(timeout=5))
            except ResourceExistsError:
                pass
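
PayloadDroppingPolicy and format_shared_key_credential are helpers from the test suite, not SDK exports. Below is a minimal sketch of the policy, assuming azure-core's SansIOHTTPPolicy hook; the URL heuristics and the _replace_body/_body_length helpers are guesses reverse-engineered from the assertions in these examples. The second shared-key policy presumably re-signs each request after the body swap, which would explain why it is appended after the dropper even though the client already holds the account key.

from azure.core.pipeline.policies import SansIOHTTPPolicy


class PayloadDroppingPolicy(SansIOHTTPPolicy):
    """Counts Put Block / Put Blob traffic and swaps each large body for a
    tiny placeholder so the bytes never cross the wire."""

    def __init__(self):
        self.put_block_counter = 0
        self.put_block_sizes = []
        self.put_blob_counter = 0

    def on_request(self, request):
        http_request = request.http_request
        url = http_request.url.lower()
        if "blockid=" in url:  # Put Block (?comp=block&blockid=...)
            self.put_block_counter += 1
            self.put_block_sizes.append(_body_length(http_request.body))
            _replace_body(http_request)
        elif http_request.method == "PUT" and http_request.body and "comp=" not in url:
            self.put_blob_counter += 1  # single-shot Put Blob
            _replace_body(http_request)


def _replace_body(http_request):
    # Swap the payload and fix Content-Length to match the placeholder.
    http_request.body = b"dummy_body"
    http_request.headers["Content-Length"] = str(len(b"dummy_body"))


def _body_length(body):
    # Bodies arrive as bytes or as a stream; draining a stream to measure
    # it is safe here because it is about to be thrown away anyway.
    if isinstance(body, bytes):
        return len(body)
    length = 0
    chunk = body.read(4 * 1024 * 1024)
    while chunk:
        length += len(chunk)
        chunk = body.read(4 * 1024 * 1024)
    return length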
Example #2
    def test_put_block_bytes_largest_without_network(self, resource_group,
                                                     location, storage_account,
                                                     storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = format_shared_key_credential(
            [storage_account.name, "dummy"], storage_account_key)
        self._setup(storage_account, storage_account_key,
                    [payload_dropping_policy, credential_policy])
        blob = self._create_blob()

        # Act
        data = urandom(LARGEST_BLOCK_SIZE)  # dropped by the policy at send time
        block_id = str(uuid.uuid4()).encode('utf-8')
        resp = blob.stage_block(block_id, data, length=LARGEST_BLOCK_SIZE)
        blob.commit_block_list([BlobBlock(block_id)])
        block_list = blob.get_block_list()

        # Assert
        self.assertIsNotNone(resp)
        self.assertIn('content_md5', resp)
        self.assertIn('content_crc64', resp)
        self.assertIn('request_id', resp)
        # get_block_list returns a (committed, uncommitted) pair of lists.
        self.assertIsNotNone(block_list)
        self.assertEqual(len(block_list), 2)
        self.assertEqual(len(block_list[0]), 1)  # the one committed block
        self.assertEqual(len(block_list[1]), 0)  # nothing left uncommitted
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0],
                         LARGEST_BLOCK_SIZE)
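
Neither size constant is defined on this page. Assuming they mirror the Blob service limits that apply from API version 2019-12-12 onward, their definitions would read:

LARGEST_BLOCK_SIZE = 4000 * 1024 * 1024          # 4000 MiB, the per-block cap
LARGEST_SINGLE_UPLOAD_SIZE = 5000 * 1024 * 1024  # 5000 MiB, the single Put Blob cap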
Example #3
    def test_create_largest_blob_from_path_without_network(
            self, resource_group, location, storage_account,
            storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = format_shared_key_credential(
            [storage_account.name, "dummy"], storage_account_key)
        self._setup(storage_account, storage_account_key,
                    [payload_dropping_policy, credential_policy])
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)
        FILE_PATH = 'largest_blob_from_path.temp.{}.dat'.format(
            str(uuid.uuid4()))
        # Materialize one block's worth of data as a temp file, written in
        # 100 MiB chunks so the payload is never fully resident in memory.
        with open(FILE_PATH, 'wb') as stream:
            large_stream = LargeStream(LARGEST_BLOCK_SIZE, 100 * 1024 * 1024)
            chunk = large_stream.read()
            while chunk:
                stream.write(chunk)
                chunk = large_stream.read()

        # Act
        with open(FILE_PATH, 'rb') as stream:
            blob.upload_blob(stream, max_concurrency=2)

        # Assert
        self._teardown(FILE_PATH)  # remove the temp file before asserting
        self.assertEqual(payload_dropping_policy.put_block_counter, 1)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0],
                         LARGEST_BLOCK_SIZE)
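
LargeStream is likewise a test-suite helper not shown on this page. A sketch under the assumption that it serves the requested number of pseudo-random bytes from one small recycled buffer; the constructor and method names are inferred from the calls above:

from os import urandom


class LargeStream:
    """A read-only stream of `length` bytes that recycles a small buffer
    instead of allocating the whole payload at once."""

    def __init__(self, length, initial_buffer_length=1024 * 1024):
        self._base_data = urandom(initial_buffer_length)
        self._base_data_length = initial_buffer_length
        self._remaining = length

    def read(self, size=None):
        if self._remaining == 0:
            return b""
        count = self._base_data_length if size is None else size
        count = min(count, self._remaining)
        if count > self._base_data_length:
            # Grow the buffer once if a caller asks for bigger chunks.
            self._base_data = urandom(count)
            self._base_data_length = count
        self._remaining -= count
        return self._base_data[:count]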
Example #4
    def test_create_largest_blob_from_stream_single_upload_without_network(
            self, resource_group, location, storage_account,
            storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = format_shared_key_credential(
            [storage_account.name, "dummy"], storage_account_key)
        self._setup(storage_account,
                    storage_account_key,
                    [payload_dropping_policy, credential_policy],
                    max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE)
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        stream = LargeStream(LARGEST_SINGLE_UPLOAD_SIZE)

        # Act
        blob.upload_blob(stream,
                         length=LARGEST_SINGLE_UPLOAD_SIZE,
                         max_concurrency=1)

        # Assert: the whole upload fit in a single Put Blob, so no blocks
        # were ever staged.
        self.assertEqual(payload_dropping_policy.put_block_counter, 0)
        self.assertEqual(payload_dropping_policy.put_blob_counter, 1)
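
The only difference from the chunked tests is max_single_put_size, which _setup presumably forwards to the BlobServiceClient constructor, where it is a real tuning kwarg. A sketch of the relevant call, with the rest of the _setup plumbing elided and account_url assumed from the storage test base class:

self.bsc = BlobServiceClient(
    self.account_url(storage_account, "blob"),
    credential=storage_account_key,
    # Uploads at or below this size go out as one Put Blob request
    # instead of being split into staged blocks.
    max_single_put_size=LARGEST_SINGLE_UPLOAD_SIZE,
    _additional_pipeline_policies=[payload_dropping_policy, credential_policy])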
Example #5
    def test_create_largest_blob_from_stream_without_network(
            self, resource_group, location, storage_account,
            storage_account_key):
        payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = format_shared_key_credential(
            [storage_account.name, "dummy"], storage_account_key)
        self._setup(storage_account, storage_account_key,
                    [payload_dropping_policy, credential_policy])
        blob_name = self._get_blob_reference()
        blob = self.bsc.get_blob_client(self.container_name, blob_name)

        # 50,000 blocks is the service maximum for a single block blob.
        number_of_blocks = 50000

        stream = LargeStream(LARGEST_BLOCK_SIZE * number_of_blocks)

        # Act
        blob.upload_blob(stream, max_concurrency=1)

        # Assert: every chunk went out as its own maximum-size Put Block.
        self.assertEqual(payload_dropping_policy.put_block_counter,
                         number_of_blocks)
        self.assertEqual(payload_dropping_policy.put_block_sizes[0],
                         LARGEST_BLOCK_SIZE)
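
For scale: with the assumed 4000 MiB block size, this last test streams 50,000 × 4000 MiB ≈ 190.7 TiB through the pipeline, the largest block blob the service can address, and only the payload-dropping policy makes that tractable. max_concurrency=1 keeps the uploader on a single thread, so the policy's counters and its put_block_sizes list are filled in deterministically.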