def setUp(self):
        """Create and enter the async DataLakeServiceClient and ensure the
        test file system exists when running live.

        NOTE(review): PayloadDroppingPolicy presumably strips large request
        bodies so they are not persisted into test recordings, with the
        shared-key credential policy re-signing the modified requests —
        confirm against the policy's implementation.
        """
        super(LargeFileTest, self).setUp()
        url = self._get_account_url()
        self.payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            self.settings.STORAGE_DATA_LAKE_ACCOUNT_NAME,
            self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY)
        self.dsc = DataLakeServiceClient(
            url,
            credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
            _additional_pipeline_policies=[
                self.payload_dropping_policy, credential_policy
            ])
        # setUp is synchronous, so drive the async __aenter__ on the loop.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(
                self.file_system_name)
            try:
                # Reuse the loop fetched above (the original re-fetched it).
                loop.run_until_complete(
                    file_system.create_file_system(timeout=5))
            except ResourceExistsError:
                # File system left over from a previous run; safe to reuse.
                pass
 def _setUp(self, account_name, account_key):
     """Build the async DataLake service client for *account_name* and
     reset the per-test bookkeeping state."""
     endpoint = self.account_url(account_name, 'dfs')
     self.dsc = DataLakeServiceClient(
         endpoint,
         credential=account_key,
         transport=AiohttpTestTransport(),
         logging_enable=True)
     self.config = self.dsc._config
     self.test_file_systems = []
 def setUp(self):
     """Create and enter the async service client shared by the tests."""
     super(FileSystemTest, self).setUp()
     url = self._get_account_url()
     self.dsc = DataLakeServiceClient(url, credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
                                      transport=AiohttpTestTransport())
     self.config = self.dsc._config
     # setUp is synchronous, so drive the async __aenter__ on the loop.
     loop = asyncio.get_event_loop()
     loop.run_until_complete(self.dsc.__aenter__())
     # File systems created by tests land here; presumably deleted in
     # tearDown — confirm against the teardown logic.
     self.test_file_systems = []
 async def test_service_client_session_closes_after_filesystem_creation(
         self, datalake_storage_account_name, datalake_storage_account_key):
     """Exiting a client's async context manager closes only that client's
     session; an independent client on the same account keeps working."""
     self._setUp(datalake_storage_account_name, datalake_storage_account_key)
     # Arrange
     dsc2 = DataLakeServiceClient(self.dsc.url, credential=datalake_storage_account_key)
     async with DataLakeServiceClient(
             self.dsc.url, credential=datalake_storage_account_key) as ds_client:
         fs1 = await ds_client.create_file_system(self._get_file_system_reference(prefix="fs1"))
         await fs1.delete_file_system()
     # ds_client's session is closed here; dsc2 must still be usable.
     await dsc2.create_file_system(self._get_file_system_reference(prefix="fs2"))
     await dsc2.close()
    async def _setUp(self, account_name, account_key):
        """Create the service client and ensure the test file system exists
        (live runs only; playback relies on recorded responses)."""
        url = self.account_url(account_name, 'dfs')
        self.dsc = DataLakeServiceClient(url, credential=account_key)

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(self.file_system_name)
            try:
                await file_system.create_file_system(timeout=5)

            except ResourceExistsError:
                # Left over from a previous run; safe to reuse.
                pass
    async def _test_read_file_with_user_delegation_key(self):
        """Read a file back through a SAS signed with a user delegation key."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Create file
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)
        # Upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # Get user delegation key (valid for one hour) via OAuth
        token_credential = self.generate_async_oauth_token()
        service_client = DataLakeServiceClient(self._get_oauth_account_url(), credential=token_credential)
        user_delegation_key = await service_client.get_user_delegation_key(datetime.utcnow(),
                                                                     datetime.utcnow() + timedelta(hours=1))

        sas_token = generate_file_sas(file_client.account_name,
                                      file_client.file_system_name,
                                      None,
                                      file_client.path_name,
                                      user_delegation_key=user_delegation_key,
                                      permission=FileSasPermissions(read=True, create=True, write=True, delete=True),
                                      expiry=datetime.utcnow() + timedelta(hours=1),
                                      )

        # Download the data and make sure it is the same as uploaded data
        new_file_client = DataLakeFileClient(self._get_account_url(),
                                             file_client.file_system_name,
                                             file_client.path_name,
                                             credential=sas_token)
        downloaded_data = await new_file_client.read_file()
        self.assertEqual(data, downloaded_data)
Esempio n. 7
0
    async def list_paths_in_file_system(self):
        """Upload a file into a fresh file system, enumerate its paths,
        then delete the file system again."""

        # Instantiate a DataLakeServiceClient using a connection string
        from azure.storage.filedatalake.aio import DataLakeServiceClient
        datalake_service_client = DataLakeServiceClient.from_connection_string(
            self.connection_string)

        async with datalake_service_client:
            # Instantiate a FileSystemClient
            file_system_client = datalake_service_client.get_file_system_client(
                "mypathfilesystem")

            # Create new File System
            await file_system_client.create_file_system()

            # [START upload_file_to_file_system]
            file_client = file_system_client.get_file_client("myfile")
            await file_client.create_file()
            with open(SOURCE_FILE, "rb") as data:
                # BUG FIX: tell() on a freshly opened file returns 0, so the
                # original flush_data(length) committed zero bytes. Read the
                # contents and flush the real length instead.
                contents = data.read()
                await file_client.append_data(contents, 0)
                await file_client.flush_data(len(contents))
            # [END upload_file_to_file_system]

            # [START get_paths_in_file_system]
            path_list = file_system_client.get_paths()
            async for path in path_list:
                print(path.name + '\n')
            # [END get_paths_in_file_system]

            # Delete file system
            await file_system_client.delete_file_system()
    async def test_restore_file_system_with_sas(self, datalake_storage_account_name, datalake_storage_account_key):
        """Soft-delete a file system and restore it using an account SAS.

        Skipped unconditionally: needs a live, soft-delete-enabled account.
        """
        pytest.skip(
            "We are generating a SAS token therefore play only live but we also need a soft delete enabled account.")
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        token = generate_account_sas(
            self.dsc.account_name,
            self.dsc.credential.account_key,
            ResourceTypes(service=True, file_system=True),
            AccountSasPermissions(read=True, write=True, list=True, delete=True),
            datetime.utcnow() + timedelta(hours=1),
        )
        # A second client authenticated purely with the account SAS token.
        dsc = DataLakeServiceClient(self.dsc.url, token)
        name = self._get_file_system_reference(prefix="filesystem")
        filesystem_client = await dsc.create_file_system(name)
        await filesystem_client.delete_file_system()
        # to make sure the filesystem is deleted
        with self.assertRaises(ResourceNotFoundError):
            await filesystem_client.get_file_system_properties()

        filesystem_list = []
        async for fs in self.dsc.list_file_systems(include_deleted=True):
            filesystem_list.append(fs)
        self.assertTrue(len(filesystem_list) >= 1)

        restored_version = 0
        for filesystem in filesystem_list:
            # find the deleted filesystem and restore it under a new name
            if filesystem.deleted and filesystem.name == filesystem_client.file_system_name:
                restored_fs_client = await dsc.undelete_file_system(filesystem.name, filesystem.deleted_version,
                                                                    new_name="restored" + name + str(restored_version))
                restored_version += 1

                # to make sure the deleted filesystem is restored
                props = await restored_fs_client.get_file_system_properties()
                self.assertIsNotNone(props)
    async def test_get_access_control_using_delegation_sas_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Read ACLs on the root directory through a user-delegation SAS
        scoped (via agent_object_id) to the directory's owner."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        url = self._get_account_url(datalake_storage_account_name)
        token_credential = self.generate_oauth_token()
        dsc = DataLakeServiceClient(url, token_credential, logging_enable=True)
        file_system_name = self._get_file_system_reference()
        directory_client_name = '/'
        (await dsc.create_file_system(file_system_name)).get_directory_client(directory_client_name)

        # Make a random guid the owner of the root directory so the SAS
        # issued for that object id is authorized for ACL operations.
        directory_client = self.dsc.get_directory_client(file_system_name, directory_client_name)
        random_guid = uuid.uuid4()
        await directory_client.set_access_control(owner=random_guid,
                                                  permissions='0777')
        acl = await directory_client.get_access_control()

        delegation_key = await dsc.get_user_delegation_key(datetime.utcnow(),
                                                           datetime.utcnow() + timedelta(hours=1))

        token = generate_file_system_sas(
            dsc.account_name,
            file_system_name,
            delegation_key,
            permission=FileSystemSasPermissions(
                read=True, execute=True, manage_access_control=True, manage_ownership=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            agent_object_id=random_guid
        )
        sas_directory_client = DataLakeDirectoryClient(self.dsc.url, file_system_name, directory_client_name,
                                                       credential=token, logging_enable=True)
        access_control = await sas_directory_client.get_access_control()

        self.assertIsNotNone(access_control)
Esempio n. 10
0
 async def _test_service_client_session_closes_after_filesystem_creation(
         self):
     """Exiting the async context manager closes only that client's
     session; an independent client on the same account keeps working."""
     # Arrange
     dsc2 = DataLakeServiceClient(
         self.dsc.url,
         credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY)
     async with DataLakeServiceClient(
             self.dsc.url,
             credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY
     ) as ds_client:
         fs1 = await ds_client.create_file_system(
             self._get_file_system_reference(prefix="fs1"))
         await fs1.delete_file_system()
     # ds_client is closed here; dsc2 must still be able to operate.
     await dsc2.create_file_system(
         self._get_file_system_reference(prefix="fs2"))
     await dsc2.close()
    async def test_list_paths_using_file_sys_delegation_sas_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """List paths with a user-delegation SAS restricted to the owner of
        the root directory (agent_object_id)."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        url = self._get_account_url(datalake_storage_account_name)
        token_credential = self.generate_oauth_token()
        dsc = DataLakeServiceClient(url, token_credential)
        file_system_name = self._get_file_system_reference()
        directory_client_name = '/'
        directory_client = (await dsc.create_file_system(file_system_name)).get_directory_client(directory_client_name)

        # Make a random guid the owner of the root directory so the SAS
        # issued for that object id is authorized to list paths.
        random_guid = uuid.uuid4()
        await directory_client.set_access_control(owner=random_guid, permissions='0777')

        delegation_key = await dsc.get_user_delegation_key(datetime.utcnow(),
                                                           datetime.utcnow() + timedelta(hours=1))

        token = generate_file_system_sas(
            dsc.account_name,
            file_system_name,
            delegation_key,
            permission=DirectorySasPermissions(list=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            agent_object_id=random_guid
        )
        # Renamed from sas_directory_client: this is a FileSystemClient.
        sas_file_system_client = FileSystemClient(self.dsc.url, file_system_name,
                                                  credential=token)
        paths = list()
        async for path in sas_file_system_client.get_paths():
            paths.append(path)

        # The file system was just created with no files, so listing should
        # yield nothing. The original `assertEqual(0, 0)` asserted nothing.
        self.assertEqual(0, len(paths))
Esempio n. 12
0
    async def set_metadata_on_file_system(self):
        """Sample: set and read back metadata on a file system, deleting the
        file system afterwards even if an intermediate step fails."""

        # Instantiate a DataLakeServiceClient using a connection string
        from azure.storage.filedatalake.aio import DataLakeServiceClient
        datalake_service_client = DataLakeServiceClient.from_connection_string(
            self.connection_string)

        async with datalake_service_client:
            # Instantiate a FileSystemClient
            file_system_client = datalake_service_client.get_file_system_client(
                "mymetadatafilesystemsync")

            try:
                # Create new File System
                await file_system_client.create_file_system()

                # [START set_file_system_metadata]
                # Create key, value pairs for metadata
                metadata = {'type': 'test'}

                # Set metadata on the file system
                await file_system_client.set_file_system_metadata(
                    metadata=metadata)
                # [END set_file_system_metadata]

                # Get file system properties (metadata travels with them)
                properties = await file_system_client.get_file_system_properties(
                )

            finally:
                # Delete file system
                await file_system_client.delete_file_system()
Esempio n. 13
0
    async def acquire_lease_on_file_system(self):
        """Sample: acquire a lease on a file system and delete the file
        system by presenting that lease."""

        # Instantiate a DataLakeServiceClient using a connection string
        # [START create_data_lake_service_client_from_conn_str]
        from azure.storage.filedatalake.aio import DataLakeServiceClient
        datalake_service_client = DataLakeServiceClient.from_connection_string(
            self.connection_string)
        # [END create_data_lake_service_client_from_conn_str]
        async with datalake_service_client:
            # Instantiate a FileSystemClient
            file_system_client = datalake_service_client.get_file_system_client(
                "myleasefilesystem")

            # Create new File System
            try:
                await file_system_client.create_file_system()
            except ResourceExistsError:
                # Reuse the file system from a previous run.
                pass

            # [START acquire_lease_on_file_system]
            # Acquire a lease on the file system
            lease = await file_system_client.acquire_lease()

            # Delete file system by passing in the lease
            await file_system_client.delete_file_system(lease=lease)
Esempio n. 14
0
    async def file_system_sample(self):
        """Sample: create a file system, read its properties, and always
        delete it afterwards."""

        # [START create_file_system_client_from_service]
        # Instantiate a DataLakeServiceClient using a connection string
        from azure.storage.filedatalake.aio import DataLakeServiceClient
        datalake_service_client = DataLakeServiceClient.from_connection_string(
            self.connection_string)

        async with datalake_service_client:
            # Instantiate a FileSystemClient
            file_system_client = datalake_service_client.get_file_system_client(
                "mynewfilesystems")
            # [END create_file_system_client_from_service]

            try:
                # [START create_file_system]
                await file_system_client.create_file_system()
                # [END create_file_system]

                # [START get_file_system_properties]
                properties = await file_system_client.get_file_system_properties(
                )
                # [END get_file_system_properties]

            finally:
                # [START delete_file_system]
                await file_system_client.delete_file_system()
Esempio n. 15
0
    async def get_directory_client_from_file_system(self):
        """Sample: obtain a DataLakeDirectoryClient from a FileSystemClient,
        then clean up the file system."""

        # Instantiate a DataLakeServiceClient using a connection string
        from azure.storage.filedatalake.aio import DataLakeServiceClient
        datalake_service_client = DataLakeServiceClient.from_connection_string(
            self.connection_string)

        async with datalake_service_client:
            # Instantiate a FileSystemClient
            file_system_client = datalake_service_client.get_file_system_client(
                "mydirectoryfilesystem")

            # Create new File System
            try:
                await file_system_client.create_file_system()
            except ResourceExistsError:
                # Reuse the file system from a previous run.
                pass

            # [START get_directory_client_from_file_system]
            # Get the DataLakeDirectoryClient from the FileSystemClient to interact with a specific file
            directory_client = file_system_client.get_directory_client(
                "mynewdirectory")
            # [END get_directory_client_from_file_system]

            # Delete file system
            await file_system_client.delete_file_system()
async def run():
    """Entry point: create a throwaway file system, run the directory
    sample against it, and delete the file system afterwards.

    Reads STORAGE_ACCOUNT_NAME / STORAGE_ACCOUNT_KEY from the environment.
    """
    account_name = os.getenv('STORAGE_ACCOUNT_NAME', "")
    account_key = os.getenv('STORAGE_ACCOUNT_KEY', "")

    # set up the service client with the credentials from the environment variables
    service_client = DataLakeServiceClient(
        account_url="{}://{}.dfs.core.windows.net".format(
            "https", account_name),
        credential=account_key)

    async with service_client:
        # generate a random name for testing purpose
        fs_name = "testfs{}".format(random.randint(1, 1000))
        print("Generating a test filesystem named '{}'.".format(fs_name))

        # create the filesystem
        filesystem_client = await service_client.create_file_system(
            file_system=fs_name)

        # invoke the sample code
        try:
            await directory_sample(filesystem_client)
        finally:
            # clean up the demo filesystem
            await filesystem_client.delete_file_system()
    def setUp(self):
        """Create/enter the async service client and ensure the test file
        system exists when running live (playback uses recordings)."""
        super(DirectoryTest, self).setUp()
        url = self._get_account_url()
        self.dsc = DataLakeServiceClient(url, credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
                                         transport=AiohttpTestTransport())

        # setUp is synchronous, so drive the async __aenter__ on the loop.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())
        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(self.file_system_name)
            try:
                loop.run_until_complete(file_system.create_file_system(timeout=5))
            except ResourceExistsError:
                # Left over from a previous run; safe to reuse.
                pass
 async def test_list_system_filesystems_async(self,
                                              datalake_storage_account_name,
                                              datalake_storage_account_key):
     """Listing with include_system=True must surface the $logs file system."""
     self._setUp(datalake_storage_account_name,
                 datalake_storage_account_key)
     # Arrange
     dsc = DataLakeServiceClient(self.dsc.url,
                                 credential=datalake_storage_account_key)
     # Act
     filesystems = [fs async for fs in
                    dsc.list_file_systems(include_system=True)]
     # Assert
     found = any(fs.name == "$logs" for fs in filesystems)
     self.assertEqual(found, True)
Esempio n. 19
0
    async def _setUp(self, account_name, account_key):
        """Create the service client with extra pipeline policies and ensure
        the test file system exists when running live.

        NOTE(review): PayloadDroppingPolicy presumably strips request bodies
        from recordings, with the credential policy re-signing requests —
        confirm against the policy's implementation.
        """
        url = self._get_account_url(account_name)
        self.payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(account_name, account_key)
        self.dsc = DataLakeServiceClient(url,
                                         credential=account_key,
                                         _additional_pipeline_policies=[self.payload_dropping_policy, credential_policy])

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(self.file_system_name)
            try:
                await file_system.create_file_system(timeout=5)

            except ResourceExistsError:
                # Left over from a previous run; safe to reuse.
                pass
Esempio n. 20
0
class FileTest(StorageTestCase):
    def setUp(self):
        """Create/enter the async service client and ensure the test file
        system exists when running live (playback uses recordings)."""
        super(FileTest, self).setUp()
        url = self._get_account_url()
        self.dsc = DataLakeServiceClient(
            url, credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY)
        # setUp is synchronous, so drive the async __aenter__ on the loop.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(
                self.file_system_name)
            try:
                loop = asyncio.get_event_loop()
                loop.run_until_complete(
                    file_system.create_file_system(timeout=5))

            except ResourceExistsError:
                # Left over from a previous run; safe to reuse.
                pass

    def tearDown(self):
        """Best-effort cleanup: delete the test file system and close the
        async client; cleanup failures must not mask the test result."""
        if not self.is_playback():
            try:
                loop = asyncio.get_event_loop()
                loop.run_until_complete(
                    self.dsc.delete_file_system(self.file_system_name))
                loop.run_until_complete(self.dsc.__aexit__())
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt and
                # SystemExit are no longer swallowed; cleanup stays
                # best-effort for ordinary errors.
                pass

        return super(FileTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX):
        """Return a unique directory name derived from *prefix*."""
        return self.get_resource_name(prefix)

    def _get_file_reference(self, prefix=TEST_FILE_PREFIX):
        """Return a unique file name derived from *prefix*."""
        return self.get_resource_name(prefix)

    async def _create_file_system(self):
        """Create a uniquely named file system and return its client."""
        name = self._get_file_system_reference()
        return await self.dsc.create_file_system(name)

    async def _create_directory_and_return_client(self, directory=None):
        """Create *directory* (or a generated name) and return its client."""
        name = directory or self._get_directory_reference()
        client = self.dsc.get_directory_client(self.file_system_name, name)
        await client.create_directory()
        return client

    async def _create_file_and_return_client(self, directory="", file=None):
        """Create a file (optionally under a fresh directory) and return its client.

        Note: with the default empty *directory* the path becomes
        '/<file>', i.e. the file lands under the root directory.
        """
        if directory:
            await self._create_directory_and_return_client(directory)
        if not file:
            file = self._get_file_reference()
        file_client = self.dsc.get_file_client(self.file_system_name,
                                               directory + '/' + file)
        await file_client.create_file()
        return file_client

    # --Helpers-----------------------------------------------------------------

    async def _test_create_file(self):
        """Creating a file under a fresh directory returns a response."""
        # Arrange
        directory_name = self._get_directory_reference()

        # Create a directory to put the file under that
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        response = await file_client.create_file()

        # Assert
        self.assertIsNotNone(response)

    @record
    def test_create_file_async(self):
        """Recorded sync entry point for the async create-file scenario."""
        asyncio.get_event_loop().run_until_complete(self._test_create_file())

    async def _test_create_file_using_oauth_token_credential(self):
        """Create a file authenticating with an AAD OAuth token credential."""
        # Arrange
        file_name = self._get_file_reference()
        token_credential = self.generate_async_oauth_token()

        # Build a file client authenticated with the token credential
        file_client = DataLakeFileClient(self.dsc.url,
                                         self.file_system_name,
                                         file_name,
                                         credential=token_credential)

        response = await file_client.create_file()

        # Assert
        self.assertIsNotNone(response)

    @record
    def test_create_file_using_oauth_token_credential_async(self):
        """Recorded sync entry point for the OAuth create-file scenario."""
        asyncio.get_event_loop().run_until_complete(
            self._test_create_file_using_oauth_token_credential())

    async def _test_create_file_with_existing_name(self):
        """Re-creating an existing file with IfMissing must raise."""
        # Arrange
        file_client = await self._create_file_and_return_client()

        with self.assertRaises(ResourceExistsError):
            # if the file exists then throw error
            # if_none_match='*' is to make sure no existing file
            await file_client.create_file(
                match_condition=MatchConditions.IfMissing)

    @record
    def test_create_file_with_existing_name_async(self):
        """Recorded sync entry point for the existing-name scenario."""
        asyncio.get_event_loop().run_until_complete(
            self._test_create_file_with_existing_name())

    async def _test_create_file_with_lease_id(self):
        """Re-create a leased file by presenting the active lease."""
        # Arrange
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        # Act
        await file_client.create_file()
        lease = await file_client.acquire_lease()
        # Recreation succeeds because the matching lease is supplied.
        create_resp = await file_client.create_file(lease=lease)

        # Assert: properties reflect the most recent creation.
        file_properties = await file_client.get_file_properties()
        self.assertIsNotNone(file_properties)
        self.assertEqual(file_properties.etag, create_resp.get('etag'))
        self.assertEqual(file_properties.last_modified,
                         create_resp.get('last_modified'))

    @record
    def test_create_file_with_lease_id_async(self):
        """Recorded sync entry point for the lease-id create scenario."""
        asyncio.get_event_loop().run_until_complete(
            self._test_create_file_with_lease_id())

    async def _test_create_file_under_root_directory(self):
        """Create a file directly under the file system's root directory."""
        # Arrange
        # get a file client to interact with the file under root directory
        file_client = self.dsc.get_file_client(self.file_system_name,
                                               "filename")

        response = await file_client.create_file()

        # Assert
        self.assertIsNotNone(response)

    @record
    def test_create_file_under_root_directory_async(self):
        """Recorded sync entry point for the root-directory scenario."""
        asyncio.get_event_loop().run_until_complete(
            self._test_create_file_under_root_directory())

    async def _test_append_data(self):
        """append_data stages bytes and returns a non-None response."""
        directory_name = self._get_directory_reference()

        # Create a directory to put the file under that
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()

        # Act: append 3 bytes at offset 0 (committed only on flush_data,
        # as exercised by _test_flush_data below)
        response = await file_client.append_data(b'abc', 0, 3)

        self.assertIsNotNone(response)

    @record
    def test_append_data_async(self):
        """Recorded sync entry point for the append-data scenario."""
        asyncio.get_event_loop().run_until_complete(self._test_append_data())

    async def _test_append_empty_data(self):
        """Flushing zero bytes on a fresh file succeeds and leaves size 0."""
        file_client = await self._create_file_and_return_client()

        # Act
        await file_client.flush_data(0)
        file_props = await file_client.get_file_properties()

        # BUG FIX: the original `assertIsNotNone(file_props['size'], 0)`
        # passed 0 as the failure *message*, asserting nothing about the
        # value. Assert the intended equality instead.
        self.assertEqual(file_props['size'], 0)

    @record
    def test_append_empty_data_async(self):
        """Recorded sync entry point for the empty-append scenario."""
        asyncio.get_event_loop().run_until_complete(
            self._test_append_empty_data())

    async def _test_flush_data(self):
        """flush_data commits appended bytes; the file size becomes 3."""
        directory_name = self._get_directory_reference()

        # Create a directory to put the file under that
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()

        # Act
        await file_client.append_data(b'abc', 0, 3)
        response = await file_client.flush_data(3)

        # Assert: the committed size matches the flushed position
        prop = await file_client.get_file_properties()
        self.assertIsNotNone(response)
        self.assertEqual(prop['size'], 3)

    @record
    def test_flush_data_async(self):
        """Recorded sync entry point for the flush-data scenario."""
        asyncio.get_event_loop().run_until_complete(self._test_flush_data())

    async def _test_flush_data_with_match_condition(self):
        """flush_data honors etag match conditions: it succeeds while the
        etag is current and fails once the file has been modified."""
        directory_name = self._get_directory_reference()

        # Create a directory to put the file under that
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        resp = await file_client.create_file()

        # Act
        await file_client.append_data(b'abc', 0, 3)

        # flush is successful because it isn't touched
        response = await file_client.flush_data(
            3,
            etag=resp['etag'],
            match_condition=MatchConditions.IfNotModified)

        await file_client.append_data(b'abc', 3, 3)
        with self.assertRaises(ResourceModifiedError):
            # flush is unsuccessful because extra data were appended.
            await file_client.flush_data(
                6,
                etag=resp['etag'],
                match_condition=MatchConditions.IfNotModified)

    @record
    def test_flush_data_with_match_condition_async(self):
        """Recorded sync entry point for the match-condition flush scenario."""
        asyncio.get_event_loop().run_until_complete(
            self._test_flush_data_with_match_condition())

    async def _test_upload_data(self):
        """Parallel upload_data round-trips 400 KiB of random bytes."""
        # parallel upload cannot be recorded
        if TestMode.need_recording_file(self.test_mode):
            return
        directory_name = self._get_directory_reference()

        # Create a directory to put the file under that
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        data = self.get_random_bytes(400 * 1024)
        await file_client.upload_data(data, overwrite=True, max_concurrency=5)

        # Download and compare with the original payload
        downloaded_data = await (await file_client.download_file()).readall()
        self.assertEqual(data, downloaded_data)

    def test_upload_data_async(self):
        """Sync entry point for the parallel-upload scenario (not recorded)."""
        asyncio.get_event_loop().run_until_complete(self._test_upload_data())

    async def _test_upload_data_to_existing_file_async(self):
        """upload_data over an existing file fails without overwrite=True
        and replaces the contents with it."""
        directory_name = self._get_directory_reference()

        # Create a directory to put the file under that
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        # create an existing file
        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()
        await file_client.append_data(b"abc", 0)
        await file_client.flush_data(3)

        # to override the existing file
        data = self.get_random_bytes(100)
        with self.assertRaises(HttpResponseError):
            await file_client.upload_data(data, max_concurrency=5)
        await file_client.upload_data(data, overwrite=True, max_concurrency=5)

        # Download and compare with the new payload
        downloaded_data = await (await file_client.download_file()).readall()
        self.assertEqual(data, downloaded_data)

    @record
    def test_upload_data_to_existing_file_async(self):
        """Recorded sync entry point for the overwrite-upload scenario."""
        asyncio.get_event_loop().run_until_complete(
            self._test_upload_data_to_existing_file_async())

    async def _test_upload_data_to_existing_file_with_content_settings_async(
            self):
        """Overwrite via etag match and verify content settings stick (live only)."""
        # etag in async recording file cannot be parsed properly
        if TestMode.need_recording_file(self.test_mode):
            return
        directory_name = self._get_directory_reference()

        # Create a directory to put the file under that
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        # create an existing file
        file_client = directory_client.get_file_client('filename')
        resp = await file_client.create_file()
        etag = resp['etag']

        # to override the existing file
        data = self.get_random_bytes(100)
        content_settings = ContentSettings(content_language='spanish',
                                           content_disposition='inline')

        # IfNotModified + the creation etag: upload only if nothing changed since
        await file_client.upload_data(
            data,
            max_concurrency=5,
            content_settings=content_settings,
            etag=etag,
            match_condition=MatchConditions.IfNotModified)

        downloaded_data = await (await file_client.download_file()).readall()
        properties = await file_client.get_file_properties()

        self.assertEqual(data, downloaded_data)
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_upload_data_to_existing_file_with_content_settings_async(self):
        """Recorded entry point for the content-settings overwrite test."""
        coro = self._test_upload_data_to_existing_file_with_content_settings_async()
        asyncio.get_event_loop().run_until_complete(coro)

    async def _test_read_file(self):
        """Append + flush 1 KB of data, then verify download_file returns it unchanged."""
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)

        # upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # download the data and make sure it is the same as uploaded data
        downloaded_data = await (await file_client.download_file()).readall()
        self.assertEqual(data, downloaded_data)

    @record
    def test_read_file_async(self):
        """Recorded entry point for _test_read_file."""
        asyncio.get_event_loop().run_until_complete(self._test_read_file())

    async def _test_read_file_with_user_delegation_key(self):
        """Read a file through a SAS signed with a user delegation key (live only)."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Create file
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)
        # Upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # Get user delegation key
        token_credential = self.generate_async_oauth_token()
        service_client = DataLakeServiceClient(self._get_oauth_account_url(),
                                               credential=token_credential)
        user_delegation_key = await service_client.get_user_delegation_key(
            datetime.utcnow(),
            datetime.utcnow() + timedelta(hours=1))

        # file-level SAS scoped to this path, valid for one hour
        sas_token = generate_file_sas(
            file_client.account_name,
            file_client.file_system_name,
            None,
            file_client.path_name,
            user_delegation_key,
            permission=FileSasPermissions(read=True,
                                          create=True,
                                          write=True,
                                          delete=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )

        # download the data and make sure it is the same as uploaded data
        new_file_client = DataLakeFileClient(self._get_account_url(),
                                             file_client.file_system_name,
                                             file_client.path_name,
                                             credential=sas_token)
        downloaded_data = await (await
                                 new_file_client.download_file()).readall()
        self.assertEqual(data, downloaded_data)

    @record
    def test_read_file_with_user_delegation_key_async(self):
        """Recorded entry point for the user-delegation-key read test."""
        asyncio.get_event_loop().run_until_complete(
            self._test_read_file_with_user_delegation_key())

    async def _test_read_file_into_file(self):
        """Download a file into a local file via readinto and compare contents."""
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)

        # upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # download the data into a file and make sure it is the same as uploaded data
        with open(FILE_PATH, 'wb') as stream:
            download = await file_client.download_file(max_concurrency=2)
            await download.readinto(stream)

        # Assert
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(data, actual)

    @record
    def test_read_file_into_file_async(self):
        """Recorded entry point for _test_read_file_into_file."""
        asyncio.get_event_loop().run_until_complete(self._test_read_file_into_file())

    async def _test_read_file_to_text(self):
        """Upload text data and verify download with encoding='utf-8' round-trips it."""
        file_client = await self._create_file_and_return_client()
        data = self.get_random_text_data(1024)

        # upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # download the text data and make sure it is the same as uploaded data
        downloaded_data = await (await file_client.download_file(
            max_concurrency=2, encoding="utf-8")).readall()

        # Assert
        self.assertEqual(data, downloaded_data)

    @record
    def test_read_file_to_text_async(self):
        """Recorded entry point for _test_read_file_to_text."""
        asyncio.get_event_loop().run_until_complete(self._test_read_file_to_text())

    async def _test_account_sas(self):
        """Read-only account SAS can read properties but not append data (live only)."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        file_name = self._get_file_reference()
        # create a file under root directory
        await self._create_file_and_return_client(file=file_name)

        # generate a token with file level read permission
        token = generate_account_sas(
            self.dsc.account_name,
            self.dsc.credential.account_key,
            ResourceTypes(file_system=True, object=True),
            AccountSasPermissions(read=True),
            datetime.utcnow() + timedelta(hours=1),
        )

        # read the created file which is under root directory
        file_client = DataLakeFileClient(self.dsc.url,
                                         self.file_system_name,
                                         file_name,
                                         credential=token)
        properties = await file_client.get_file_properties()

        # make sure we can read the file properties
        self.assertIsNotNone(properties)

        # try to write to the created file with the token
        with self.assertRaises(HttpResponseError):
            await file_client.append_data(b"abcd", 0, 4)

    @record
    def test_account_sas_async(self):
        """Recorded entry point for _test_account_sas."""
        asyncio.get_event_loop().run_until_complete(self._test_account_sas())

    async def _test_file_sas_only_applies_to_file_level(self):
        """A file-scoped SAS works on the file but is rejected at directory and
        file-system scope (live only)."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        file_name = self._get_file_reference()
        directory_name = self._get_directory_reference()
        await self._create_file_and_return_client(directory=directory_name,
                                                  file=file_name)

        # generate a token with file level read and write permissions
        token = generate_file_sas(
            self.dsc.account_name,
            self.file_system_name,
            directory_name,
            file_name,
            self.dsc.credential.account_key,
            permission=FileSasPermissions(read=True, write=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )

        # read the created file which is under root directory
        file_client = DataLakeFileClient(self.dsc.url,
                                         self.file_system_name,
                                         directory_name + '/' + file_name,
                                         credential=token)
        properties = await file_client.get_file_properties()

        # make sure we can read the file properties
        self.assertIsNotNone(properties)

        # try to write to the created file with the token
        response = await file_client.append_data(b"abcd",
                                                 0,
                                                 4,
                                                 validate_content=True)
        self.assertIsNotNone(response)

        # the token is for file level, so users are not supposed to have access to file system level operations
        file_system_client = FileSystemClient(self.dsc.url,
                                              self.file_system_name,
                                              credential=token)
        with self.assertRaises(ClientAuthenticationError):
            await file_system_client.get_file_system_properties()

        # the token is for file level, so users are not supposed to have access to directory level operations
        directory_client = DataLakeDirectoryClient(self.dsc.url,
                                                   self.file_system_name,
                                                   directory_name,
                                                   credential=token)
        with self.assertRaises(ClientAuthenticationError):
            await directory_client.get_directory_properties()

    @record
    def test_file_sas_only_applies_to_file_level_async(self):
        """Recorded entry point for _test_file_sas_only_applies_to_file_level."""
        asyncio.get_event_loop().run_until_complete(
            self._test_file_sas_only_applies_to_file_level())

    async def _test_delete_file(self):
        """After delete_file, get_file_properties must raise ResourceNotFoundError."""
        # Arrange
        file_client = await self._create_file_and_return_client()

        await file_client.delete_file()

        with self.assertRaises(ResourceNotFoundError):
            await file_client.get_file_properties()

    @record
    def test_delete_file_async(self):
        """Recorded entry point for _test_delete_file."""
        asyncio.get_event_loop().run_until_complete(self._test_delete_file())

    async def _test_delete_file_with_if_unmodified_since(self):
        """Delete succeeds when if_unmodified_since equals the current mtime."""
        # Arrange
        file_client = await self._create_file_and_return_client()

        props = await file_client.get_file_properties()
        await file_client.delete_file(
            if_unmodified_since=props['last_modified'])

        # Make sure the file was deleted
        with self.assertRaises(ResourceNotFoundError):
            await file_client.get_file_properties()

    @record
    def test_delete_file_with_if_unmodified_since_async(self):
        """Recorded entry point for _test_delete_file_with_if_unmodified_since."""
        asyncio.get_event_loop().run_until_complete(
            self._test_delete_file_with_if_unmodified_since())

    async def _test_set_access_control(self):
        """set_access_control with octal permissions returns a non-None response."""
        client = await self._create_file_and_return_client()

        resp = await client.set_access_control(permissions='0777')

        # Assert
        self.assertIsNotNone(resp)

    @record
    def test_set_access_control_async(self):
        """Recorded entry point for _test_set_access_control."""
        asyncio.get_event_loop().run_until_complete(self._test_set_access_control())

    async def _test_set_access_control_with_match_conditions(self):
        """IfMissing on an existing file must raise ResourceModifiedError."""
        client = await self._create_file_and_return_client()

        with self.assertRaises(ResourceModifiedError):
            await client.set_access_control(
                permissions='0777', match_condition=MatchConditions.IfMissing)

    @record
    def test_set_access_control_with_match_conditions_async(self):
        """Recorded entry point for _test_set_access_control_with_match_conditions."""
        asyncio.get_event_loop().run_until_complete(
            self._test_set_access_control_with_match_conditions())

    async def _test_get_access_control(self):
        """get_access_control returns data after permissions have been set."""
        client = await self._create_file_and_return_client()
        await client.set_access_control(permissions='0777')

        # Act
        acl = await client.get_access_control()

        # Assert
        self.assertIsNotNone(acl)

    @record
    def test_get_access_control_async(self):
        """Recorded entry point for _test_get_access_control."""
        asyncio.get_event_loop().run_until_complete(self._test_get_access_control())

    async def _test_get_access_control_with_if_modified_since(self):
        """get_access_control succeeds when if_modified_since lies in the past."""
        client = await self._create_file_and_return_client()
        await client.set_access_control(permissions='0777')

        props = await client.get_file_properties()

        # Act: a cutoff 15 minutes before the last modification always matches
        cutoff = props['last_modified'] - timedelta(minutes=15)
        acl = await client.get_access_control(if_modified_since=cutoff)

        # Assert
        self.assertIsNotNone(acl)

    @record
    def test_get_access_control_with_if_modified_since_async(self):
        """Recorded entry point for _test_get_access_control_with_if_modified_since."""
        asyncio.get_event_loop().run_until_complete(
            self._test_get_access_control_with_if_modified_since())

    async def _test_get_properties(self):
        """Properties reflect size, metadata and content settings set at creation."""
        # Arrange
        directory_client = await self._create_directory_and_return_client()

        metadata = {'hello': 'world', 'number': '42'}
        content_settings = ContentSettings(content_language='spanish',
                                           content_disposition='inline')
        file_client = await directory_client.create_file(
            "newfile", metadata=metadata, content_settings=content_settings)
        await file_client.append_data(b"abc", 0, 3)
        await file_client.flush_data(3)
        properties = await file_client.get_file_properties()

        # Assert
        self.assertTrue(properties)
        self.assertEqual(properties.size, 3)
        self.assertEqual(properties.metadata['hello'], metadata['hello'])
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @record
    def test_get_properties_async(self):
        """Recorded entry point for _test_get_properties."""
        asyncio.get_event_loop().run_until_complete(self._test_get_properties())

    async def _test_rename_file_with_non_used_name(self):
        """Renaming to an unused name keeps data and updates the client's path."""
        file_client = await self._create_file_and_return_client()
        data_bytes = b"abc"
        await file_client.append_data(data_bytes, 0, 3)
        await file_client.flush_data(3)
        # rename target is "<file system>/<new path>"
        new_client = await file_client.rename_file(
            file_client.file_system_name + '/' + 'newname')

        data = await (await new_client.download_file()).readall()
        self.assertEqual(data, data_bytes)
        self.assertEqual(new_client.path_name, "newname")

    @record
    def test_rename_file_with_non_used_name_async(self):
        """Recorded entry point for _test_rename_file_with_non_used_name."""
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_file_with_non_used_name())

    async def _test_rename_file_to_existing_file(self):
        """Renaming onto an existing file overwrites the target's content.

        Fix: removed the unused locals ``old_url``/``new_url`` that were
        assigned from ``.url`` and never read.
        """
        # create the existing file
        existing_file_client = await self._create_file_and_return_client(
            file="existingfile")
        await existing_file_client.append_data(b"a", 0, 1)
        await existing_file_client.flush_data(1)

        # prepare to rename the file to the existing file
        file_client = await self._create_file_and_return_client()
        data_bytes = b"abc"
        await file_client.append_data(data_bytes, 0, 3)
        await file_client.flush_data(3)
        new_client = await file_client.rename_file(
            file_client.file_system_name + '/' +
            existing_file_client.path_name)

        data = await (await new_client.download_file()).readall()
        # the existing file was overridden
        self.assertEqual(data, data_bytes)

    @record
    def test_rename_file_to_existing_file_async(self):
        """Recorded entry point for _test_rename_file_to_existing_file."""
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_file_to_existing_file())

    async def _test_rename_file_will_not_change_existing_directory(self):
        """Renaming a file across directories must not disturb sibling files (live only)."""
        if TestMode.need_recording_file(self.test_mode):
            return
        # create none empty directory(with 2 files)
        dir1 = await self._create_directory_and_return_client(directory="dir1")
        f1 = await dir1.create_file("file1")
        await f1.append_data(b"file1", 0, 5)
        await f1.flush_data(5)
        f2 = await dir1.create_file("file2")
        await f2.append_data(b"file2", 0, 5)
        await f2.flush_data(5)

        # create another none empty directory(with 2 files)
        dir2 = await self._create_directory_and_return_client(directory="dir2")
        f3 = await dir2.create_file("file3")
        await f3.append_data(b"file3", 0, 5)
        await f3.flush_data(5)
        f4 = await dir2.create_file("file4")
        await f4.append_data(b"file4", 0, 5)
        await f4.flush_data(5)

        # move file3 onto file1's path, overwriting file1's content
        new_client = await f3.rename_file(f1.file_system_name + '/' +
                                          f1.path_name)

        self.assertEqual(await (await new_client.download_file()).readall(),
                         b"file3")

        # make sure the data in file2 and file4 weren't touched
        f2_data = await (await f2.download_file()).readall()
        self.assertEqual(f2_data, b"file2")

        f4_data = await (await f4.download_file()).readall()
        self.assertEqual(f4_data, b"file4")

        # the source path no longer exists after the rename
        with self.assertRaises(HttpResponseError):
            await (await f3.download_file()).readall()

    @record
    def test_rename_file_will_not_change_existing_directory_async(self):
        """Recorded entry point for _test_rename_file_will_not_change_existing_directory."""
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_file_will_not_change_existing_directory())
 # NOTE(review): this fragment appears to be merge/paste residue -- it sits at
 # a one-space indent (a module-level IndentationError in Python) and duplicates
 # the _setUp defined inside FileSystemTest below. Confirm and remove.
 def _setUp(self, account_name, account_key):
     """Create a DataLakeServiceClient for *account_name* (duplicate of FileSystemTest._setUp)."""
     url = self._get_account_url(account_name)
     self.dsc = DataLakeServiceClient(url, credential=account_key, logging_enable=True)
     self.config = self.dsc._config
class FileSystemTest(StorageTestCase):
    def _setUp(self, account_name, account_key):
        """Build the DataLakeServiceClient each test uses against *account_name*."""
        url = self._get_account_url(account_name)
        self.dsc = DataLakeServiceClient(url, credential=account_key,
                                         transport=AiohttpTestTransport(), logging_enable=True)
        self.config = self.dsc._config
        # names registered by _get_file_system_reference; deleted in tearDown
        self.test_file_systems = []

    def tearDown(self):
        """Best-effort cleanup: delete file systems created by the test, then
        close the service client's session.

        Fix: the close call read ``self.fsc``, an attribute that is never set
        (the client created in ``_setUp`` is ``self.dsc``); the bare ``except``
        silently swallowed the AttributeError, so the session was never closed.
        """
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                for file_system in self.test_file_systems:
                    loop.run_until_complete(self.dsc.delete_file_system(file_system))
                loop.run_until_complete(self.dsc.__aexit__())
            except Exception:
                # never fail a test from teardown
                pass

        return super(FileSystemTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_file_system_reference(self, prefix=TEST_FILE_SYSTEM_PREFIX):
        """Generate a unique file-system name and register it for teardown."""
        name = self.get_resource_name(prefix)
        self.test_file_systems.append(name)
        return name

    async def _create_file_system(self, file_system_prefix=TEST_FILE_SYSTEM_PREFIX):
        """Create (and track for teardown) a fresh file system; return its client."""
        name = self._get_file_system_reference(prefix=file_system_prefix)
        return await self.dsc.create_file_system(name)

    # --Helpers-----------------------------------------------------------------

    @DataLakePreparer()
    async def test_create_file_system_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """create_file_system on a fresh name returns a truthy result."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_system_name = self._get_file_system_reference()

        # Act
        file_system_client = self.dsc.get_file_system_client(file_system_name)
        created = await file_system_client.create_file_system()

        # Assert
        self.assertTrue(created)

    @DataLakePreparer()
    async def test_file_system_exists(self, datalake_storage_account_name, datalake_storage_account_key):
        """exists() is True for a created file system, False for a missing one."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_system_name = self._get_file_system_reference()

        # Act
        file_system_client1 = self.dsc.get_file_system_client(file_system_name)
        file_system_client2 = self.dsc.get_file_system_client("nonexistentfs")
        await file_system_client1.create_file_system()

        self.assertTrue(await file_system_client1.exists())
        self.assertFalse(await file_system_client2.exists())

    @DataLakePreparer()
    async def test_create_file_system_with_metadata_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Metadata supplied at creation is returned by get_file_system_properties."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        metadata = {'hello': 'world', 'number': '42'}
        file_system_name = self._get_file_system_reference()

        # Act
        file_system_client = self.dsc.get_file_system_client(file_system_name)
        created = await file_system_client.create_file_system(metadata=metadata)

        # Assert
        properties = await file_system_client.get_file_system_properties()
        self.assertTrue(created)
        self.assertDictEqual(properties.metadata, metadata)

    @DataLakePreparer()
    async def test_list_file_systems_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """list_file_systems includes the created file system with policy/hold flags."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_system_name = self._get_file_system_reference()
        file_system = await self.dsc.create_file_system(file_system_name)

        # Act
        file_systems = []
        async for filesystem in self.dsc.list_file_systems():
            file_systems.append(filesystem)

        # Assert
        self.assertIsNotNone(file_systems)
        self.assertGreaterEqual(len(file_systems), 1)
        self.assertIsNotNone(file_systems[0])
        self.assertNamedItemInContainer(file_systems, file_system.file_system_name)
        self.assertIsNotNone(file_systems[0].has_immutability_policy)
        self.assertIsNotNone(file_systems[0].has_legal_hold)

    @DataLakePreparer()
    async def test_delete_file_system_with_existing_file_system_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """delete_file_system on an existing file system completes and returns None."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_system = await self._create_file_system()

        # Act
        deleted = await file_system.delete_file_system()

        # Assert
        self.assertIsNone(deleted)

    @DataLakePreparer()
    async def test_rename_file_system(self, datalake_storage_account_name, datalake_storage_account_key):
        """_rename_file_system moves the name; collisions and bad names raise.

        Playback-only: the rename feature is not exercised live here.
        """
        if not self.is_playback():
            return
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        old_name1 = self._get_file_system_reference(prefix="oldcontainer1")
        old_name2 = self._get_file_system_reference(prefix="oldcontainer2")
        new_name = self._get_file_system_reference(prefix="newcontainer")
        filesystem1 = await self.dsc.create_file_system(old_name1)
        await self.dsc.create_file_system(old_name2)

        new_filesystem = await self.dsc._rename_file_system(name=old_name1, new_name=new_name)
        # renaming a second file system onto the now-taken name must fail
        with self.assertRaises(HttpResponseError):
            await self.dsc._rename_file_system(name=old_name2, new_name=new_name)
        # the old name no longer resolves
        with self.assertRaises(HttpResponseError):
            await filesystem1.get_file_system_properties()
        # renaming a nonexistent source must fail
        with self.assertRaises(HttpResponseError):
            await self.dsc._rename_file_system(name="badfilesystem", new_name="filesystem")
        props = await new_filesystem.get_file_system_properties()
        self.assertEqual(new_name, props.name)

    @DataLakePreparer()
    async def test_rename_file_system_with_file_system_client(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """_rename_file_system via FileSystemClient (skipped: feature not enabled)."""
        pytest.skip("Feature not yet enabled. Make sure to record this test once enabled.")
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        old_name1 = self._get_file_system_reference(prefix="oldcontainer1")
        old_name2 = self._get_file_system_reference(prefix="oldcontainer2")
        new_name = self._get_file_system_reference(prefix="newcontainer")
        bad_name = self._get_file_system_reference(prefix="badcontainer")
        filesystem1 = await self.dsc.create_file_system(old_name1)
        file_system2 = await self.dsc.create_file_system(old_name2)
        bad_file_system = self.dsc.get_file_system_client(bad_name)

        new_filesystem = await filesystem1._rename_file_system(new_name=new_name)
        with self.assertRaises(HttpResponseError):
            await file_system2._rename_file_system(new_name=new_name)
        with self.assertRaises(HttpResponseError):
            await filesystem1.get_file_system_properties()
        with self.assertRaises(HttpResponseError):
            await bad_file_system._rename_file_system(new_name="filesystem")
        new_file_system_props = await new_filesystem.get_file_system_properties()
        self.assertEqual(new_name, new_file_system_props.name)

    @DataLakePreparer()
    async def test_rename_file_system_with_source_lease(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Rename of a leased file system requires the correct lease id (playback only)."""
        if not self.is_playback():
            return
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        old_name = self._get_file_system_reference(prefix="old")
        new_name = self._get_file_system_reference(prefix="new")
        filesystem = await self.dsc.create_file_system(old_name)
        filesystem_lease_id = await filesystem.acquire_lease()
        # no lease id, then a wrong lease id: both must be rejected
        with self.assertRaises(HttpResponseError):
            await self.dsc._rename_file_system(name=old_name, new_name=new_name)
        with self.assertRaises(HttpResponseError):
            await self.dsc._rename_file_system(name=old_name, new_name=new_name, lease="bad_id")
        new_filesystem = await self.dsc._rename_file_system(name=old_name, new_name=new_name, lease=filesystem_lease_id)
        props = await new_filesystem.get_file_system_properties()
        self.assertEqual(new_name, props.name)

    @DataLakePreparer()
    async def test_undelete_file_system(self, datalake_storage_account_name, datalake_storage_account_key):
        """Soft-deleted file systems can be restored under a new name (playback only)."""
        # Needs soft delete enabled account.
        if not self.is_playback():
            return
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        name = self._get_file_system_reference(prefix="filesystem")
        filesystem_client = await self.dsc.create_file_system(name)

        await filesystem_client.delete_file_system()
        # to make sure the filesystem deleted
        with self.assertRaises(ResourceNotFoundError):
            await filesystem_client.get_file_system_properties()

        filesystem_list = []
        async for fs in self.dsc.list_file_systems(include_deleted=True):
            filesystem_list.append(fs)
        self.assertTrue(len(filesystem_list) >= 1)

        restored_version = 0
        for filesystem in filesystem_list:
            # find the deleted filesystem and restore it
            if filesystem.deleted and filesystem.name == filesystem_client.file_system_name:
                restored_fs_client = await self.dsc.undelete_file_system(filesystem.name,
                                                                         filesystem.deleted_version,
                                                                         new_name="restored" +
                                                                                  name + str(restored_version))
                restored_version += 1

                # to make sure the deleted filesystem is restored
                props = await restored_fs_client.get_file_system_properties()
                self.assertIsNotNone(props)

    @DataLakePreparer()
    async def test_restore_to_existing_file_system(self, datalake_storage_account_name, datalake_storage_account_key):
        """Undelete onto a name that already exists must raise (playback only)."""
        # Needs soft delete enabled account.
        if not self.is_playback():
            return
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # get an existing filesystem
        existing_name = self._get_file_system_reference(prefix="existing")
        name = self._get_file_system_reference(prefix="filesystem")
        existing_filesystem_client = await self.dsc.create_file_system(existing_name)
        filesystem_client = await self.dsc.create_file_system(name)

        # Act
        await filesystem_client.delete_file_system()
        # to make sure the filesystem deleted
        with self.assertRaises(ResourceNotFoundError):
            await filesystem_client.get_file_system_properties()

        filesystem_list = []
        async for fs in self.dsc.list_file_systems(include_deleted=True):
            filesystem_list.append(fs)
        self.assertTrue(len(filesystem_list) >= 1)

        for filesystem in filesystem_list:
            # find the deleted filesystem and restore it
            if filesystem.deleted and filesystem.name == filesystem_client.file_system_name:
                with self.assertRaises(HttpResponseError):
                    await self.dsc.undelete_file_system(filesystem.name, filesystem.deleted_version,
                                                        new_name=existing_filesystem_client.file_system_name)

    @DataLakePreparer()
    async def test_restore_file_system_with_sas(self, datalake_storage_account_name, datalake_storage_account_key):
        """Undelete a soft-deleted file system using an account SAS (skipped)."""
        pytest.skip(
            "We are generating a SAS token therefore play only live but we also need a soft delete enabled account.")
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        token = generate_account_sas(
            self.dsc.account_name,
            self.dsc.credential.account_key,
            ResourceTypes(service=True, file_system=True),
            AccountSasPermissions(read=True, write=True, list=True, delete=True),
            datetime.utcnow() + timedelta(hours=1),
        )
        dsc = DataLakeServiceClient(self.dsc.url, token)
        name = self._get_file_system_reference(prefix="filesystem")
        filesystem_client = await dsc.create_file_system(name)
        await filesystem_client.delete_file_system()
        # to make sure the filesystem is deleted
        with self.assertRaises(ResourceNotFoundError):
            await filesystem_client.get_file_system_properties()

        # NOTE(review): listing uses self.dsc (key credential) while create/delete
        # used the SAS client `dsc` -- confirm this mix is intended.
        filesystem_list = []
        async for fs in self.dsc.list_file_systems(include_deleted=True):
            filesystem_list.append(fs)
        self.assertTrue(len(filesystem_list) >= 1)

        restored_version = 0
        for filesystem in filesystem_list:
            # find the deleted filesystem and restore it
            if filesystem.deleted and filesystem.name == filesystem_client.file_system_name:
                restored_fs_client = await dsc.undelete_file_system(filesystem.name, filesystem.deleted_version,
                                                                    new_name="restored" + name + str(restored_version))
                restored_version += 1

                # to make sure the deleted filesystem is restored
                props = await restored_fs_client.get_file_system_properties()
                self.assertIsNotNone(props)

    @DataLakePreparer()
    async def test_delete_none_existing_file_system_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Deleting a filesystem that was never created raises ResourceNotFoundError."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        missing_fs_client = self.dsc.get_file_system_client("fakeclient")

        # Act: the service must report the filesystem as missing.
        with self.assertRaises(ResourceNotFoundError):
            await missing_fs_client.delete_file_system(match_condition=MatchConditions.IfMissing)

    @DataLakePreparer()
    async def test_list_file_systems_with_include_metadata_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """list_file_systems(include_metadata=True) returns filesystem metadata."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a filesystem with known metadata.
        file_system = await self._create_file_system()
        metadata = {'hello': 'world', 'number': '42'}
        await file_system.set_file_system_metadata(metadata)

        # Act: collect all matching filesystems.
        file_systems = [
            fs async for fs in self.dsc.list_file_systems(
                name_starts_with=file_system.file_system_name, include_metadata=True)
        ]

        # Assert: our filesystem is listed and its metadata round-trips.
        self.assertIsNotNone(file_systems)
        self.assertGreaterEqual(len(file_systems), 1)
        self.assertIsNotNone(file_systems[0])
        self.assertNamedItemInContainer(file_systems, file_system.file_system_name)
        self.assertDictEqual(file_systems[0].metadata, metadata)

    @DataLakePreparer()
    async def test_set_file_system_acl_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Setting access policies updates public access and signed identifiers."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_system = await self._create_file_system()
        policy = AccessPolicy(permission=FileSystemSasPermissions(read=True),
                              expiry=datetime.utcnow() + timedelta(hours=1),
                              start=datetime.utcnow())

        # One identifier plus filesystem-level public access.
        one_identifier = {'testid': policy}
        response = await file_system.set_file_system_access_policy(
            one_identifier, public_access=PublicAccess.FileSystem)

        self.assertIsNotNone(response.get('etag'))
        self.assertIsNotNone(response.get('last_modified'))

        first_acl = await file_system.get_file_system_access_policy()
        self.assertIsNotNone(first_acl['public_access'])
        self.assertEqual(len(first_acl['signed_identifiers']), 1)

        # Omitting public_access on the second call resets it to None.
        two_identifiers = {'testid': policy, 'test2': policy}
        await file_system.set_file_system_access_policy(two_identifiers)
        second_acl = await file_system.get_file_system_access_policy()
        self.assertIsNone(second_acl['public_access'])
        self.assertEqual(len(second_acl['signed_identifiers']), 2)

    @DataLakePreparer()
    async def test_list_file_systems_by_page_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """by_page() honors results_per_page when listing filesystems."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: six filesystems that all match the "file" prefix.
        for index in range(0, 6):
            await self._create_file_system(file_system_prefix="filesystem{}".format(index))

        # Act: fetch just the first page of three results.
        pager = self.dsc.list_file_systems(
            results_per_page=3,
            name_starts_with="file",
            include_metadata=True).by_page()
        first_page = await pager.__anext__()
        file_systems = [fs async for fs in first_page]

        # Assert
        self.assertIsNotNone(file_systems)
        self.assertGreaterEqual(len(file_systems), 3)

    @DataLakePreparer()
    async def test_list_file_systems_with_public_access_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Public-access level set at creation is surfaced when listing."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a filesystem with blob-level public access and metadata.
        file_system_name = self._get_file_system_reference()
        file_system = self.dsc.get_file_system_client(file_system_name)
        await file_system.create_file_system(public_access="blob")
        metadata = {'hello': 'world', 'number': '42'}
        await file_system.set_file_system_metadata(metadata)

        # Act
        file_systems = [
            fs async for fs in self.dsc.list_file_systems(
                name_starts_with=file_system.file_system_name, include_metadata=True)
        ]

        # Assert: metadata round-trips and "blob" access surfaces as PublicAccess.File.
        self.assertIsNotNone(file_systems)
        self.assertGreaterEqual(len(file_systems), 1)
        self.assertIsNotNone(file_systems[0])
        self.assertNamedItemInContainer(file_systems, file_system.file_system_name)
        self.assertDictEqual(file_systems[0].metadata, metadata)
        self.assertTrue(file_systems[0].public_access is PublicAccess.File)

    @DataLakePreparer()
    async def test_get_file_system_properties_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """get_file_system_properties returns metadata and policy flags."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a filesystem carrying known metadata.
        metadata = {'hello': 'world', 'number': '42'}
        file_system = await self._create_file_system()
        await file_system.set_file_system_metadata(metadata)

        # Act
        properties = await file_system.get_file_system_properties()

        # Assert
        self.assertIsNotNone(properties)
        self.assertDictEqual(properties.metadata, metadata)
        self.assertIsNotNone(properties.has_immutability_policy)
        self.assertIsNotNone(properties.has_legal_hold)

    @DataLakePreparer()
    async def test_service_client_session_closes_after_filesystem_creation(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """A second client keeps working after a context-managed client closes."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: one client managed manually, one via async-with.
        standalone_client = DataLakeServiceClient(self.dsc.url, credential=datalake_storage_account_key)
        async with DataLakeServiceClient(
                self.dsc.url, credential=datalake_storage_account_key) as scoped_client:
            fs1 = await scoped_client.create_file_system(self._get_file_system_reference(prefix="fs1"))
            await fs1.delete_file_system()
        # The standalone client's session must still be usable after the
        # scoped client exited and closed its own session.
        await standalone_client.create_file_system(self._get_file_system_reference(prefix="fs2"))
        await standalone_client.close()

    @DataLakePreparer()
    async def test_list_paths_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """get_paths lists every directory created at the filesystem root."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: six root-level directories.
        file_system = await self._create_file_system()
        for index in range(0, 6):
            await file_system.create_directory("dir1{}".format(index))

        paths = [path async for path in file_system.get_paths(upn=True)]

        self.assertEqual(len(paths), 6)

    @DataLakePreparer()
    async def test_list_paths_which_are_all_files_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """get_paths lists every file created at the filesystem root."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: six root-level files.
        file_system = await self._create_file_system()
        for index in range(0, 6):
            await file_system.create_file("file{}".format(index))

        paths = [path async for path in file_system.get_paths(upn=True)]

        self.assertEqual(len(paths), 6)

    @DataLakePreparer()
    async def test_list_paths_with_max_per_page_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Paging through get_paths respects max_results and continuation tokens."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: six directories to page over.
        file_system = await self._create_file_system()
        for index in range(0, 6):
            await file_system.create_directory("dir1{}".format(index))

        # First page: at most two entries.
        first_pager = file_system.get_paths(max_results=2, upn=True).by_page()
        first_page_paths = [path async for path in await first_pager.__anext__()]

        # Second listing resumes from the first pager's continuation token.
        second_pager = file_system.get_paths(max_results=4, upn=True) \
            .by_page(continuation_token=first_pager.continuation_token)
        second_page_paths = [path async for path in await second_pager.__anext__()]

        self.assertEqual(len(first_page_paths), 2)
        self.assertEqual(len(second_page_paths), 4)

    @DataLakePreparer()
    async def test_list_paths_under_specific_path_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """get_paths(path=...) lists only the entries under the given sub-path.

        Also checks that content_length is populated for the flushed file.
        Fix: renamed the local ``dir``, which shadowed the builtin.
        """
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_system = await self._create_file_system()
        for i in range(0, 6):
            directory = await file_system.create_directory("dir1{}".format(i))

            # create a subdirectory under the current directory
            subdir = await directory.create_sub_directory("subdir")
            await subdir.create_sub_directory("subsub")

            # create a 5-byte file under the current subdirectory
            file_client = await subdir.create_file("file")
            await file_client.append_data(b"abced", 0, 5)
            await file_client.flush_data(5)

        # Act: list only "dir10/subdir" — it holds exactly "file" and "subsub".
        generator1 = file_system.get_paths(path="dir10/subdir", max_results=2, upn=True).by_page()
        paths = []
        async for path in await generator1.__anext__():
            paths.append(path)

        self.assertEqual(len(paths), 2)
        self.assertEqual(paths[0].content_length, 5)

    @DataLakePreparer()
    async def test_list_paths_recursively_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Recursive get_paths lists every nested directory and file.

        Fix: renamed the local ``dir``, which shadowed the builtin.
        """
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_system = await self._create_file_system()
        for i in range(0, 6):
            directory = await file_system.create_directory("dir1{}".format(i))

            # create a subdirectory under the current directory
            subdir = await directory.create_sub_directory("subdir")
            await subdir.create_sub_directory("subsub")

            # create a file under the current directory
            await subdir.create_file("file")

        paths = []
        async for path in file_system.get_paths(recursive=True, upn=True):
            paths.append(path)

        # 6 top-level dirs x (dir + subdir + subsub + file) = 24 subpaths in total
        self.assertEqual(len(paths), 24)

    @DataLakePreparer()
    async def test_list_paths_pages_correctly(self, datalake_storage_account_name, datalake_storage_account_key):
        """Twelve paths paged six-at-a-time yield exactly two pages."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: six directories plus six files = twelve paths.
        file_system = await self._create_file_system(file_system_prefix="filesystem1")
        for index in range(0, 6):
            await file_system.create_directory("dir1{}".format(index))
        for index in range(0, 6):
            await file_system.create_file("file{}".format(index))

        pager = file_system.get_paths(max_results=6, upn=True).by_page()
        first_page = [path async for path in await pager.__anext__()]
        second_page = [path async for path in await pager.__anext__()]

        # A third page must not exist.
        with self.assertRaises(StopAsyncIteration):
            third_page = []
            async for path in await pager.__anext__():
                third_page.append(path)

        self.assertEqual(len(first_page), 6)
        self.assertEqual(len(second_page), 6)

    @DataLakePreparer()
    async def test_get_deleted_paths(self, datalake_storage_account_name, datalake_storage_account_key):
        """list_deleted_paths surfaces soft-deleted paths with deletion ids.

        Playback-only: skipped outside recorded playback (requires a
        soft-delete-enabled account).
        """
        if not self.is_playback():
            return
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: two root files, three directories, and two files nested
        # under dir3 (one inside a subdirectory).
        file_system = await self._create_file_system()
        file0 = await file_system.create_file("file0")
        file1 = await file_system.create_file("file1")

        dir1 = await file_system.create_directory("dir1")
        dir2 = await file_system.create_directory("dir2")
        dir3 = await file_system.create_directory("dir3")
        file_in_dir3 = await dir3.create_file("file_in_dir3")
        file_in_subdir = await dir3.create_file("subdir/file_in_subdir")

        # Soft-delete six of the created paths (dir3 itself remains).
        await file0.delete_file()
        await file1.delete_file()
        await dir1.delete_directory()
        await dir2.delete_directory()
        await file_in_dir3.delete_file()
        await file_in_subdir.delete_file()
        deleted_paths = []
        async for path in file_system.list_deleted_paths():
            deleted_paths.append(path)
        dir3_paths = []
        async for path in file_system.list_deleted_paths(path_prefix="dir3/"):
            dir3_paths.append(path)

        # Assert: all six deletions are listed; the prefix filter narrows to
        # the two paths under dir3, each carrying a deletion_id.
        self.assertEqual(len(deleted_paths), 6)
        self.assertEqual(len(dir3_paths), 2)
        self.assertIsNotNone(dir3_paths[0].deletion_id)
        self.assertIsNotNone(dir3_paths[1].deletion_id)
        self.assertEqual(dir3_paths[0].name, 'dir3/file_in_dir3')
        self.assertEqual(dir3_paths[1].name, 'dir3/subdir/file_in_subdir')

        # Page the deleted listing: a first page of 2, then resume for 4 more
        # using the first pager's continuation token.
        paths_generator1 = file_system.list_deleted_paths(results_per_page=2).by_page()
        paths1 = []
        async for path in await paths_generator1.__anext__():
            paths1.append(path)

        paths_generator2 = file_system.list_deleted_paths(results_per_page=4) \
            .by_page(continuation_token=paths_generator1.continuation_token)
        paths2 = []
        async for path in await paths_generator2.__anext__():
            paths2.append(path)

        # Assert
        self.assertEqual(len(paths1), 2)
        self.assertEqual(len(paths2), 4)

    @DataLakePreparer()
    async def test_create_directory_from_file_system_client_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Creating "dir1/dir2" then listing non-recursively shows only "dir1"."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a nested directory created in one call.
        file_system = await self._create_file_system()
        await file_system.create_directory("dir1/dir2")

        paths = [path async for path in file_system.get_paths(recursive=False, upn=True)]

        self.assertEqual(len(paths), 1)
        self.assertEqual(paths[0].name, "dir1")

    @DataLakePreparer()
    async def test_create_file_from_file_system_client_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Creating "dir1/dir2/file" yields three paths, the last a file."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a nested file created in one call.
        file_system = await self._create_file_system()
        await file_system.create_file("dir1/dir2/file")

        paths = [path async for path in file_system.get_paths(recursive=True, upn=True)]
        self.assertEqual(len(paths), 3)
        self.assertEqual(paths[0].name, "dir1")
        self.assertEqual(paths[2].is_directory, False)

    @DataLakePreparer()
    async def test_get_root_directory_client_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """The root directory client can set and read back a POSIX ACL."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_system = await self._create_file_system()
        root_directory = file_system._get_root_directory_client()

        acl = 'user::rwx,group::r-x,other::rwx'
        await root_directory.set_access_control(acl=acl)
        access_control = await root_directory.get_access_control()

        self.assertEqual(acl, access_control['acl'])

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_get_access_control_using_delegation_sas_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """A user-delegation SAS scoped to an agent object id can read ACLs."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        url = self._get_account_url(datalake_storage_account_name)
        token_credential = self.generate_oauth_token()
        dsc = DataLakeServiceClient(url, token_credential, logging_enable=True)
        file_system_name = self._get_file_system_reference()
        directory_client_name = '/'
        # NOTE(review): the directory client produced here is discarded; only
        # the filesystem-creation side effect is used.
        (await dsc.create_file_system(file_system_name)).get_directory_client(directory_client_name)

        # Make a random principal the owner of the root directory, so the SAS
        # issued below with agent_object_id=random_guid is authorized.
        directory_client = self.dsc.get_directory_client(file_system_name, directory_client_name)
        random_guid = uuid.uuid4()
        await directory_client.set_access_control(owner=random_guid,
                                                  permissions='0777')
        acl = await directory_client.get_access_control()

        # One-hour user delegation key from the OAuth-authenticated client.
        delegation_key = await dsc.get_user_delegation_key(datetime.utcnow(),
                                                           datetime.utcnow() + timedelta(hours=1))

        token = generate_file_system_sas(
            dsc.account_name,
            file_system_name,
            delegation_key,
            permission=FileSystemSasPermissions(
                read=True, execute=True, manage_access_control=True, manage_ownership=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            agent_object_id=random_guid
        )
        sas_directory_client = DataLakeDirectoryClient(self.dsc.url, file_system_name, directory_client_name,
                                                       credential=token, logging_enable=True)
        access_control = await sas_directory_client.get_access_control()

        self.assertIsNotNone(access_control)

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_list_paths_using_file_sys_delegation_sas_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """A list-only user-delegation SAS can enumerate paths in a filesystem.

        Fix: the original ended with a vacuous ``assertEqual(0, 0)``; assert
        on the listing result instead — the filesystem is freshly created and
        holds no paths, so an empty listing is expected.
        """
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        url = self._get_account_url(datalake_storage_account_name)
        token_credential = self.generate_oauth_token()
        dsc = DataLakeServiceClient(url, token_credential)
        file_system_name = self._get_file_system_reference()
        directory_client_name = '/'
        directory_client = (await dsc.create_file_system(file_system_name)).get_directory_client(directory_client_name)

        # Make a random principal the owner so the agent_object_id SAS applies.
        random_guid = uuid.uuid4()
        await directory_client.set_access_control(owner=random_guid, permissions='0777')

        delegation_key = await dsc.get_user_delegation_key(datetime.utcnow(),
                                                           datetime.utcnow() + timedelta(hours=1))

        # One-hour filesystem SAS limited to the list permission.
        token = generate_file_system_sas(
            dsc.account_name,
            file_system_name,
            delegation_key,
            permission=DirectorySasPermissions(list=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            agent_object_id=random_guid
        )
        sas_file_system_client = FileSystemClient(self.dsc.url, file_system_name,
                                                  credential=token)
        paths = []
        async for path in sas_file_system_client.get_paths():
            paths.append(path)

        # The new filesystem contains no paths; listing must simply succeed.
        self.assertEqual(len(paths), 0)

    @DataLakePreparer()
    async def test_file_system_sessions_closes_properly_async(
            self, datalake_storage_account_name, datalake_storage_account_key):
        """Nested file/directory client contexts close without breaking the parent."""
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_system_client = await self._create_file_system("fs")
        async with file_system_client as fs_client:
            # Each child client opens and closes inside the filesystem scope.
            async with fs_client.get_file_client("file1.txt") as child:
                await child.create_file()
            async with fs_client.get_file_client("file2.txt") as child:
                await child.create_file()
            async with fs_client.get_directory_client("file1") as child:
                await child.create_directory()
            async with fs_client.get_directory_client("file2") as child:
                await child.create_directory()

    @DataLakePreparer()
    async def test_undelete_dir_with_version_id(self, datalake_storage_account_name, datalake_storage_account_key):
        """A soft-deleted directory can be restored via its deletion_id.

        Playback-only (requires a soft-delete-enabled account). Exercises the
        private ``_undelete_path`` API.
        """
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        if not self.is_playback():
            return
        file_system_client = await self._create_file_system("fs")
        dir_path = 'dir10'
        dir_client = await file_system_client.create_directory(dir_path)
        resp = await dir_client.delete_directory()
        # After deletion, reading the path's properties must fail.
        with self.assertRaises(HttpResponseError):
            await file_system_client.get_file_client(dir_path).get_file_properties()
        restored_dir_client = await file_system_client._undelete_path(dir_path, resp['deletion_id'])
        resp = await restored_dir_client.get_directory_properties()
        self.assertIsNotNone(resp)

    @DataLakePreparer()
    async def test_undelete_file_with_version_id(self, datalake_storage_account_name, datalake_storage_account_key):
        """A soft-deleted file (non-ASCII name) can be restored via deletion_id.

        Playback-only (requires a soft-delete-enabled account). Exercises the
        private ``_undelete_path`` API; the 'Ň' in the path deliberately
        covers non-ASCII encoding.
        """
        self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        if not self.is_playback():
            return
        file_system_client = await self._create_file_system("fs")
        file_path = 'dir10/fileŇ'
        dir_client = await file_system_client.create_file(file_path)
        resp = await dir_client.delete_file()
        # After deletion, reading the path's properties must fail.
        with self.assertRaises(HttpResponseError):
            await file_system_client.get_file_client(file_path).get_file_properties()
        restored_file_client = await file_system_client._undelete_path(file_path, resp['deletion_id'])
        resp = await restored_file_client.get_file_properties()
        self.assertIsNotNone(resp)
class FileTest(StorageTestCase):
    async def _setUp(self, account_name, account_key):
        """Build the service client and ensure the test filesystem exists (live only)."""
        self.dsc = DataLakeServiceClient(self.account_url(account_name, 'dfs'), credential=account_key)
        self.config = self.dsc._config
        self.file_system_name = self.get_resource_name('filesystem')

        # In playback mode the filesystem already exists in the recording.
        if self.is_playback():
            return
        try:
            file_system = self.dsc.get_file_system_client(self.file_system_name)
            await file_system.create_file_system(timeout=5)
        except ResourceExistsError:
            # A leftover filesystem from a prior run is fine.
            pass

    def tearDown(self):
        """Best-effort cleanup: delete the test filesystem and close the client.

        Fix: the bare ``except:`` also swallowed ``SystemExit`` and
        ``KeyboardInterrupt``; narrowed to ``except Exception`` while keeping
        the best-effort (never fail teardown) intent.
        """
        if not self.is_playback():
            try:
                loop = asyncio.get_event_loop()
                loop.run_until_complete(self.dsc.delete_file_system(self.file_system_name))
                loop.run_until_complete(self.dsc.__aexit__())
            except Exception:
                # Cleanup is best-effort; failures here must not fail the test.
                pass

        return super(FileTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX):
        """Return a unique directory name derived from *prefix*."""
        return self.get_resource_name(prefix)

    def _get_file_reference(self, prefix=TEST_FILE_PREFIX):
        """Return a unique file name derived from *prefix*."""
        return self.get_resource_name(prefix)

    async def _create_file_system(self):
        """Create and return a filesystem with a freshly generated name."""
        file_system_name = self._get_file_system_reference()
        return await self.dsc.create_file_system(file_system_name)

    async def _create_directory_and_return_client(self, directory=None):
        """Create *directory* (or a generated name) and return its client."""
        name = directory or self._get_directory_reference()
        client = self.dsc.get_directory_client(self.file_system_name, name)
        await client.create_directory()
        return client

    async def _create_file_and_return_client(self, directory="", file=None):
        """Create *file* (optionally under *directory*) and return its client."""
        if directory:
            await self._create_directory_and_return_client(directory)
        file_name = file if file else self._get_file_reference()
        # Path keeps the "<directory>/<file>" shape (a leading '/' when
        # directory is empty), matching how callers address root-level files.
        client = self.dsc.get_file_client(self.file_system_name, directory + '/' + file_name)
        await client.create_file()
        return client

    # --Helpers-----------------------------------------------------------------

    @DataLakePreparer()
    async def test_create_file_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """create_file succeeds for a file placed under a fresh directory."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a directory to hold the file.
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
        await directory_client.create_directory()

        # Act
        file_client = directory_client.get_file_client('filename')
        response = await file_client.create_file()

        # Assert
        self.assertIsNotNone(response)

    @DataLakePreparer()
    async def test_file_exists(self, datalake_storage_account_name, datalake_storage_account_key):
        """exists() is True for a created file and False for a missing one."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
        await directory_client.create_directory()

        existing = directory_client.get_file_client('filename')
        missing = directory_client.get_file_client('nonexistentfile')
        await existing.create_file()

        self.assertTrue(await existing.exists())
        self.assertFalse(await missing.exists())

    @DataLakePreparer()
    async def test_create_file_using_oauth_token_credential_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """A file client authenticated with an OAuth token can create a file."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a standalone file client using an OAuth credential.
        file_name = self._get_file_reference()
        token_credential = self.generate_oauth_token()
        oauth_file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, file_name,
                                               credential=token_credential)

        # Act
        response = await oauth_file_client.create_file()

        # Assert
        self.assertIsNotNone(response)

    @DataLakePreparer()
    async def test_create_file_with_existing_name_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """create_file with MatchConditions.IfMissing fails when the file exists."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_client = await self._create_file_and_return_client()

        # IfMissing (if_none_match='*') requires no existing file, so the
        # second creation must be rejected.
        with self.assertRaises(ResourceExistsError):
            await file_client.create_file(match_condition=MatchConditions.IfMissing)

    @DataLakePreparer()
    async def test_create_file_with_lease_id_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Recreating a leased file succeeds when the lease is supplied."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')

        # Act: create, lease, then recreate under that lease.
        await file_client.create_file()
        lease = await file_client.acquire_lease()
        create_resp = await file_client.create_file(lease=lease)

        # Assert: properties reflect the second creation.
        file_properties = await file_client.get_file_properties()
        self.assertIsNotNone(file_properties)
        self.assertEqual(file_properties.etag, create_resp.get('etag'))
        self.assertEqual(file_properties.last_modified, create_resp.get('last_modified'))

    @DataLakePreparer()
    async def test_create_file_under_root_directory_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """A file can be created directly under the filesystem root."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a client addressing the file at the filesystem root.
        root_file_client = self.dsc.get_file_client(self.file_system_name, "filename")

        response = await root_file_client.create_file()

        # Assert
        self.assertIsNotNone(response)

    @DataLakePreparer()
    async def test_append_data_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """append_data returns a non-empty response for a 3-byte append."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a file under a fresh directory.
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()

        # Act: append 3 bytes at offset 0.
        response = await file_client.append_data(b'abc', 0, 3)

        self.assertIsNotNone(response)

    @DataLakePreparer()
    async def test_append_empty_data_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Flushing zero bytes leaves the file with size 0.

        Fix: the original called ``assertIsNotNone(file_props['size'], 0)``,
        which passes 0 as the assertion *message* and never checks the value;
        the intended check is that the committed size equals 0.
        """
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()

        # Act: commit with no appended data.
        await file_client.flush_data(0)
        file_props = await file_client.get_file_properties()

        self.assertEqual(file_props['size'], 0)

    @DataLakePreparer()
    async def test_flush_data_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """flush_data commits appended bytes and updates the file size."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange: a file under a fresh directory.
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()

        # Act: append 3 bytes, then commit them.
        await file_client.append_data(b'abc', 0, 3)
        response = await file_client.flush_data(3)

        # Assert: the committed size is visible in the properties.
        prop = await file_client.get_file_properties()
        self.assertIsNotNone(response)
        self.assertEqual(prop['size'], 3)

    @DataLakePreparer()
    async def test_flush_data_with_match_condition_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """flush_data with IfNotModified succeeds on a fresh etag, fails on a stale one."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')
        resp = await file_client.create_file()

        # Act
        await file_client.append_data(b'abc', 0, 3)

        # The etag is unchanged since creation, so this flush succeeds.
        response = await file_client.flush_data(3, etag=resp['etag'], match_condition=MatchConditions.IfNotModified)

        # The flush above advanced the etag, so the stale one is rejected.
        await file_client.append_data(b'abc', 3, 3)
        with self.assertRaises(ResourceModifiedError):
            await file_client.flush_data(6, etag=resp['etag'], match_condition=MatchConditions.IfNotModified)

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_upload_data_in_substreams(self, datalake_storage_account_name, datalake_storage_account_key):
        """A 16MB upload in 8MB chunks round-trips both in parallel and serially."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # parallel upload cannot be recorded
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(self.file_system_name, directory_name)
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')

        # 16MB payload; 8MB chunks exceed the 4MB threshold, exercising the
        # optimized (substream) upload path.
        raw_data = self.get_random_bytes(16 * 1024 * 1024)
        await file_client.upload_data(raw_data, chunk_size=8 * 1024 * 1024, overwrite=True, max_concurrency=3)
        downloaded_data = await (await file_client.download_file()).readall()
        self.assertEqual(raw_data, downloaded_data)

        # Same upload again on a single thread.
        await file_client.upload_data(raw_data, chunk_size=8 * 1024 * 1024, overwrite=True)
        downloaded_data = await (await file_client.download_file()).readall()
        self.assertEqual(raw_data, downloaded_data)

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_upload_data_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """upload_data with max_concurrency=5 round-trips a 400KB payload
        intact. Parallel upload cannot be recorded, hence live-only."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        # Create a directory and a file client under it.
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, self._get_directory_reference())
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')

        payload = self.get_random_bytes(400 * 1024)
        await file_client.upload_data(payload, overwrite=True, max_concurrency=5)

        downloader = await file_client.download_file()
        self.assertEqual(payload, await downloader.readall())

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_upload_data_to_existing_file_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Uploading over an existing file fails without overwrite=True and
        succeeds (replacing the content) when overwrite is set."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        # Create a directory and seed an existing file with some content.
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, self._get_directory_reference())
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()
        await file_client.append_data(b"abc", 0)
        await file_client.flush_data(3)

        new_content = self.get_random_bytes(100)

        # Without overwrite the upload is rejected...
        with self.assertRaises(HttpResponseError):
            await file_client.upload_data(new_content, max_concurrency=5)
        # ...and with overwrite it replaces the existing data.
        await file_client.upload_data(new_content, overwrite=True, max_concurrency=5)

        downloader = await file_client.download_file()
        self.assertEqual(new_content, await downloader.readall())

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_upload_data_to_existing_file_with_content_settings_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """upload_data with an etag/IfNotModified precondition also applies the
        supplied ContentSettings. The etag in the async recording file cannot
        be parsed properly, hence live-only."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        # Create a directory and the target file, capturing its etag.
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, self._get_directory_reference())
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')
        etag = (await file_client.create_file())['etag']

        payload = self.get_random_bytes(100)
        content_settings = ContentSettings(
            content_language='spanish',
            content_disposition='inline')

        # Overwrite conditionally on the creation etag, attaching the settings.
        await file_client.upload_data(
            payload, max_concurrency=5,
            content_settings=content_settings, etag=etag,
            match_condition=MatchConditions.IfNotModified)

        downloaded = await (await file_client.download_file()).readall()
        properties = await file_client.get_file_properties()

        self.assertEqual(payload, downloaded)
        self.assertEqual(properties.content_settings.content_language, content_settings.content_language)

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_upload_data_to_existing_file_with_permissions_and_umask_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Overwriting upload with permissions='0777' and umask='0000' leaves
        the file world readable/writable/executable. The etag in the async
        recording file cannot be parsed properly, hence live-only."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        # Create a directory and the target file, capturing its etag.
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, self._get_directory_reference())
        await directory_client.create_directory()
        file_client = directory_client.get_file_client('filename')
        etag = (await file_client.create_file())['etag']

        payload = self.get_random_bytes(100)

        # Overwrite conditionally on the creation etag with explicit POSIX bits.
        await file_client.upload_data(
            payload,
            overwrite=True, max_concurrency=5,
            permissions='0777', umask="0000",
            etag=etag,
            match_condition=MatchConditions.IfNotModified)

        downloaded = await (await file_client.download_file()).readall()
        access_control = await file_client.get_access_control()

        self.assertEqual(payload, downloaded)
        self.assertEqual(access_control['permissions'], 'rwxrwxrwx')

    @DataLakePreparer()
    async def test_read_file_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Data appended and flushed to a file reads back unchanged."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)

        # upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # download the data and make sure it is the same as uploaded data
        downloaded_data = await (await file_client.download_file()).readall()
        self.assertEqual(data, downloaded_data)

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_read_file_with_user_delegation_key_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """A file SAS signed with an OAuth user delegation key can read the file."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # SAS URL is calculated from storage key, so this test runs live only

        # Create file
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)
        # Upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # Get user delegation key
        token_credential = self.generate_oauth_token()
        service_client = DataLakeServiceClient(self.account_url(datalake_storage_account_name, 'dfs'), credential=token_credential)
        user_delegation_key = await service_client.get_user_delegation_key(datetime.utcnow(),
                                                                           datetime.utcnow() + timedelta(hours=1))

        # Sign a file-level SAS with the delegation key (valid for one hour).
        sas_token = generate_file_sas(file_client.account_name,
                                      file_client.file_system_name,
                                      None,
                                      file_client.path_name,
                                      user_delegation_key,
                                      permission=FileSasPermissions(read=True, create=True, write=True, delete=True),
                                      expiry=datetime.utcnow() + timedelta(hours=1),
                                      )

        # download the data and make sure it is the same as uploaded data
        new_file_client = DataLakeFileClient(self.account_url(datalake_storage_account_name, 'dfs'),
                                             file_client.file_system_name,
                                             file_client.path_name,
                                             credential=sas_token)
        downloaded_data = await (await new_file_client.download_file()).readall()
        self.assertEqual(data, downloaded_data)

    @DataLakePreparer()
    async def test_read_file_into_file_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """download_file().readinto() writes the file's content into a local stream."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)

        # upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # download the data into a file and make sure it is the same as uploaded data
        with open(FILE_PATH, 'wb') as stream:
            download = await file_client.download_file(max_concurrency=2)
            await download.readinto(stream)

        # Assert
        with open(FILE_PATH, 'rb') as stream:
            actual = stream.read()
            self.assertEqual(data, actual)

    @DataLakePreparer()
    async def test_read_file_to_text_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """download_file(encoding=...) decodes the stored bytes back to text."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()
        data = self.get_random_text_data(1024)

        # upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # download the text data and make sure it is the same as uploaded data
        downloaded_data = await (await file_client.download_file(max_concurrency=2, encoding="utf-8")).readall()

        # Assert
        self.assertEqual(data, downloaded_data)

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_account_sas_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """An account SAS with read-only permission (raw string and
        AzureSasCredential alike) can read file properties but cannot append.
        SAS URL is calculated from the storage key, so this runs live only."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        # Create a file directly under the root directory.
        file_name = self._get_file_reference()
        await self._create_file_and_return_client(file=file_name)

        # Generate a token with read permission only (valid one hour).
        token = generate_account_sas(
            self.dsc.account_name,
            self.dsc.credential.account_key,
            ResourceTypes(file_system=True, object=True),
            AccountSasPermissions(read=True),
            datetime.utcnow() + timedelta(hours=1),
        )

        # The SAS must work both as a raw token and wrapped in a credential.
        for credential in (token, AzureSasCredential(token)):
            file_client = DataLakeFileClient(
                self.dsc.url, self.file_system_name, file_name, credential=credential)

            # Reading the file properties is allowed...
            self.assertIsNotNone(await file_client.get_file_properties())

            # ...but writing with the read-only token is rejected.
            with self.assertRaises(HttpResponseError):
                await file_client.append_data(b"abcd", 0, 4)

    @DataLakePreparer()
    async def test_account_sas_raises_if_sas_already_in_uri(self, datalake_storage_account_name, datalake_storage_account_key):
        """Supplying an AzureSasCredential when the account URL already carries
        a SAS query string must raise ValueError."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        url_with_sas = self.dsc.url + "?sig=foo"
        with self.assertRaises(ValueError):
            DataLakeFileClient(url_with_sas, self.file_system_name, "foo",
                               credential=AzureSasCredential("?foo=bar"))

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_file_sas_only_applies_to_file_level_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """A file-scoped SAS authorizes file operations but is rejected for
        file-system- and directory-level operations."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # SAS URL is calculated from storage key, so this test runs live only

        file_name = self._get_file_reference()
        directory_name = self._get_directory_reference()
        await self._create_file_and_return_client(directory=directory_name, file=file_name)

        # generate a token with file level read and write permissions
        token = generate_file_sas(
            self.dsc.account_name,
            self.file_system_name,
            directory_name,
            file_name,
            self.dsc.credential.account_key,
            permission=FileSasPermissions(read=True, write=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )

        # read the created file which is under root directory
        file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, directory_name + '/' + file_name,
                                         credential=token)
        properties = await file_client.get_file_properties()

        # make sure we can read the file properties
        self.assertIsNotNone(properties)

        # try to write to the created file with the token
        response = await file_client.append_data(b"abcd", 0, 4, validate_content=True)
        self.assertIsNotNone(response)

        # the token is for file level, so users are not supposed to have access to file system level operations
        file_system_client = FileSystemClient(self.dsc.url, self.file_system_name, credential=token)
        with self.assertRaises(ClientAuthenticationError):
            await file_system_client.get_file_system_properties()

        # the token is for file level, so users are not supposed to have access to directory level operations
        directory_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, directory_name,
                                                   credential=token)
        with self.assertRaises(ClientAuthenticationError):
            await directory_client.get_directory_properties()

    @DataLakePreparer()
    async def test_delete_file_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """A deleted file no longer serves get_file_properties."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        file_client = await self._create_file_and_return_client()
        await file_client.delete_file()

        # The file is gone, so reading its properties must fail.
        with self.assertRaises(ResourceNotFoundError):
            await file_client.get_file_properties()

    @DataLakePreparer()
    async def test_delete_file_with_if_unmodified_since_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """delete_file succeeds when if_unmodified_since matches the file's
        current last-modified time."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        file_client = await self._create_file_and_return_client()
        properties = await file_client.get_file_properties()
        await file_client.delete_file(if_unmodified_since=properties['last_modified'])

        # The conditional delete went through, so the file must be gone.
        with self.assertRaises(ResourceNotFoundError):
            await file_client.get_file_properties()

    @DataLakePreparer()
    async def test_set_access_control_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """set_access_control with octal permissions returns a response."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()

        result = await file_client.set_access_control(permissions='0777')

        self.assertIsNotNone(result)

    @DataLakePreparer()
    async def test_set_access_control_with_match_conditions_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """set_access_control with IfMissing on an existing file must raise
        ResourceModifiedError (the resource is present, so the condition fails)."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()

        with self.assertRaises(ResourceModifiedError):
            await file_client.set_access_control(
                permissions='0777', match_condition=MatchConditions.IfMissing)

    @DataLakePreparer()
    async def test_get_access_control_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """get_access_control returns a value after permissions were set."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()
        await file_client.set_access_control(permissions='0777')

        access_control = await file_client.get_access_control()

        self.assertIsNotNone(access_control)

    @DataLakePreparer()
    async def test_get_access_control_with_if_modified_since_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """get_access_control succeeds when if_modified_since lies before the
        file's last-modified timestamp."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()
        await file_client.set_access_control(permissions='0777')

        properties = await file_client.get_file_properties()

        # A cutoff 15 minutes in the past guarantees the condition holds.
        cutoff = properties['last_modified'] - timedelta(minutes=15)
        access_control = await file_client.get_access_control(if_modified_since=cutoff)

        self.assertIsNotNone(access_control)

    @DataLakePreparer()
    async def test_get_properties_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """get_file_properties reflects the size, metadata and content settings
        supplied when the file was created."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        directory_client = await self._create_directory_and_return_client()

        metadata = {'hello': 'world', 'number': '42'}
        content_settings = ContentSettings(
            content_language='spanish',
            content_disposition='inline')
        file_client = await directory_client.create_file(
            "newfile", metadata=metadata, content_settings=content_settings)
        await file_client.append_data(b"abc", 0, 3)
        await file_client.flush_data(3)

        properties = await file_client.get_file_properties()

        self.assertTrue(properties)
        self.assertEqual(properties.size, 3)
        self.assertEqual(properties.metadata['hello'], metadata['hello'])
        self.assertEqual(properties.content_settings.content_language,
                         content_settings.content_language)

    @DataLakePreparer()
    async def test_set_access_control_recursive_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Recursive ACL set on a single file counts exactly one file, no
        directories, no failures, and the ACL reads back as written."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        acl = 'user::rwx,group::r-x,other::rwx'
        file_client = await self._create_file_and_return_client()

        summary = await file_client.set_access_control_recursive(acl=acl)

        counters = summary.counters
        self.assertEqual(counters.directories_successful, 0)
        self.assertEqual(counters.files_successful, 1)
        self.assertEqual(counters.failure_count, 0)

        access_control = await file_client.get_access_control()
        self.assertIsNotNone(access_control)
        self.assertEqual(acl, access_control['acl'])

    @DataLakePreparer()
    async def test_update_access_control_recursive_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Recursive ACL update on a single file counts exactly one file, no
        directories, no failures, and the ACL reads back as written."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        acl = 'user::rwx,group::r-x,other::rwx'
        file_client = await self._create_file_and_return_client()

        summary = await file_client.update_access_control_recursive(acl=acl)

        counters = summary.counters
        self.assertEqual(counters.directories_successful, 0)
        self.assertEqual(counters.files_successful, 1)
        self.assertEqual(counters.failure_count, 0)

        access_control = await file_client.get_access_control()
        self.assertIsNotNone(access_control)
        self.assertEqual(acl, access_control['acl'])

    @DataLakePreparer()
    async def test_remove_access_control_recursive_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Recursive ACL removal on a single file reports one successful file
        and no failures."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Remove the mask plus the named/default user and group entries.
        acl = ",".join([
            "mask",
            "default:user,default:group",
            "user:ec3595d6-2c17-4696-8caa-7e139758d24a,group:ec3595d6-2c17-4696-8caa-7e139758d24a",
            "default:user:ec3595d6-2c17-4696-8caa-7e139758d24a,default:group:ec3595d6-2c17-4696-8caa-7e139758d24a",
        ])
        file_client = await self._create_file_and_return_client()

        summary = await file_client.remove_access_control_recursive(acl=acl)

        counters = summary.counters
        self.assertEqual(counters.directories_successful, 0)
        self.assertEqual(counters.files_successful, 1)
        self.assertEqual(counters.failure_count, 0)

    @DataLakePreparer()
    async def test_set_expiry_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """set_file_expiry with an absolute time surfaces a populated
        expiry_time in the file properties."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        directory_client = await self._create_directory_and_return_client()

        metadata = {'hello': 'world', 'number': '42'}
        content_settings = ContentSettings(
            content_language='spanish',
            content_disposition='inline')
        file_client = await directory_client.create_file(
            "newfile", metadata=metadata, content_settings=content_settings)

        # Expire the file one hour out, then confirm the property is set.
        expires_on = datetime.utcnow() + timedelta(hours=1)
        await file_client.set_file_expiry("Absolute", expires_on=expires_on)

        properties = await file_client.get_file_properties()
        self.assertTrue(properties)
        self.assertIsNotNone(properties.expiry_time)

    @DataLakePreparer()
    async def test_rename_file_with_non_used_name_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Renaming to an unused name moves the content, and the returned
        client points at the new path."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        file_client = await self._create_file_and_return_client()

        content = b"abc"
        await file_client.append_data(content, 0, 3)
        await file_client.flush_data(3)

        renamed_client = await file_client.rename_file(
            file_client.file_system_name + '/' + 'newname')

        downloaded = await (await renamed_client.download_file()).readall()
        self.assertEqual(downloaded, content)
        self.assertEqual(renamed_client.path_name, "newname")

    @DataLakePreparer()
    async def test_rename_file_to_existing_file_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Renaming a file onto an existing path overwrites the destination's
        content.

        Cleanup: the original kept ``old_url``/``new_url`` locals that were
        captured but never asserted on; they have been removed as dead code.
        """
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # create the existing file
        existing_file_client = await self._create_file_and_return_client(file="existingfile")
        await existing_file_client.append_data(b"a", 0, 1)
        await existing_file_client.flush_data(1)

        # prepare to rename the file to the existing file
        file_client = await self._create_file_and_return_client()
        data_bytes = b"abc"
        await file_client.append_data(data_bytes, 0, 3)
        await file_client.flush_data(3)
        new_client = await file_client.rename_file(file_client.file_system_name + '/' + existing_file_client.path_name)

        data = await (await new_client.download_file()).readall()
        # the existing file was overridden
        self.assertEqual(data, data_bytes)

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_rename_file_with_file_sas_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """A file SAS with move permission can rename a file, supplying a
        second SAS (via the destination query string) for the target path."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # SAS URL is calculated from storage key, so this test runs live only
        # Source token needs move permission to authorize the rename.
        token = generate_file_sas(self.dsc.account_name,
                                  self.file_system_name,
                                  None,
                                  "oldfile",
                                  datalake_storage_account_key,
                                  permission=FileSasPermissions(read=True, create=True, write=True, delete=True, move=True),
                                  expiry=datetime.utcnow() + timedelta(hours=1),
                                  )

        # Separate token scoped to the destination name "newname".
        new_token = generate_file_sas(self.dsc.account_name,
                                      self.file_system_name,
                                      None,
                                      "newname",
                                      datalake_storage_account_key,
                                      permission=FileSasPermissions(read=True, create=True, write=True, delete=True),
                                      expiry=datetime.utcnow() + timedelta(hours=1),
                                      )

        # read the created file which is under root directory
        file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, "oldfile", credential=token)
        await file_client.create_file()
        data_bytes = b"abc"
        await file_client.append_data(data_bytes, 0, 3)
        await file_client.flush_data(3)
        # Destination SAS rides along as the query string of the new path.
        new_client = await file_client.rename_file(file_client.file_system_name+'/'+'newname'+'?'+new_token)

        data = await (await new_client.download_file()).readall()
        self.assertEqual(data, data_bytes)
        self.assertEqual(new_client.path_name, "newname")

    @pytest.mark.live_test_only
    @DataLakePreparer()
    async def test_rename_file_will_not_change_existing_directory_async(self, datalake_storage_account_name, datalake_storage_account_key):
        """Renaming a file across directories overwrites only the target file;
        sibling files in both directories are left untouched and the source
        path stops existing."""
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # create none empty directory(with 2 files)
        dir1 = await self._create_directory_and_return_client(directory="dir1")
        f1 = await dir1.create_file("file1")
        await f1.append_data(b"file1", 0, 5)
        await f1.flush_data(5)
        f2 = await dir1.create_file("file2")
        await f2.append_data(b"file2", 0, 5)
        await f2.flush_data(5)

        # create another none empty directory(with 2 files)
        dir2 = await self._create_directory_and_return_client(directory="dir2")
        f3 = await dir2.create_file("file3")
        await f3.append_data(b"file3", 0, 5)
        await f3.flush_data(5)
        f4 = await dir2.create_file("file4")
        await f4.append_data(b"file4", 0, 5)
        await f4.flush_data(5)

        # Move dir2/file3 onto dir1/file1, overwriting file1's content.
        new_client = await f3.rename_file(f1.file_system_name + '/' + f1.path_name)

        self.assertEqual(await (await new_client.download_file()).readall(), b"file3")

        # make sure the data in file2 and file4 weren't touched
        f2_data = await (await f2.download_file()).readall()
        self.assertEqual(f2_data, b"file2")

        f4_data = await (await f4.download_file()).readall()
        self.assertEqual(f4_data, b"file4")

        # The source path no longer exists after the move.
        with self.assertRaises(HttpResponseError):
            await (await f3.download_file()).readall()
class FileSystemTest(StorageTestCase):
    def setUp(self):
        # Build a DataLake service client for the test account and enter its
        # async context on the current event loop so the transport session is
        # open for every test; tearDown is responsible for closing it.
        super(FileSystemTest, self).setUp()
        url = self._get_account_url()
        self.dsc = DataLakeServiceClient(
            url,
            credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
            transport=AiohttpTestTransport())
        self.config = self.dsc._config
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())
        # Names of file systems created by a test run; tearDown deletes them.
        self.test_file_systems = []

    def tearDown(self):
        """Best-effort cleanup: delete every file system the test created and
        close the service client's transport session (skipped in playback)."""
        if not self.is_playback():
            loop = asyncio.get_event_loop()
            try:
                for file_system in self.test_file_systems:
                    loop.run_until_complete(
                        self.dsc.delete_file_system(file_system))
                # BUG FIX: was ``self.fsc.__aexit__()`` -- ``self.fsc`` is
                # never defined on this class (setUp only creates self.dsc),
                # so the AttributeError was swallowed by the except below and
                # the client session was never closed.
                loop.run_until_complete(self.dsc.__aexit__())
            except Exception:
                # Narrowed from a bare ``except:`` so KeyboardInterrupt /
                # SystemExit are no longer swallowed; cleanup stays
                # best-effort for everything else.
                pass

        return super(FileSystemTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_file_system_reference(self, prefix=TEST_FILE_SYSTEM_PREFIX):
        """Generate a unique file-system name and register it for cleanup."""
        name = self.get_resource_name(prefix)
        self.test_file_systems.append(name)
        return name

    async def _create_file_system(self,
                                  file_system_prefix=TEST_FILE_SYSTEM_PREFIX):
        """Create (and register for cleanup) a uniquely named file system."""
        name = self._get_file_system_reference(prefix=file_system_prefix)
        return await self.dsc.create_file_system(name)

    # --Helpers-----------------------------------------------------------------

    async def _test_create_file_system_async(self):
        """Creating a brand-new file system returns a truthy result."""
        # Arrange
        name = self._get_file_system_reference()
        fs_client = self.dsc.get_file_system_client(name)

        # Act
        created = await fs_client.create_file_system()

        # Assert
        self.assertTrue(created)

    @record
    def test_create_file_system_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_file_system_async())

    async def _test_create_file_system_with_metadata_async(self):
        """Metadata passed at creation time is persisted on the file system."""
        # Arrange
        metadata = {'hello': 'world', 'number': '42'}
        fs_client = self.dsc.get_file_system_client(
            self._get_file_system_reference())

        # Act
        created = await fs_client.create_file_system(metadata=metadata)

        # Assert
        props = await fs_client.get_file_system_properties()
        self.assertTrue(created)
        self.assertDictEqual(props.metadata, metadata)

    @record
    def test_create_file_system_with_metadata_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_file_system_with_metadata_async())

    async def _test_list_file_systems_async(self):
        """A newly created file system shows up in the account listing."""
        # Arrange
        name = self._get_file_system_reference()
        file_system = await self.dsc.create_file_system(name)

        # Act
        listed = []
        async for item in self.dsc.list_file_systems():
            listed.append(item)

        # Assert
        self.assertIsNotNone(listed)
        self.assertGreaterEqual(len(listed), 1)
        self.assertIsNotNone(listed[0])
        self.assertNamedItemInContainer(listed, file_system.file_system_name)
        self.assertIsNotNone(listed[0].has_immutability_policy)
        self.assertIsNotNone(listed[0].has_legal_hold)

    @record
    def test_list_file_systems_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_file_systems_async())

    async def _test_delete_file_system_with_existing_file_system_async(self):
        """delete_file_system returns None on success."""
        # Arrange
        file_system = await self._create_file_system()

        # Act / Assert
        self.assertIsNone(await file_system.delete_file_system())

    @record
    def test_delete_file_system_with_existing_file_system_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_delete_file_system_with_existing_file_system_async())

    async def _test_delete_none_existing_file_system_async(self):
        """Deleting a nonexistent file system raises ResourceNotFoundError."""
        missing = self.dsc.get_file_system_client("fakeclient")

        # Act / Assert
        with self.assertRaises(ResourceNotFoundError):
            await missing.delete_file_system(
                match_condition=MatchConditions.IfMissing)

    @record
    def test_delete_none_existing_file_system_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_delete_none_existing_file_system_async())

    async def _test_list_file_systems_with_include_metadata_async(self):
        """Listing with include_metadata=True surfaces file-system metadata."""
        # Arrange
        file_system = await self._create_file_system()
        metadata = {'hello': 'world', 'number': '42'}
        await file_system.set_file_system_metadata(metadata)

        # Act
        listed = []
        async for item in self.dsc.list_file_systems(
                name_starts_with=file_system.file_system_name,
                include_metadata=True):
            listed.append(item)

        # Assert
        self.assertIsNotNone(listed)
        self.assertGreaterEqual(len(listed), 1)
        self.assertIsNotNone(listed[0])
        self.assertNamedItemInContainer(listed, file_system.file_system_name)
        self.assertDictEqual(listed[0].metadata, metadata)

    @record
    def test_list_file_systems_with_include_metadata_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_file_systems_with_include_metadata_async())

    async def _test_set_file_system_acl_async(self):
        """set_file_system_access_policy stores signed identifiers, and the
        public-access level resets to None when not supplied."""
        file_system = await self._create_file_system()
        access_policy = AccessPolicy(
            permission=FileSystemSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            start=datetime.utcnow())

        # One identifier plus an explicit public-access level.
        response = await file_system.set_file_system_access_policy(
            {'testid': access_policy}, public_access=PublicAccess.FileSystem)
        self.assertIsNotNone(response.get('etag'))
        self.assertIsNotNone(response.get('last_modified'))

        first_acl = await file_system.get_file_system_access_policy()
        self.assertIsNotNone(first_acl['public_access'])
        self.assertEqual(len(first_acl['signed_identifiers']), 1)

        # Setting identifiers without an access policy defaults it to None.
        await file_system.set_file_system_access_policy(
            {'testid': access_policy, 'test2': access_policy})
        second_acl = await file_system.get_file_system_access_policy()
        self.assertIsNone(second_acl['public_access'])
        self.assertEqual(len(second_acl['signed_identifiers']), 2)

    @record
    def test_set_file_system_acl_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_set_file_system_acl_async())

    async def _test_list_file_systems_by_page_async(self):
        """Paged listing honours results_per_page."""
        # Arrange
        for index in range(0, 6):
            await self._create_file_system(
                file_system_prefix="filesystem{}".format(index))

        # Act: fetch just the first page.
        first_page = await self.dsc.list_file_systems(
            results_per_page=3, name_starts_with="file",
            include_metadata=True).by_page().__anext__()
        listed = []
        async for item in first_page:
            listed.append(item)

        # Assert
        self.assertIsNotNone(listed)
        self.assertGreaterEqual(len(listed), 3)

    @record
    def test_list_file_systems_by_page_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_file_systems_by_page_async())

    async def _test_list_file_systems_with_public_access_async(self):
        """A file system created with "blob" public access reports it in listings."""
        # Arrange
        name = self._get_file_system_reference()
        file_system = self.dsc.get_file_system_client(name)
        await file_system.create_file_system(public_access="blob")
        metadata = {'hello': 'world', 'number': '42'}
        await file_system.set_file_system_metadata(metadata)

        # Act
        listed = []
        async for item in self.dsc.list_file_systems(
                name_starts_with=file_system.file_system_name,
                include_metadata=True):
            listed.append(item)

        # Assert
        self.assertIsNotNone(listed)
        self.assertGreaterEqual(len(listed), 1)
        self.assertIsNotNone(listed[0])
        self.assertNamedItemInContainer(listed, file_system.file_system_name)
        self.assertDictEqual(listed[0].metadata, metadata)
        # NOTE(review): PublicAccess.File appears to map to the "blob" access
        # level used at creation above -- confirm against the SDK enum.
        self.assertTrue(listed[0].public_access is PublicAccess.File)

    @record
    def test_list_file_systems_with_public_access_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_file_systems_with_public_access_async())

    async def _test_get_file_system_properties_async(self):
        """get_file_system_properties returns metadata and policy flags."""
        # Arrange
        metadata = {'hello': 'world', 'number': '42'}
        file_system = await self._create_file_system()
        await file_system.set_file_system_metadata(metadata)

        # Act
        props = await file_system.get_file_system_properties()

        # Assert
        self.assertIsNotNone(props)
        self.assertDictEqual(props.metadata, metadata)
        self.assertIsNotNone(props.has_immutability_policy)
        self.assertIsNotNone(props.has_legal_hold)

    @record
    def test_get_file_system_properties_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_get_file_system_properties_async())

    async def _test_list_paths_async(self):
        """get_paths enumerates every directory created at the root."""
        # Arrange
        file_system = await self._create_file_system()
        for index in range(0, 6):
            await file_system.create_directory("dir1{}".format(index))

        collected = []
        async for entry in file_system.get_paths(upn=True):
            collected.append(entry)

        self.assertEqual(len(collected), 6)

    @record
    def test_list_paths_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_paths_async())

    async def _test_list_paths_which_are_all_files_async(self):
        """get_paths enumerates files as well as directories."""
        # Arrange
        file_system = await self._create_file_system()
        for index in range(0, 6):
            await file_system.create_file("file{}".format(index))

        collected = []
        async for entry in file_system.get_paths(upn=True):
            collected.append(entry)

        self.assertEqual(len(collected), 6)

    @record
    def test_list_paths_which_are_all_files_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_paths_which_are_all_files_async())

    async def _test_list_paths_with_max_per_page_async(self):
        """Path pages respect max_results and resume via continuation token."""
        # Arrange
        file_system = await self._create_file_system()
        for index in range(0, 6):
            await file_system.create_directory("dir1{}".format(index))

        pager_one = file_system.get_paths(max_results=2, upn=True).by_page()
        first_batch = []
        async for entry in await pager_one.__anext__():
            first_batch.append(entry)

        # Resume from where the first pager stopped, with a larger page size.
        pager_two = file_system.get_paths(max_results=4, upn=True) \
            .by_page(continuation_token=pager_one.continuation_token)
        second_batch = []
        async for entry in await pager_two.__anext__():
            second_batch.append(entry)

        self.assertEqual(len(first_batch), 2)
        self.assertEqual(len(second_batch), 4)

    @record
    def test_list_paths_with_max_per_page_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_paths_with_max_per_page_async())

    async def _test_list_paths_under_specific_path_async(self):
        """get_paths(path=...) lists only entries under the given directory."""
        # Arrange
        file_system = await self._create_file_system()
        for index in range(0, 6):
            directory = await file_system.create_directory(
                "dir1{}".format(index))

            # Nested sub-directories under each top-level directory.
            subdir = await directory.create_sub_directory("subdir")
            await subdir.create_sub_directory("subsub")

            # One 5-byte file under each sub-directory.
            file_client = await subdir.create_file("file")
            await file_client.append_data(b"abced", 0, 5)
            await file_client.flush_data(5)

        pager = file_system.get_paths(path="dir10/subdir",
                                      max_results=2,
                                      upn=True).by_page()
        collected = []
        async for entry in await pager.__anext__():
            collected.append(entry)

        self.assertEqual(len(collected), 2)
        self.assertEqual(collected[0].content_length, 5)

    @record
    def test_list_paths_under_specific_path_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_paths_under_specific_path_async())

    async def _test_list_paths_recursively_async(self):
        """Recursive listing walks the full tree of created paths."""
        # Arrange
        file_system = await self._create_file_system()
        for index in range(0, 6):
            directory = await file_system.create_directory(
                "dir1{}".format(index))

            # Nested sub-directories under each top-level directory.
            subdir = await directory.create_sub_directory("subdir")
            await subdir.create_sub_directory("subsub")

            # One file under each sub-directory.
            await subdir.create_file("file")

        collected = []
        async for entry in file_system.get_paths(recursive=True, upn=True):
            collected.append(entry)

        # 6 top-level dirs, each contributing subdir, subsub and file: 24 paths.
        self.assertEqual(len(collected), 24)

    @record
    def test_list_paths_recursively_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_list_paths_recursively_async())

    async def _test_create_directory_from_file_system_client_async(self):
        """Creating "dir1/dir2" yields a single top-level entry, "dir1"."""
        # Arrange
        file_system = await self._create_file_system()
        await file_system.create_directory("dir1/dir2")

        collected = []
        async for entry in file_system.get_paths(recursive=False, upn=True):
            collected.append(entry)

        self.assertEqual(len(collected), 1)
        self.assertEqual(collected[0].name, "dir1")

    @record
    def test_create_directory_from_file_system_client_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_directory_from_file_system_client_async())

    async def _test_create_file_from_file_system_client_async(self):
        """Creating "dir1/dir2/file" materialises both parents plus the file."""
        # Arrange
        file_system = await self._create_file_system()
        await file_system.create_file("dir1/dir2/file")

        collected = []
        async for entry in file_system.get_paths(recursive=True, upn=True):
            collected.append(entry)
        self.assertEqual(len(collected), 3)
        self.assertEqual(collected[0].name, "dir1")
        self.assertEqual(collected[2].is_directory, False)

    @record
    def test_create_file_from_file_system_client_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_file_from_file_system_client_async())
class LargeFileTest(StorageTestCase):
    def setUp(self):
        super(LargeFileTest, self).setUp()
        url = self._get_account_url()
        # NOTE(review): presumably this policy drops/records request payloads
        # so huge uploads never hit the wire -- the tests below only assert on
        # its append_counter / append_sizes attributes. Confirm against
        # PayloadDroppingPolicy's implementation.
        self.payload_dropping_policy = PayloadDroppingPolicy()
        credential_policy = _format_shared_key_credential(
            self.settings.STORAGE_DATA_LAKE_ACCOUNT_NAME,
            self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY)
        self.dsc = DataLakeServiceClient(
            url,
            credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
            _additional_pipeline_policies=[
                self.payload_dropping_policy, credential_policy
            ])
        # Enter the async client context now; tearDown closes it.
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())

        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        # Only create real resources when not replaying a recording.
        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(
                self.file_system_name)
            try:
                loop = asyncio.get_event_loop()
                loop.run_until_complete(
                    file_system.create_file_system(timeout=5))

            except ResourceExistsError:
                # The file system may survive from a previous run; reuse it.
                pass

    def tearDown(self):
        """Best-effort cleanup of the test file system and client session."""
        if not self.is_playback():
            try:
                loop = asyncio.get_event_loop()
                loop.run_until_complete(
                    self.dsc.delete_file_system(self.file_system_name))
                loop.run_until_complete(self.dsc.__aexit__())
            except Exception:
                # Narrowed from a bare ``except:`` so KeyboardInterrupt /
                # SystemExit are no longer swallowed; cleanup stays
                # best-effort for everything else.
                pass

        return super(LargeFileTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX):
        """Return a unique directory name derived from *prefix*."""
        return self.get_resource_name(prefix)

    # --Helpers-----------------------------------------------------------------

    async def _test_append_large_stream_without_network(self):
        """A single LARGEST_BLOCK_SIZE append goes out as exactly one request."""
        directory_name = self._get_directory_reference()

        # Place the file inside a fresh directory.
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()

        stream = LargeStream(LARGEST_BLOCK_SIZE)

        # Act
        response = await file_client.append_data(stream, 0, LARGEST_BLOCK_SIZE)

        self.assertIsNotNone(response)
        # The dropping policy observed one append of the full block size.
        self.assertEqual(self.payload_dropping_policy.append_counter, 1)
        self.assertEqual(self.payload_dropping_policy.append_sizes[0],
                         LARGEST_BLOCK_SIZE)

    @pytest.mark.live_test_only
    def test_append_large_stream_without_network(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_append_large_stream_without_network())

    async def _test_upload_large_stream_without_network(self):
        """upload_data splits a two-block stream into two chunked appends."""
        directory_name = self.get_resource_name(TEST_DIRECTORY_PREFIX)

        # Place the file inside a fresh directory.
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        await directory_client.create_directory()

        file_client = directory_client.get_file_client('filename')
        await file_client.create_file()

        total_length = 2 * LARGEST_BLOCK_SIZE
        stream = LargeStream(total_length)

        # Act
        response = await file_client.upload_data(stream,
                                                 total_length,
                                                 overwrite=True,
                                                 chunk_size=LARGEST_BLOCK_SIZE)

        self.assertIsNotNone(response)
        # Two appends were issued, each exactly one chunk long.
        self.assertEqual(self.payload_dropping_policy.append_counter, 2)
        self.assertEqual(self.payload_dropping_policy.append_sizes[0],
                         LARGEST_BLOCK_SIZE)
        self.assertEqual(self.payload_dropping_policy.append_sizes[1],
                         LARGEST_BLOCK_SIZE)

    @pytest.mark.live_test_only
    def test_upload_large_stream_without_network(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_upload_large_stream_without_network())
class DirectoryTest(StorageTestCase):
    def setUp(self):
        # Create the service client, enter its async context (tearDown closes
        # it), and ensure the shared test file system exists in live runs.
        super(DirectoryTest, self).setUp()
        url = self._get_account_url()
        self.dsc = DataLakeServiceClient(
            url,
            credential=self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
            transport=AiohttpTestTransport())

        loop = asyncio.get_event_loop()
        loop.run_until_complete(self.dsc.__aenter__())
        self.config = self.dsc._config

        self.file_system_name = self.get_resource_name('filesystem')

        # Only create real resources when not replaying a recording.
        if not self.is_playback():
            file_system = self.dsc.get_file_system_client(
                self.file_system_name)
            try:
                loop.run_until_complete(
                    file_system.create_file_system(timeout=5))
            except ResourceExistsError:
                # Left over from a previous run; reuse it.
                pass

    def tearDown(self):
        """Best-effort cleanup of the shared file system and client session."""
        if not self.is_playback():
            try:
                loop = asyncio.get_event_loop()
                loop.run_until_complete(
                    self.dsc.delete_file_system(self.file_system_name))
                loop.run_until_complete(self.dsc.__aexit__())
            except Exception:
                # Narrowed from a bare ``except:`` so KeyboardInterrupt /
                # SystemExit are no longer swallowed; cleanup stays
                # best-effort for everything else.
                pass

        return super(DirectoryTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_directory_reference(self, prefix=TEST_DIRECTORY_PREFIX):
        """Return a unique directory name derived from *prefix*."""
        return self.get_resource_name(prefix)

    async def _create_directory_and_get_directory_client(
            self, directory_name=None):
        """Create *directory_name* (or a generated name) and return its client."""
        if not directory_name:
            directory_name = self._get_directory_reference()
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        await client.create_directory()
        return client

    async def _create_file_system(self):
        # NOTE(review): ``_get_file_system_reference`` is not defined on
        # DirectoryTest (only FileSystemTest defines it in this file), so
        # calling this helper would raise AttributeError. It appears to have
        # been copied from FileSystemTest and is unused by the visible tests;
        # confirm before relying on it.
        return await self.dsc.create_file_system(
            self._get_file_system_reference())

    # --Helpers-----------------------------------------------------------------

    async def _test_create_directory(self):
        """create_directory with content settings returns a truthy result."""
        # Arrange
        directory_name = self._get_directory_reference()
        content_settings = ContentSettings(content_language='spanish',
                                           content_disposition='inline')
        # Act
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        created = await client.create_directory(
            content_settings=content_settings)

        # Assert
        self.assertTrue(created)

    @record
    def test_create_directory_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_directory())

    async def _test_using_oauth_token_credential_to_create_directory(self):
        """A directory client built from an OAuth token can create a directory."""
        # generate a token with directory level create permission
        directory_name = self._get_directory_reference()

        token_credential = self.generate_async_oauth_token()
        client = DataLakeDirectoryClient(self.dsc.url,
                                         self.file_system_name,
                                         directory_name,
                                         credential=token_credential)
        self.assertIsNotNone(await client.create_directory())

    @record
    def test_using_oauth_token_credential_to_create_directory_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_using_oauth_token_credential_to_create_directory())

    async def _test_create_directory_with_match_conditions(self):
        """IfMissing match condition lets creation succeed for a new path."""
        # Arrange
        directory_name = self._get_directory_reference()

        # Act
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        created = await client.create_directory(
            match_condition=MatchConditions.IfMissing)

        # Assert
        self.assertTrue(created)

    @record
    def test_create_directory_with_match_conditions_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_directory_with_match_conditions())

    async def _test_create_directory_with_permission(self):
        """Permissions combined with a 0000 umask are applied verbatim."""
        # Arrange
        directory_name = self._get_directory_reference()

        # Act
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        created = await client.create_directory(permissions="rwxr--r--",
                                                umask="0000")

        access = await client.get_access_control()

        # Assert
        self.assertTrue(created)
        self.assertEqual(access['permissions'], 'rwxr--r--')

    @record
    def test_create_directory_with_permission_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_directory_with_permission())

    async def _test_create_directory_with_content_settings(self):
        """Content settings are accepted at directory creation time."""
        # Arrange
        directory_name = self._get_directory_reference()
        content_settings = ContentSettings(content_language='spanish',
                                           content_disposition='inline')
        # Act
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        created = await client.create_directory(
            content_settings=content_settings)

        # Assert
        self.assertTrue(created)

    @record
    def test_create_directory_with_content_settings_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_directory_with_content_settings())

    async def _test_create_directory_with_metadata(self):
        """Metadata is accepted at creation and the directory is retrievable.

        Fix: ``properties`` was fetched but never used; it is now asserted on
        so the get_directory_properties round-trip is actually verified.
        """
        # Arrange
        directory_name = self._get_directory_reference()
        metadata = {'hello': 'world', 'number': '42'}
        # Act
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        created = await directory_client.create_directory(metadata=metadata)

        properties = await directory_client.get_directory_properties()

        # Assert
        self.assertTrue(created)
        self.assertIsNotNone(properties)

    @record
    def test_create_directory_with_metadata_async(self):
        loop = asyncio.get_event_loop()
        loop.run_until_complete(self._test_create_directory_with_metadata())

    async def _test_delete_directory(self):
        """delete_directory returns None on success."""
        # Arrange
        directory_name = self._get_directory_reference()
        metadata = {'hello': 'world', 'number': '42'}
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        await client.create_directory(metadata=metadata)

        # Act / Assert
        self.assertIsNone(await client.delete_directory())

    @record
    def test_delete_directory_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_delete_directory())

    async def _test_delete_directory_with_if_modified_since(self):
        """Deletion conditioned on if_modified_since fails when unmodified."""
        # Arrange
        directory_name = self._get_directory_reference()

        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        await client.create_directory()
        props = await client.get_directory_properties()

        # The directory has not changed since creation, so the precondition
        # must fail.
        with self.assertRaises(ResourceModifiedError):
            await client.delete_directory(
                if_modified_since=props['last_modified'])

    @record
    def test_delete_directory_with_if_modified_since_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_delete_directory_with_if_modified_since())

    async def _test_create_sub_directory_and_delete_sub_directory(self):
        """A sub-directory can be created under a directory and deleted again."""
        # Arrange
        directory_name = self._get_directory_reference()
        metadata = {'hello': 'world', 'number': '42'}

        # Parent directory to hold the sub-directory.
        parent_client = self.dsc.get_directory_client(self.file_system_name,
                                                      directory_name)
        await parent_client.create_directory(metadata=metadata)

        # Create the sub-directory through the parent client.
        sub_directory_name = 'subdir'
        sub_created = await parent_client.create_sub_directory(
            sub_directory_name)

        # Verify via a client addressed directly at the sub-directory path.
        sub_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name + '/' + sub_directory_name)
        sub_props = await sub_client.get_directory_properties()

        # Assert
        self.assertTrue(sub_created)
        self.assertTrue(sub_props)

        # Act: delete, then confirm the sub-directory is gone.
        await parent_client.delete_sub_directory(sub_directory_name)
        with self.assertRaises(ResourceNotFoundError):
            await sub_client.get_directory_properties()

    @record
    def test_create_sub_directory_and_delete_sub_directory_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_create_sub_directory_and_delete_sub_directory())

    async def _test_set_access_control(self):
        """set_access_control with octal permissions returns a response."""
        directory_name = self._get_directory_reference()
        metadata = {'hello': 'world', 'number': '42'}
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        await client.create_directory(metadata=metadata)

        response = await client.set_access_control(permissions='0777')
        # Assert
        self.assertIsNotNone(response)

    @record
    def test_set_access_control_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_set_access_control())

    async def _test_set_access_control_with_acl(self):
        """An ACL string set on a directory is read back unchanged."""
        directory_name = self._get_directory_reference()
        metadata = {'hello': 'world', 'number': '42'}
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        await client.create_directory(metadata=metadata)

        acl = 'user::rwx,group::r-x,other::rwx'
        await client.set_access_control(acl=acl)
        access_control = await client.get_access_control()

        # Assert
        self.assertIsNotNone(access_control)
        self.assertEqual(acl, access_control['acl'])

    @record
    def test_set_access_control_with_acl_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_set_access_control_with_acl())

    async def _test_set_access_control_if_none_modified(self):
        """set_access_control succeeds when the creation etag still matches."""
        directory_name = self._get_directory_reference()
        client = self.dsc.get_directory_client(self.file_system_name,
                                               directory_name)
        resp = await client.create_directory()

        response = await client.set_access_control(
            permissions='0777',
            etag=resp['etag'],
            match_condition=MatchConditions.IfNotModified)
        # Assert
        self.assertIsNotNone(response)

    @record
    def test_set_access_control_if_none_modified_async(self):
        asyncio.get_event_loop().run_until_complete(
            self._test_set_access_control_if_none_modified())

    async def _test_get_access_control(self):
        # Arrange: a directory created with metadata and octal permissions.
        dir_name = self._get_directory_reference()
        client = self.dsc.get_directory_client(self.file_system_name,
                                               dir_name)
        await client.create_directory(
            metadata={'hello': 'world', 'number': '42'}, permissions='0777')

        # Act
        result = await client.get_access_control()

        # Assert
        self.assertIsNotNone(result)

    @record
    def test_get_access_control_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_get_access_control())

    async def _test_get_access_control_with_match_conditions(self):
        """Reading access control with a matching etag precondition succeeds.

        A directory created with permissions 0777 and umask 0000 should
        report full 'rwxrwxrwx' permissions.
        """
        directory_name = self._get_directory_reference()
        directory_client = self.dsc.get_directory_client(
            self.file_system_name, directory_name)
        resp = await directory_client.create_directory(permissions='0777',
                                                       umask='0000')

        # Act: condition the read on the etag returned by the create call.
        response = await directory_client.get_access_control(
            etag=resp['etag'], match_condition=MatchConditions.IfNotModified)
        # Assert
        self.assertIsNotNone(response)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # assertEqual is the supported spelling.
        self.assertEqual(response['permissions'], 'rwxrwxrwx')

    @record
    def test_get_access_control_with_match_conditions_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_get_access_control_with_match_conditions())

    async def _test_rename_from(self):
        # Rename an existing directory into a new name by calling the
        # destination client's internal _rename_path helper; content
        # settings passed to rename are not reflected on the directory.
        settings = ContentSettings(content_language='spanish',
                                   content_disposition='inline')
        source_name = self._get_directory_reference()
        source_client = self.dsc.get_directory_client(self.file_system_name,
                                                      source_name)
        await source_client.create_directory()

        dest_client = self.dsc.get_directory_client(self.file_system_name,
                                                    "newname")
        await dest_client._rename_path(
            '/' + self.file_system_name + '/' + source_name,
            content_settings=settings)
        props = await dest_client.get_directory_properties()

        self.assertIsNotNone(props)
        self.assertIsNone(props.get('content_settings'))

    @record
    def test_rename_from_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(self._test_rename_from())

    async def _test_rename_from_a_shorter_directory_to_longer_directory(self):
        # TODO: investigate why rename shorter path to a longer one does not work
        pytest.skip("")
        source_name = self._get_directory_reference()
        await self._create_directory_and_get_directory_client(
            directory_name="old")

        dest_client = await self._create_directory_and_get_directory_client(
            directory_name="newname")
        dest_client = await dest_client.create_sub_directory("newsub")

        # Rename the short source path onto the longer destination path.
        await dest_client._rename_path(
            '/' + self.file_system_name + '/' + source_name)
        props = await dest_client.get_directory_properties()

        self.assertIsNotNone(props)

    @record
    def test_rename_from_a_shorter_directory_to_longer_directory_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_from_a_shorter_directory_to_longer_directory())

    async def _test_rename_from_a_directory_in_another_file_system(self):
        """Rename a directory from one file system into another.

        Creates 'olddir' in a fresh source file system, then renames it onto
        a sub-directory of a second file system via _rename_path and checks
        the new location is reachable.
        """
        # create a file dir1 under file system1
        old_file_system_name = self._get_directory_reference("oldfilesystem")
        old_dir_name = "olddir"
        old_client = self.dsc.get_file_system_client(old_file_system_name)
        # time.sleep() blocks the running event loop inside a coroutine;
        # asyncio.sleep keeps it responsive while waiting the same time.
        await asyncio.sleep(30)
        await old_client.create_file_system()
        await old_client.create_directory(old_dir_name)

        # create a dir2 under file system2
        new_name = "newname"
        await asyncio.sleep(5)
        new_directory_client = await self._create_directory_and_get_directory_client(
            directory_name=new_name)
        new_directory_client = await new_directory_client.create_sub_directory(
            "newsub")

        # rename dir1 under file system1 to dir2 under file system2
        await new_directory_client._rename_path('/' + old_file_system_name +
                                                '/' + old_dir_name)
        properties = await new_directory_client.get_directory_properties()

        self.assertIsNotNone(properties)

    @record
    def test_rename_from_a_directory_in_another_file_system_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_from_a_directory_in_another_file_system())

    async def _test_rename_to_an_existing_directory_in_another_file_system(
            self):
        """Rename a directory onto an existing directory in another file system.

        The source directory must become unreachable and the returned client
        must point at the destination URL.
        """
        # create a file dir1 under file system1
        destination_file_system_name = self._get_directory_reference(
            "destfilesystem")
        destination_dir_name = "destdir"
        fs_client = self.dsc.get_file_system_client(
            destination_file_system_name)
        # time.sleep() blocks the running event loop inside a coroutine;
        # asyncio.sleep waits the same time without stalling the loop.
        await asyncio.sleep(30)
        await fs_client.create_file_system()
        destination_directory_client = await fs_client.create_directory(
            destination_dir_name)

        # create a dir2 under file system2
        source_name = "source"
        source_directory_client = await self._create_directory_and_get_directory_client(
            directory_name=source_name)
        source_directory_client = await source_directory_client.create_sub_directory(
            "subdir")

        # rename dir2 under file system2 to dir1 under file system1
        res = await source_directory_client.rename_directory(
            '/' + destination_file_system_name + '/' + destination_dir_name)

        # the source directory has been renamed to destination directory, so it cannot be found
        with self.assertRaises(HttpResponseError):
            await source_directory_client.get_directory_properties()

        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(res.url, destination_directory_client.url)

    @record
    def test_rename_to_an_existing_directory_in_another_file_system_async(
            self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_to_an_existing_directory_in_another_file_system())

    async def _test_rename_with_none_existing_destination_condition_and_source_unmodified_condition(
            self):
        """Rename with destination IfMissing and source IfNotModified conditions.

        The rename must succeed when the destination does not exist and the
        source etag still matches; afterwards the source is unreachable.
        """
        non_existing_dir_name = "nonexistingdir"

        # create a file system1
        destination_file_system_name = self._get_directory_reference(
            "destfilesystem")
        fs_client = self.dsc.get_file_system_client(
            destination_file_system_name)
        await fs_client.create_file_system()

        # create a dir2 under file system2
        source_name = "source"
        source_directory_client = await self._create_directory_and_get_directory_client(
            directory_name=source_name)
        source_directory_client = await source_directory_client.create_sub_directory(
            "subdir")

        # rename dir2 under file system2 to a non existing directory under file system1,
        # when dir1 does not exist and dir2 wasn't modified
        properties = await source_directory_client.get_directory_properties()
        etag = properties['etag']
        res = await source_directory_client.rename_directory(
            '/' + destination_file_system_name + '/' + non_existing_dir_name,
            match_condition=MatchConditions.IfMissing,
            source_etag=etag,
            source_match_condition=MatchConditions.IfNotModified)

        # the source directory has been renamed to destination directory, so it cannot be found
        with self.assertRaises(HttpResponseError):
            await source_directory_client.get_directory_properties()

        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(non_existing_dir_name, res.path_name)

    @record
    def test_rename_with_none_existing_destination_condition_and_source_unmodified_condition_async(
            self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        coro = self._test_rename_with_none_existing_destination_condition_and_source_unmodified_condition()
        asyncio.get_event_loop().run_until_complete(coro)

    async def _test_rename_to_an_non_existing_directory_in_another_file_system(
            self):
        """Rename a directory to a not-yet-existing path in another file system.

        The rename creates the destination implicitly; the source becomes
        unreachable and the returned client reports the new path name.
        """
        # create a file dir1 under file system1
        destination_file_system_name = self._get_directory_reference(
            "destfilesystem")
        non_existing_dir_name = "nonexistingdir"
        fs_client = self.dsc.get_file_system_client(
            destination_file_system_name)
        await fs_client.create_file_system()

        # create a dir2 under file system2
        source_name = "source"
        source_directory_client = await self._create_directory_and_get_directory_client(
            directory_name=source_name)
        source_directory_client = await source_directory_client.create_sub_directory(
            "subdir")

        # rename dir2 under file system2 to dir1 under file system1
        res = await source_directory_client.rename_directory(
            '/' + destination_file_system_name + '/' + non_existing_dir_name)

        # the source directory has been renamed to destination directory, so it cannot be found
        with self.assertRaises(HttpResponseError):
            await source_directory_client.get_directory_properties()

        # assertEquals is a deprecated alias (removed in Python 3.12).
        self.assertEqual(non_existing_dir_name, res.path_name)

    @record
    def test_rename_to_an_non_existing_directory_in_another_file_system_async(
            self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_to_an_non_existing_directory_in_another_file_system())

    async def _test_rename_directory_to_non_empty_directory(self):
        # TODO: investigate why rename non empty dir doesn't work
        pytest.skip("")
        target = await self._create_directory_and_get_directory_client("dir1")
        await target.create_sub_directory("subdir")

        source = await self._create_directory_and_get_directory_client("dir2")
        await source.rename_directory(
            target.file_system_name + '/' + target.path_name)

        # After the rename the source path no longer exists.
        with self.assertRaises(HttpResponseError):
            await source.get_directory_properties()

    @record
    def test_rename_directory_to_non_empty_directory_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_rename_directory_to_non_empty_directory())

    async def _test_get_properties(self):
        # Arrange: a directory created with user-supplied metadata.
        dir_name = self._get_directory_reference()
        meta = {'hello': 'world', 'number': '42'}
        client = self.dsc.get_directory_client(self.file_system_name,
                                               dir_name)
        await client.create_directory(metadata=meta)

        # Act
        props = await client.get_directory_properties()

        # Assert: metadata round-trips through the service.
        self.assertTrue(props)
        self.assertIsNotNone(props.metadata)
        self.assertEqual(props.metadata['hello'], meta['hello'])

    @record
    def test_get_properties_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_get_properties())

    async def _test_using_directory_sas_to_read(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        source_client = await self._create_directory_and_get_directory_client()
        dir_name = source_client.path_name

        # Mint a directory-scoped SAS token limited to read permission.
        sas_token = generate_directory_sas(
            self.dsc.account_name,
            self.file_system_name,
            dir_name,
            self.dsc.credential.account_key,
            permission=DirectorySasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )

        # A client authenticated only with the SAS can still read ACLs.
        sas_client = DataLakeDirectoryClient(self.dsc.url,
                                             self.file_system_name,
                                             dir_name,
                                             credential=sas_token)
        result = await sas_client.get_access_control()

        self.assertIsNotNone(result)

    @record
    def test_using_directory_sas_to_read_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_using_directory_sas_to_read())

    async def _test_using_directory_sas_to_create(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Mint a directory-scoped SAS token limited to create permission,
        # then use it to create the directory it is scoped to.
        dir_name = self._get_directory_reference()
        sas_token = generate_directory_sas(
            self.dsc.account_name,
            self.file_system_name,
            dir_name,
            self.dsc.credential.account_key,
            permission=DirectorySasPermissions(create=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        sas_client = DataLakeDirectoryClient(self.dsc.url,
                                             self.file_system_name,
                                             dir_name,
                                             credential=sas_token)
        result = await sas_client.create_directory()
        self.assertIsNotNone(result)

    @record
    def test_using_directory_sas_to_create_async(self):
        # Sync shim: drive the coroutine-based test on the current event loop.
        asyncio.get_event_loop().run_until_complete(
            self._test_using_directory_sas_to_create())
# ---------------------------------------------------------------------------
# Sample: basic asynchronous DataLakeServiceClient usage
# ---------------------------------------------------------------------------
async def data_lake_service_sample():
    """Demonstrate the core asynchronous DataLakeServiceClient workflow.

    Instantiates service clients (connection string and AAD credential),
    obtains a user delegation key, creates and lists file systems, creates
    a file and a directory with content settings and metadata, and finally
    deletes the file systems.

    NOTE(review): relies on module-level names (``connection_string``,
    ``account_name``, ``active_directory_tenant_id``,
    ``active_directory_application_id``,
    ``active_directory_application_secret``) defined outside this excerpt —
    confirm they are set before running the sample.
    """

    # Instantiate a DataLakeServiceClient using a connection string
    # [START create_datalake_service_client]
    from azure.storage.filedatalake.aio import DataLakeServiceClient
    datalake_service_client = DataLakeServiceClient.from_connection_string(
        connection_string)
    # [END create_datalake_service_client]

    # Instantiate a DataLakeServiceClient Azure Identity credentials.
    # [START create_datalake_service_client_oauth]
    from azure.identity.aio import ClientSecretCredential
    token_credential = ClientSecretCredential(
        active_directory_tenant_id,
        active_directory_application_id,
        active_directory_application_secret,
    )
    datalake_service_client = DataLakeServiceClient(
        "https://{}.dfs.core.windows.net".format(account_name),
        credential=token_credential)
    # [END create_datalake_service_client_oauth]
    # The async context manager closes the client's transport on exit.
    async with datalake_service_client:
        # get user delegation key
        # [START get_user_delegation_key]
        from datetime import datetime, timedelta
        user_delegation_key = await datalake_service_client.get_user_delegation_key(
            datetime.utcnow(),
            datetime.utcnow() + timedelta(hours=1))
        # [END get_user_delegation_key]

        # Create file systems
        # [START create_file_system_from_service_client]
        await datalake_service_client.create_file_system("filesystem")
        # [END create_file_system_from_service_client]
        file_system_client = await datalake_service_client.create_file_system(
            "anotherfilesystem")

        # List file systems
        # [START list_file_systems]
        file_systems = datalake_service_client.list_file_systems()
        async for file_system in file_systems:
            print(file_system.name)
        # [END list_file_systems]

        # Get Clients from DataLakeServiceClient
        file_system_client = datalake_service_client.get_file_system_client(
            file_system_client.file_system_name)
        # [START get_directory_client_from_service_client]
        directory_client = datalake_service_client.get_directory_client(
            file_system_client.file_system_name, "mydirectory")
        # [END get_directory_client_from_service_client]
        # [START get_file_client_from_service_client]
        file_client = datalake_service_client.get_file_client(
            file_system_client.file_system_name, "myfile")
        # [END get_file_client_from_service_client]

        # Create file and set properties
        metadata = {'hello': 'world', 'number': '42'}
        from azure.storage.filedatalake import ContentSettings
        content_settings = ContentSettings(content_language='spanish',
                                           content_disposition='inline')
        await file_client.create_file(content_settings=content_settings)
        await file_client.set_metadata(metadata=metadata)
        file_props = await file_client.get_file_properties()
        print(file_props.metadata)

        # Create file/directory and set properties
        await directory_client.create_directory(
            content_settings=content_settings, metadata=metadata)
        dir_props = await directory_client.get_directory_properties()
        print(dir_props.metadata)

        # Delete File Systems
        # [START delete_file_system_from_service_client]
        await datalake_service_client.delete_file_system("filesystem")
        # [END delete_file_system_from_service_client]
        await file_system_client.delete_file_system()

    # Close the async credential explicitly — presumably it holds its own
    # transport session; confirm against azure.identity.aio docs.
    await token_credential.close()