async def _test_read_file_with_user_delegation_key(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Create file
        file_client = await self._create_file_and_return_client()
        data = self.get_random_bytes(1024)
        # Upload data to file
        await file_client.append_data(data, 0, len(data))
        await file_client.flush_data(len(data))

        # Get user delegation key
        token_credential = self.generate_async_oauth_token()
        service_client = DataLakeServiceClient(self._get_oauth_account_url(), credential=token_credential)
        user_delegation_key = await service_client.get_user_delegation_key(datetime.utcnow(),
                                                                     datetime.utcnow() + timedelta(hours=1))

        sas_token = generate_file_sas(file_client.account_name,
                                      file_client.file_system_name,
                                      None,
                                      file_client.path_name,
                                      user_delegation_key=user_delegation_key,
                                      permission=FileSasPermissions(read=True, create=True, write=True, delete=True),
                                      expiry=datetime.utcnow() + timedelta(hours=1),
                                      )

        # download the data and make sure it is the same as the uploaded data
        new_file_client = DataLakeFileClient(self._get_account_url(),
                                             file_client.file_system_name,
                                             file_client.path_name,
                                             credential=sas_token)
        downloaded_data = await new_file_client.read_file()
        self.assertEqual(data, downloaded_data)
Example #2
    async def _test_account_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        file_name = self._get_file_reference()
        # create a file under the root directory
        await self._create_file_and_return_client(file=file_name)

        # generate a token with file level read permission
        token = generate_account_sas(
            self.dsc.account_name,
            self.dsc.credential.account_key,
            ResourceTypes(file_system=True, object=True),
            AccountSasPermissions(read=True),
            datetime.utcnow() + timedelta(hours=1),
        )

        # read the created file, which is under the root directory
        file_client = DataLakeFileClient(self.dsc.url,
                                         self.file_system_name,
                                         file_name,
                                         credential=token)
        properties = await file_client.get_file_properties()

        # make sure we can read the file properties
        self.assertIsNotNone(properties)

        # try to write to the created file with the read-only token; this should fail
        with self.assertRaises(HttpResponseError):
            await file_client.append_data(b"abcd", 0, 4)
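
The HttpResponseError above is expected because the account SAS was generated with read-only permission. A minimal sketch, using the same helpers as the test, of a token that would also allow the append:

        # Sketch only: an account SAS that also grants write access, so append_data would succeed.
        writable_token = generate_account_sas(
            self.dsc.account_name,
            self.dsc.credential.account_key,
            ResourceTypes(file_system=True, object=True),
            AccountSasPermissions(read=True, write=True),
            datetime.utcnow() + timedelta(hours=1),
        )
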
    async def test_rename_file_with_file_sas_async(self, datalake_storage_account_name, datalake_storage_account_key):
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # SAS URL is calculated from storage key, so this test runs live only
        token = generate_file_sas(self.dsc.account_name,
                                  self.file_system_name,
                                  None,
                                  "oldfile",
                                  datalake_storage_account_key,
                                  permission=FileSasPermissions(read=True, create=True, write=True, delete=True, move=True),
                                  expiry=datetime.utcnow() + timedelta(hours=1),
                                  )

        new_token = generate_file_sas(self.dsc.account_name,
                                      self.file_system_name,
                                      None,
                                      "newname",
                                      datalake_storage_account_key,
                                      permission=FileSasPermissions(read=True, create=True, write=True, delete=True),
                                      expiry=datetime.utcnow() + timedelta(hours=1),
                                      )

        # create the source file using the file-level SAS token
        file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, "oldfile", credential=token)
        await file_client.create_file()
        data_bytes = b"abc"
        await file_client.append_data(data_bytes, 0, 3)
        await file_client.flush_data(3)
        # the rename target is "<file system>/<new path>" with the destination SAS appended as a query string
        new_client = await file_client.rename_file(file_client.file_system_name + '/' + 'newname' + '?' + new_token)

        data = await (await new_client.download_file()).readall()
        self.assertEqual(data, data_bytes)
        self.assertEqual(new_client.path_name, "newname")
    async def _test_rename_file_with_file_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return
        token = generate_file_sas(self.dsc.account_name,
                                  self.file_system_name,
                                  None,
                                  "oldfile",
                                  self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
                                  permission=FileSasPermissions(read=True, create=True, write=True, delete=True, move=True),
                                  expiry=datetime.utcnow() + timedelta(hours=1),
                                  )

        new_token = generate_file_sas(self.dsc.account_name,
                                      self.file_system_name,
                                      None,
                                      "newname",
                                      self.settings.STORAGE_DATA_LAKE_ACCOUNT_KEY,
                                      permission=FileSasPermissions(read=True, create=True, write=True, delete=True),
                                      expiry=datetime.utcnow() + timedelta(hours=1),
                                      )

        # create the source file using the file-level SAS token
        file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, "oldfile", credential=token)
        await file_client.create_file()
        data_bytes = b"abc"
        await file_client.append_data(data_bytes, 0, 3)
        await file_client.flush_data(3)
        # the rename target is "<file system>/<new path>" with the destination SAS appended as a query string
        new_client = await file_client.rename_file(file_client.file_system_name + '/' + 'newname' + '?' + new_token)

        data = await (await new_client.download_file()).readall()
        self.assertEqual(data, data_bytes)
        self.assertEqual(new_client.path_name, "newname")
Example #5
    async def _test_file_sas_only_applies_to_file_level(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        file_name = self._get_file_reference()
        directory_name = self._get_directory_reference()
        await self._create_file_and_return_client(directory=directory_name,
                                                  file=file_name)

        # generate a token with file level read and write permissions
        token = generate_file_sas(
            self.dsc.account_name,
            self.file_system_name,
            directory_name,
            file_name,
            self.dsc.credential.account_key,
            permission=FileSasPermissions(read=True, write=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )

        # read the created file, which is under the directory created above
        file_client = DataLakeFileClient(self.dsc.url,
                                         self.file_system_name,
                                         directory_name + '/' + file_name,
                                         credential=token)
        properties = await file_client.get_file_properties()

        # make sure we can read the file properties
        self.assertIsNotNone(properties)

        # try to write to the created file; the file-level token includes write permission, so this should succeed
        response = await file_client.append_data(b"abcd",
                                                 0,
                                                 4,
                                                 validate_content=True)
        self.assertIsNotNone(response)

        # the token is for file level, so users are not supposed to have access to file system level operations
        file_system_client = FileSystemClient(self.dsc.url,
                                              self.file_system_name,
                                              credential=token)
        with self.assertRaises(ClientAuthenticationError):
            await file_system_client.get_file_system_properties()

        # the token is for file level, so users are not supposed to have access to directory level operations
        directory_client = DataLakeDirectoryClient(self.dsc.url,
                                                   self.file_system_name,
                                                   directory_name,
                                                   credential=token)
        with self.assertRaises(ClientAuthenticationError):
            await directory_client.get_directory_properties()
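
The file-level token above cannot authorize file system or directory operations. For contrast, a minimal sketch of a file-system-scoped SAS, assuming generate_file_system_sas and FileSystemSasPermissions are imported from azure.storage.filedatalake:

        # Sketch only: a file-system-level SAS would authorize the calls that raise ClientAuthenticationError above.
        fs_token = generate_file_system_sas(
            self.dsc.account_name,
            self.file_system_name,
            self.dsc.credential.account_key,
            permission=FileSystemSasPermissions(read=True, write=True, delete=True, list=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
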
    async def _test_create_file_using_oauth_token_credential(self):
        # Arrange
        file_name = self._get_file_reference()
        token_credential = self.generate_async_oauth_token()

        # Create a file client authenticated with the OAuth token credential
        file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, file_name,
                                         credential=token_credential)

        response = await file_client.create_file()

        # Assert
        self.assertIsNotNone(response)
    async def test_create_file_using_oauth_token_credential_async(self, datalake_storage_account_name, datalake_storage_account_key):
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)
        # Arrange
        file_name = self._get_file_reference()
        token_credential = self.generate_oauth_token()

        # Create a file client authenticated with the OAuth token credential
        file_client = DataLakeFileClient(self.dsc.url, self.file_system_name, file_name,
                                         credential=token_credential)

        response = await file_client.create_file()

        # Assert
        self.assertIsNotNone(response)
Example #8
    def test_account_sas_raises_if_sas_already_in_uri(self):
        with self.assertRaises(ValueError):
            DataLakeFileClient(self.dsc.url + "?sig=foo",
                               self.file_system_name,
                               "foo",
                               credential=AzureSasCredential("?foo=bar"))
Example #9
def instantiate_file_client_from_conn_str():
    # [START instantiate_file_client_from_conn_str]
    from azure.storage.filedatalake.aio import DataLakeFileClient
    # connection_string is assumed to be defined elsewhere in the sample (e.g. read from an environment variable)
    DataLakeFileClient.from_connection_string(connection_string, "myfilesystem", "mydirectory", "myfile")
    # [END instantiate_file_client_from_conn_str]
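
The connection-string sample above only constructs the client. A hedged usage sketch of the async client actually being awaited and closed; the environment variable name is an assumption, and any valid connection string works:

import asyncio
import os
from azure.storage.filedatalake.aio import DataLakeFileClient

async def read_properties_from_conn_str():
    conn_str = os.environ["AZURE_STORAGE_CONNECTION_STRING"]  # assumed variable name
    # The async client supports "async with", which closes the underlying transport on exit.
    async with DataLakeFileClient.from_connection_string(
            conn_str, "myfilesystem", "mydirectory", "myfile") as file_client:
        return await file_client.get_file_properties()

# asyncio.run(read_properties_from_conn_str())
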
    async def test_account_sas_raises_if_sas_already_in_uri(self, datalake_storage_account_name, datalake_storage_account_key):
        await self._setUp(datalake_storage_account_name, datalake_storage_account_key)

        with self.assertRaises(ValueError):
            DataLakeFileClient(self.dsc.url + "?sig=foo", self.file_system_name, "foo", credential=AzureSasCredential("?foo=bar"))