async def test_list_paths_using_file_sys_delegation_sas_async(
        self, datalake_storage_account_name, datalake_storage_account_key):
    """Verify that a user-delegation SAS scoped to a specific AAD object id
    (``agent_object_id``) can list paths in a file system.

    Flow: create a file system with an OAuth-credentialed service client,
    grant a random object id ownership of the root directory, mint a
    delegation-key SAS restricted to that object id with list permission,
    then list paths using only that SAS.
    """
    self._setUp(datalake_storage_account_name, datalake_storage_account_key)
    url = self._get_account_url(datalake_storage_account_name)
    token_credential = self.generate_oauth_token()
    dsc = DataLakeServiceClient(url, token_credential)
    file_system_name = self._get_file_system_reference()

    # '/' addresses the root directory of the new file system.
    directory_client_name = '/'
    directory_client = (await dsc.create_file_system(file_system_name)) \
        .get_directory_client(directory_client_name)

    # Make a random AAD object id the owner of the root directory so the
    # agent-scoped SAS below is authorized to list under it.
    # NOTE(review): owner/agent_object_id are passed as a uuid.UUID —
    # presumably the SDK stringifies it; confirm against the SDK signature.
    random_guid = uuid.uuid4()
    await directory_client.set_access_control(owner=random_guid,
                                              permissions='0777')
    delegation_key = await dsc.get_user_delegation_key(
        datetime.utcnow(),
        datetime.utcnow() + timedelta(hours=1))

    token = generate_file_system_sas(
        dsc.account_name,
        file_system_name,
        delegation_key,
        permission=DirectorySasPermissions(list=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
        agent_object_id=random_guid
    )
    # Use the locally created client's URL so the test does not depend on
    # self.dsc having been populated by _setUp (was: self.dsc.url).
    sas_fs_client = FileSystemClient(dsc.url, file_system_name,
                                     credential=token)

    paths = []
    async for path in sas_fs_client.get_paths():
        paths.append(path)

    # Reaching this point without ClientAuthenticationError is the real
    # check; assert the listing actually produced a list (the original
    # `assertEqual(0, 0)` was vacuous and could never fail).
    self.assertIsInstance(paths, list)
async def _test_file_sas_only_applies_to_file_level(self):
    """Verify that a file-scoped SAS grants access to that file only.

    A token minted with read/write permission on a single file must allow
    reading the file's properties and appending data to it, while file
    system- and directory-level operations performed with the same token
    must fail with ClientAuthenticationError.
    """
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    file_name = self._get_file_reference()
    directory_name = self._get_directory_reference()
    await self._create_file_and_return_client(directory=directory_name,
                                              file=file_name)

    # Mint a token scoped to the file, with read + write only.
    sas_token = generate_file_sas(
        self.dsc.account_name,
        self.file_system_name,
        directory_name,
        file_name,
        self.dsc.credential.account_key,
        permission=FileSasPermissions(read=True, write=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )

    # File-level read should succeed with the file-scoped token.
    file_path = directory_name + '/' + file_name
    file_client = DataLakeFileClient(self.dsc.url,
                                     self.file_system_name,
                                     file_path,
                                     credential=sas_token)
    properties = await file_client.get_file_properties()
    self.assertIsNotNone(properties)

    # File-level write should succeed as well.
    response = await file_client.append_data(b"abcd", 0, 4,
                                             validate_content=True)
    self.assertIsNotNone(response)

    # The same token must NOT authorize file-system-level operations.
    file_system_client = FileSystemClient(self.dsc.url,
                                          self.file_system_name,
                                          credential=sas_token)
    with self.assertRaises(ClientAuthenticationError):
        await file_system_client.get_file_system_properties()

    # ...nor directory-level operations.
    directory_client = DataLakeDirectoryClient(self.dsc.url,
                                               self.file_system_name,
                                               directory_name,
                                               credential=sas_token)
    with self.assertRaises(ClientAuthenticationError):
        await directory_client.get_directory_properties()
async def get_filesystem_client(token: str) -> FileSystemClient:
    """Build a FileSystemClient from app config and a security access token.

    Args:
        token (str): Security Access Token

    Returns:
        azure.storage.filedatalake.aio.FileSystemClient initialized from
        config and token.
    """
    storage_cfg = config['Azure Storage']
    return FileSystemClient(
        storage_cfg['account_url'],
        storage_cfg['filesystem_name'],
        credential=AzureCredentialAIO(token),
    )