def test_rename_file_with_file_system_sas(self, datalake_storage_account_name, datalake_storage_account_key):
    """Rename a root-level file using a file-system SAS and verify content and path survive."""
    self._setUp(datalake_storage_account_name, datalake_storage_account_key)

    # The SAS is computed from the account key, so this test only runs live.
    sas_token = generate_file_system_sas(
        self.dsc.account_name,
        self.file_system_name,
        self.dsc.credential.account_key,
        FileSystemSasPermissions(write=True, read=True, delete=True),
        datetime.utcnow() + timedelta(hours=1),
    )

    # Create a file directly under the root directory and flush three bytes into it.
    source_file = DataLakeFileClient(self.dsc.url, self.file_system_name, "oldfile", credential=sas_token)
    source_file.create_file()
    payload = b"abc"
    source_file.append_data(payload, 0, 3)
    source_file.flush_data(3)

    # Rename within the same file system, then confirm the bytes and the new path name.
    renamed_client = source_file.rename_file(source_file.file_system_name + '/' + 'newname')
    downloaded = renamed_client.download_file().readall()
    self.assertEqual(downloaded, payload)
    self.assertEqual(renamed_client.path_name, "newname")
async def test_get_access_control_using_delegation_sas_async(
        self, datalake_storage_account_name, datalake_storage_account_key):
    """Read the root directory's ACL through a user-delegation SAS bound to an agent object id."""
    self._setUp(datalake_storage_account_name, datalake_storage_account_key)

    # Build an OAuth-authenticated service client against the same account.
    url = self._get_account_url(datalake_storage_account_name)
    token_credential = self.generate_oauth_token()
    dsc = DataLakeServiceClient(url, token_credential, logging_enable=True)

    file_system_name = self._get_file_system_reference()
    directory_client_name = '/'
    # Create the file system; the directory client obtained here is intentionally discarded.
    (await dsc.create_file_system(file_system_name)).get_directory_client(directory_client_name)

    # Make a random GUID the owner of the root directory and open up its permissions.
    directory_client = self.dsc.get_directory_client(file_system_name, directory_client_name)
    random_guid = uuid.uuid4()
    await directory_client.set_access_control(owner=random_guid, permissions='0777')
    acl = await directory_client.get_access_control()

    # Mint a user-delegation SAS restricted to that same GUID as the agent object id.
    delegation_key = await dsc.get_user_delegation_key(
        datetime.utcnow(), datetime.utcnow() + timedelta(hours=1))
    sas_token = generate_file_system_sas(
        dsc.account_name,
        file_system_name,
        delegation_key,
        permission=FileSystemSasPermissions(
            read=True, execute=True, manage_access_control=True, manage_ownership=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
        agent_object_id=random_guid
    )

    # The SAS-authenticated client must be able to read the ACL back.
    sas_directory_client = DataLakeDirectoryClient(
        self.dsc.url, file_system_name, directory_client_name,
        credential=sas_token, logging_enable=True)
    access_control = await sas_directory_client.get_access_control()
    self.assertIsNotNone(access_control)
async def test_set_file_system_acl_async(self, datalake_storage_account_name, datalake_storage_account_key):
    """Set and read back file-system access policies, with and without a public-access level."""
    self._setUp(datalake_storage_account_name, datalake_storage_account_key)

    # Act
    file_system = await self._create_file_system()
    policy = AccessPolicy(
        permission=FileSystemSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
        start=datetime.utcnow())

    # First pass: one identifier plus an explicit public-access level.
    single_identifier = {'testid': policy}
    response = await file_system.set_file_system_access_policy(
        single_identifier, public_access=PublicAccess.FileSystem)
    self.assertIsNotNone(response.get('etag'))
    self.assertIsNotNone(response.get('last_modified'))

    first_acl = await file_system.get_file_system_access_policy()
    self.assertIsNotNone(first_acl['public_access'])
    self.assertEqual(len(first_acl['signed_identifiers']), 1)

    # Second pass: omitting public_access must reset it to None while both identifiers stick.
    double_identifier = {'testid': policy, 'test2': policy}
    await file_system.set_file_system_access_policy(double_identifier)
    second_acl = await file_system.get_file_system_access_policy()
    self.assertIsNone(second_acl['public_access'])
    self.assertEqual(len(second_acl['signed_identifiers']), 2)
def test_upload_download_data(self):
    """Exercise a directory-scoped SAS: a transfer to the directory the SAS was issued
    for is expected to succeed, while a transfer to any other directory must be rejected.

    Fix: the original two ``except:`` clauses were bare, which also swallows
    ``SystemExit``/``KeyboardInterrupt``; they are narrowed to ``except Exception``
    so only real runtime failures are reported. Printed output is unchanged.
    """
    registered_path = "demo-folder-allowed"
    invalid_path = "demo-folder-restricted"

    print("\nGenerating SAS for directory: {}".format(registered_path))
    # Generate a token scoped to a single directory, with all permissions enabled.
    token = generate_directory_sas(
        self.ACCOUNT_NAME,
        self.STORAGE_FILESYSTEM,
        registered_path,
        self.ACCOUNT_KEY,
        permission=FileSystemSasPermissions(read=True, write=True, delete=True, list=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )

    # Try with a path that matches the SAS token — expected to succeed.
    try:
        self.inner_transfer(registered_path, token)
    except Exception:
        print("Upload failed.")

    # Try with a path that doesn't match the SAS token — should fail with an auth error!
    try:
        self.inner_transfer(invalid_path, token)
    except Exception:
        print("Upload failed.")
async def _test_rename_dir_with_file_system_sas(self):
    """Rename a directory via a file-system SAS; a trailing '?' on the target must be tolerated."""
    if TestMode.need_recording_file(self.test_mode):
        return

    # SAS is derived from the account key, so this path only runs live.
    sas_token = generate_file_system_sas(
        self.dsc.account_name,
        self.file_system_name,
        self.dsc.credential.account_key,
        FileSystemSasPermissions(write=True, read=True, delete=True),
        datetime.utcnow() + timedelta(hours=1),
    )

    # Create a directory under the root of the file system.
    dir_client = DataLakeDirectoryClient(self.dsc.url, self.file_system_name, "olddir", credential=sas_token)
    await dir_client.create_directory()

    # The stray '?' suffix exercises the client's handling of reserved URL characters.
    renamed_client = await dir_client.rename_directory(
        dir_client.file_system_name + '/' + 'newdir' + '?')

    props = await renamed_client.get_directory_properties()
    self.assertEqual(props.name, "newdir")
def test_using_directory_sas_to_read(self):
    """List and print the contents of a directory using a directory-scoped SAS token."""
    storage_directory = "demo-folder-allowed"
    print("\nGenerating SAS for directory: {}".format(storage_directory))
    print("\nReading contents...")

    # Generate a token for a single directory with all permissions enabled.
    token = generate_directory_sas(
        self.ACCOUNT_NAME,
        self.STORAGE_FILESYSTEM,
        storage_directory,
        self.ACCOUNT_KEY,
        permission=FileSystemSasPermissions(read=True, write=True, delete=True, list=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )

    # Authenticate with the SAS and enumerate the paths beneath the directory.
    service_client = DataLakeServiceClient(self.STORAGE_URL, credential=token)
    file_system_client = service_client.get_file_system_client(self.STORAGE_FILESYSTEM)
    for path_item in file_system_client.get_paths(storage_directory):
        print(path_item.name)