def get_storage_account_details(subscription_id, creds, resource_group_name, account_name):
    """Collect management-plane and data-plane details for a storage account.

    Fetches the account properties and access keys via the management client,
    then uses the primary ('key1') key to enumerate the account's containers,
    queues and tables and to read each service's properties.

    :param subscription_id: Azure subscription to operate in.
    :param creds: credentials object accepted by StorageManagementClient.
    :param resource_group_name: resource group containing the account.
    :param account_name: name of the storage account.
    :returns: a populated StorageAccountDetails instance.
    """
    mgmt_client = StorageManagementClient(creds, subscription_id)
    props = mgmt_client.storage_accounts.get_properties(
        resource_group_name,
        account_name,
    )
    keys = mgmt_client.storage_accounts.list_keys(
        resource_group_name,
        account_name,
    )

    cloud_account = CloudStorageAccount(account_name, keys.key1)
    blob_service = cloud_account.create_blob_service()
    file_service = cloud_account.create_file_service()
    queue_service = cloud_account.create_queue_service()
    table_service = cloud_account.create_table_service()

    details = StorageAccountDetails()
    details.account_props = props
    details.account_keys = keys
    details.blob_containers = blob_service.iterate_containers()
    details.queues = queue_service.iterate_queues()
    # TODO: find out why listing shares doesn't work
    # details.shares = file_service.iterate_shares()
    details.shares = []
    details.tables = table_service.iterate_tables()
    details.blob_service_properties = blob_service.get_blob_service_properties()
    details.queue_service_properties = queue_service.get_queue_service_properties()
    details.table_service_properties = table_service.get_table_service_properties()
    return details
def get_storage_account_details(creds, resource_group_name, account_name):
    """Gather a full picture of a storage account.

    Combines the management-plane view (properties, keys) with data-plane
    listings (containers, queues, tables) and per-service properties,
    authenticating to the data plane with the primary ('key1') access key.

    :param creds: credentials object accepted by StorageManagementClient.
    :param resource_group_name: resource group containing the account.
    :param account_name: name of the storage account.
    :returns: a populated StorageAccountDetails instance.
    """
    mgmt_client = StorageManagementClient(creds)
    props_response = mgmt_client.storage_accounts.get_properties(
        resource_group_name, account_name)
    keys_response = mgmt_client.storage_accounts.list_keys(
        resource_group_name, account_name)

    cloud_account = CloudStorageAccount(
        account_name, keys_response.storage_account_keys.key1)
    blob_service = cloud_account.create_blob_service()
    file_service = cloud_account.create_file_service()
    queue_service = cloud_account.create_queue_service()
    table_service = cloud_account.create_table_service()

    details = StorageAccountDetails()
    details.account_props = props_response.storage_account
    details.account_keys = keys_response.storage_account_keys
    details.blob_containers = blob_service.iterate_containers()
    details.queues = queue_service.iterate_queues()
    # TODO: find out why listing shares doesn't work
    # details.shares = file_service.iterate_shares()
    details.shares = []
    details.tables = table_service.iterate_tables()
    details.blob_service_properties = blob_service.get_blob_service_properties()
    details.queue_service_properties = queue_service.get_queue_service_properties()
    details.table_service_properties = table_service.get_table_service_properties()
    return details
def _turn_on_minute_metrics(self, name, key, metric_table_retention_period):
    """Enable minute-granularity metrics on every data-plane service.

    Applies the same Metrics settings (enabled, include_apis, with a
    retention policy of ``metric_table_retention_period`` days) to the
    table, blob, file and queue services of the given account.

    :param name: storage account name.
    :param key: storage account access key.
    :param metric_table_retention_period: retention period, in days.
    """
    account = CloudStorageAccount(account_name=name, account_key=key,
                                  sas_token=None)
    minute_metrics = Metrics(
        enabled=True,
        include_apis=True,
        retention_policy=RetentionPolicy(
            enabled=True, days=metric_table_retention_period),
    )
    # Same settings pushed to each service, in the original order:
    # table, blob, file, queue.
    account.create_table_service().set_table_service_properties(
        minute_metrics=minute_metrics)
    account.create_page_blob_service().set_blob_service_properties(
        minute_metrics=minute_metrics)
    account.create_file_service().set_file_service_properties(
        minute_metrics=minute_metrics)
    account.create_queue_service().set_queue_service_properties(
        minute_metrics=minute_metrics)
def create_file_share(**_): '''Creates an Azure File Share''' # Get resource config values res_cfg = utils.get_resource_config() or dict() share_name = ctx.node.properties.get('name') metadata = res_cfg.get('metadata') quota = res_cfg.get('quota') fail_on_exist = res_cfg.get('fail_on_exist', False) # Check if invalid external resource if ctx.node.properties.get('use_external_resource', False) and \ not share_name: raise NonRecoverableError( '"use_external_resource" specified without a resource "name"') # Get the storage account storage_account = utils.get_parent(ctx.instance, rel_type=constants.REL_CONTAINED_IN_SA) storage_account_name = utils.get_resource_name(_ctx=storage_account) # Get the storage account keys keys = StorageAccount(_ctx=storage_account).list_keys() if not isinstance(keys, list) or len(keys) < 1: raise RecoverableError( 'StorageAccount reported no usable authentication keys') # Get an interface to the Storage Account storage_account_key = keys[0].get('key') storageacct = CloudStorageAccount(account_name=storage_account_name, account_key=storage_account_key) # Get an interface to the File Service filesvc = storageacct.create_file_service() if ctx.node.properties.get('use_external_resource', False): # Attempt to use an existing File Share (if specified) ctx.logger.debug( 'Checking for existing File Share "{0}"'.format(share_name)) try: share = filesvc.get_share_properties(share_name) metadata = share.get('metadata', dict()) quota = share.get('properties', dict()).get('quota') created = False except Exception as ex: ctx.logger.error( 'File Share "{0}" does not exist and ' '"use_external_resource" is set to true'.format(share_name)) raise NonRecoverableError(ex) else: # Generate a new File Share name if needed if not share_name: ctx.logger.info('Generating a new File Share name') for _ in xrange(0, 10): tmpname = file_share_name_generator() if not file_share_exists(filesvc, tmpname): share_name = tmpname break # Handle name error if not share_name: 
raise NonRecoverableError( 'Error generating a new File Share name. Failed ' 'after 10 tries.') # Attempt to create the File Share ctx.logger.debug('Creating File Share "{0}"'.format(share_name)) created = filesvc.create_share(share_name=share_name, metadata=metadata, quota=quota, fail_on_exist=False) if not created: ctx.logger.warn('File Share already exists') if fail_on_exist: raise NonRecoverableError( 'File Share already exists in the storage account and ' '"fail_on_exist" set to True') # Set run-time properties ctx.instance.runtime_properties['name'] = share_name ctx.instance.runtime_properties['quota'] = quota ctx.instance.runtime_properties['metadata'] = metadata ctx.instance.runtime_properties['created'] = created ctx.instance.runtime_properties['storage_account'] = storage_account_name ctx.instance.runtime_properties['username'] = storage_account_name ctx.instance.runtime_properties['password'] = storage_account_key ctx.instance.runtime_properties['uri'] = '{0}.{1}/{2}'.format( storage_account_name, constants.CONN_STORAGE_FILE_ENDPOINT, share_name)
## USE WITH CAUTION ** duplicate files on the target will be overwritten if they already exist. ** ##define Azure storage account details source_account_name = '**insert your source storage account name here**' source_account_key = '**insert your source key here**' source_account_sas = '**insert your source SAS token here**' target_account_name = '**insert your target storage account name here**' target_account_key = '**insert your target key here**' ##create object that contains the cloud storage account source_account = CloudStorageAccount(source_account_name, source_account_key) target_account = CloudStorageAccount(target_account_name, target_account_key) ##create object that contains the file service of the storage account source_file_service = source_account.create_file_service() target_file_service = target_account.create_file_service() ##define the function to list all files and directories within a given folder ##the function will call itself if there are nested folders, so we include a nest level to track how deep we are to create our indents def list_file_and_dir( share, current_dir, nest_level ): #receiving the current share, current direcotry if any, and nest level source_file_list = source_file_service.list_directories_and_files( share.name, current_dir ) #build a directory and file list for the current working directory for file_or_dir in source_file_list: #interate through each file or directory in the current working directory file_type = file_or_dir.__class__.__name__ #get the file type of 'File' or 'Directory' for the current object for x in range(0, nest_level ): #create our indents based on the current nest level
class StorageAccountTest(StorageTestCase):
    """Tests for CloudStorageAccount: per-service factory methods,
    credential handling (key / SAS / emulator), and account-SAS generation.
    """

    def setUp(self):
        super(StorageAccountTest, self).setUp()
        self.account_name = self.settings.STORAGE_ACCOUNT_NAME
        self.account_key = self.settings.STORAGE_ACCOUNT_KEY
        # Pre-built (expired) SAS token fixture; only its round-tripping is
        # asserted, it is never sent to the service
        self.sas_token = '?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D'
        self.account = CloudStorageAccount(self.account_name, self.account_key)

    #--Helpers-----------------------------------------------------------------
    def validate_service(self, service, type):
        # Common checks: service exists, is the expected class, and carries
        # the account credentials through
        self.assertIsNotNone(service)
        self.assertIsInstance(service, type)
        self.assertEqual(service.account_name, self.account_name)
        self.assertEqual(service.account_key, self.account_key)

    #--Test cases --------------------------------------------------------
    def test_create_block_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_page_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_page_blob_service()

        # Assert
        self.validate_service(service, PageBlobService)

    def test_create_append_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_append_blob_service()

        # Assert
        self.validate_service(service, AppendBlobService)

    def test_create_table_service(self):
        # Arrange

        # Act
        service = self.account.create_table_service()

        # Assert
        self.validate_service(service, TableService)

    def test_create_queue_service(self):
        # Arrange

        # Act
        service = self.account.create_queue_service()

        # Assert
        self.validate_service(service, QueueService)

    def test_create_file_service(self):
        # Arrange

        # Act
        service = self.account.create_file_service()

        # Assert
        self.validate_service(service, FileService)

    def test_create_service_no_key(self):
        # Arrange

        # Act: an account with neither key nor SAS cannot build a service
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()

        # Assert

    def test_create_account_sas(self):
        # Arrange

        # Act: SAS-only account — service keeps the token, has no key
        sas_account = CloudStorageAccount(self.account_name,
                                          sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)

    def test_create_account_sas_and_key(self):
        # Arrange

        # Act: when both key and SAS are given, key-based validation applies
        account = CloudStorageAccount(self.account_name, self.account_key,
                                      self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_account_emulated(self):
        # Arrange

        # Act: emulator mode supplies the well-known dev account credentials
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)

    @record
    def test_generate_account_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange: account-level SAS with read permission on blob objects,
        # valid for one hour
        token = self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = self.account.create_block_blob_service()
        data = b'shared access signature with read permission on blob'
        container_name='container1'
        blob_name = 'blob1.txt'

        try:
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, data)

            # Act: fetch the blob anonymously over HTTP using only the SAS
            url = service.make_blob_url(
                container_name,
                blob_name,
                sas_token=token,
            )
            response = requests.get(url)

            # Assert
            self.assertTrue(response.ok)
            self.assertEqual(data, response.content)

        finally:
            # Clean up the live container even if an assertion fails
            service.delete_container(container_name)
#--------- Create Input DB -------------------------------- print('DB In \n') result = DB_processing(client, configIn, True)[0] shareAPI = result['sharename'] dirAPI = result['directoryname'] fileAPI = result['filename'] print('\nDone \n ') print('---------------------------------------------------------------') print('Emotion API \n') #--------- Getting Image from file Storage -------------------------------- file_service2 = account.create_file_service() image4API = file_service2.get_file_to_bytes(shareAPI, dirAPI, fileAPI) #with open( image4API, 'rb' ) as f: data = image4API.content else: #--------- Getting Image Local -------------------------------- with open(pathToFileInDisk, 'rb') as f: data = f.read() print(data) headers = dict() headers['Ocp-Apim-Subscription-Key'] = _key
import azure.common
from azure.storage import CloudStorageAccount
import sys

## define Azure storage account details
source_account_name = '**insert your storage account name here**'
source_account_key = '**insert your key here**'

## create object that contains the cloud storage account
source_account = CloudStorageAccount(source_account_name, source_account_key)

## create object that contains the file service of the storage account
source_file_service = source_account.create_file_service()


def list_file_and_dir(share, current_dir, nest_level):
    """Print an indented tree of every file and directory under
    ``current_dir`` of ``share``, recursing into sub-directories.

    :param share: share object whose ``name`` is used for listing calls.
    :param current_dir: directory path relative to the share root
        ('' means the root itself).
    :param nest_level: current recursion depth, used for indentation.
    """
    entries = source_file_service.list_directories_and_files(
        share.name, current_dir)
    for entry in entries:
        # One space of indent per nesting level, then the entry name
        sys.stdout.write(' ' * nest_level)
        print('|--' + entry.name)
        # The service returns File and Directory objects; recurse into
        # directories only
        if entry.__class__.__name__ == 'Directory':
            # At the share root there is no parent path to prepend:
            # '/dirname' would be invalid, but 'dirname/subdir' is fine
            if current_dir == '':
                child_dir = entry.name
            else:
                child_dir = current_dir + '/' + entry.name
            list_file_and_dir(share, child_dir, nest_level + 1)

## this is where our main program starts