def get_sas_token(account_name, account_key, container, readonly=True):
    service = BlockBlobService(account_name=account_name, account_key=account_key)
    # res = service.create_container(container)
    if readonly:
        permission = ContainerPermissions(read=True, list=True, write=False, delete=False)
    else:
        permission = ContainerPermissions(read=True, list=True, write=True, delete=True)
    expiry = (datetime.utcnow() + timedelta(days=7)).replace(
        hour=0, minute=0, second=0, microsecond=0)
    start = (datetime.utcnow() - timedelta(days=7)).replace(
        hour=0, minute=0, second=0, microsecond=0)
    token = service.generate_container_shared_access_signature(
        container, permission=permission, expiry=expiry, start=start)
    return token

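# A minimal usage sketch for get_sas_token above, assuming the same
# azure-storage-blob 2.x SDK and imports as the surrounding snippets;
# 'myaccount', '<account-key>' and 'mycontainer' are placeholder values.
token = get_sas_token('myaccount', '<account-key>', 'mycontainer', readonly=True)

# Authenticate with the SAS token instead of the account key; the read-only
# token grants read + list, so listing the container works.
read_client = BlockBlobService(account_name='myaccount', sas_token=token)
for blob in read_client.list_blobs('mycontainer'):
    print(blob.name)
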
def push2AzureAsBlobs(self):
    ''' push the files to azure '''
    service = BlockBlobService(account_name=self.account_name, account_key=self.account_key)
    permission = ContainerPermissions(read=True, write=True)
    sas = service.generate_container_shared_access_signature(
        container_name=self.container_name,
        permission=permission,
        protocol='https',
        start=datetime.now(),
        expiry=datetime.now() + timedelta(days=1))
    service = BlockBlobService(account_name=self.account_name, sas_token=sas)
    logfiles = self.getfilelist()
    for insfile in logfiles:
        print(insfile)
        basename = os.path.basename(insfile)
        if service.exists(container_name=self.container_name, blob_name=basename):
            print('File ' + basename + ' has been uploaded before.')
        else:
            service.create_blob_from_path(
                container_name=self.container_name,
                blob_name=basename,
                file_path=insfile,
                content_settings=ContentSettings(
                    content_type=mimetypes.guess_type(basename)[0]),
                validate_content=False)

def get_blob_sas_url():
    from azure.storage.blob import BlockBlobService, ContainerPermissions

    # new file name
    blob_name = 'configfile'
    accountkey = list_keys()

    # create a template sas token for the container
    service = BlockBlobService(account_name=get_straccount(), account_key=accountkey)
    permissions = ContainerPermissions(read=True, write=True, delete=True, list=True)
    temp_token1 = service.generate_blob_shared_access_signature(
        container_name='vpnsiteconfig',
        blob_name=blob_name,
        permission=permissions,
        expiry='2020-01-01')
    blob_sas_template_uri = service.make_blob_url(
        container_name='vpnsiteconfig',
        blob_name=blob_name,
        protocol='https',
        sas_token=temp_token1)
    return blob_sas_template_uri

def create_sas_token(client, containerName):
    sas_token = client.generate_container_shared_access_signature(
        containerName,
        ContainerPermissions(write=True, read=True, list=True),
        expiry=datetime.datetime.utcnow() + timedelta(hours=1),
        start=datetime.datetime.utcnow() + timedelta(hours=-1),
    )
    return sas_token

def generate_blob_shared_access_signature(self, container_name, blob_name):
    permission = ContainerPermissions(read=True, write=True)
    return self.__blockblob_service.generate_blob_shared_access_signature(
        container_name,
        blob_name,
        permission,
        protocol='https',
        start=datetime.datetime.utcnow(),
        expiry=datetime.datetime.utcnow() + timedelta(days=1))

async def _test_container_access_policy_async(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Instantiate a BlobServiceClient using a connection string
    from azure.storage.blob.aio import BlobServiceClient
    blob_service_client = BlobServiceClient.from_connection_string(self.connection_string)

    # Instantiate a ContainerClient
    container_client = blob_service_client.get_container_client("myaccesscontainerasync")

    try:
        # Create new Container
        await container_client.create_container()

        # [START set_container_access_policy]
        # Create access policy
        from azure.storage.blob import AccessPolicy, ContainerPermissions
        access_policy = AccessPolicy(
            permission=ContainerPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            start=datetime.utcnow() - timedelta(minutes=1))
        identifiers = {'test': access_policy}

        # Set the access policy on the container
        await container_client.set_container_access_policy(signed_identifiers=identifiers)
        # [END set_container_access_policy]

        # [START get_container_access_policy]
        policy = await container_client.get_container_access_policy()
        # [END get_container_access_policy]

        # [START generate_sas_token]
        # Use access policy to generate a sas token
        sas_token = container_client.generate_shared_access_signature(
            policy_id='my-access-policy-id')
        # [END generate_sas_token]

        # Use the sas token to authenticate a new client
        # [START create_container_client_sastoken]
        from azure.storage.blob.aio import ContainerClient
        container = ContainerClient(
            container_url="https://account.blob.core.windows.net/mycontainerasync",
            credential=sas_token,
        )
        # [END create_container_client_sastoken]
    finally:
        # Delete container
        await container_client.delete_container()

def _get_build_blob_container_url(self) -> str:
    self.storage.create_container('builds', fail_on_exist=False)
    return self.storage.make_blob_url(
        container_name='builds',
        blob_name='',
        protocol='https',
        sas_token=self.storage.generate_container_shared_access_signature(
            container_name='builds',
            permission=ContainerPermissions(list=True, write=True),
            expiry=(datetime.utcnow() + timedelta(days=1))))

def __init__(self, credentials):
    """ Constructor; takes an Azure_Credentials object as its argument. """
    self.credentials = credentials
    self.container_permissions = ContainerPermissions(read=True, write=True)
    self.block_blob_service = BlockBlobService(
        account_name=self.credentials.account_name,
        account_key=self.credentials.account_key,
    )

def _create_output_container_folder() -> str:
    """ Create output storage container """
    # output_container_name = 'output-{}'.format(job_id)
    storage_client.create_container(container_name='output')
    return storage_client.make_blob_url(
        container_name='output',
        blob_name='',
        protocol='https',
        sas_token=storage_client.generate_container_shared_access_signature(
            container_name='output',
            permission=ContainerPermissions(list=True, write=True),
            expiry=(datetime.utcnow() + timedelta(days=1))))

def _list_build_resource_files() -> Iterable[ResourceFile]:
    """ List the files belonging to the target build in the build blob container """
    permission = ContainerPermissions(read=True)
    expiry = (datetime.utcnow() + timedelta(days=1))
    build_sas = storage_client.generate_container_shared_access_signature(
        container_name='builds', permission=permission, expiry=expiry)
    app_sas = storage_client.generate_container_shared_access_signature(
        container_name='app', permission=permission, expiry=expiry)
    return [
        ResourceFile(
            blob_source=storage_client.make_blob_url(
                'builds', output_file_name, 'https', build_sas),
            file_path=output_file_name),
        ResourceFile(
            blob_source=storage_client.make_blob_url(
                'app', 'tangier.tar', 'https', app_sas),
            file_path='tangier.tar')
    ]

def get_blob_sas_url():
    """ Builds a SAS-signed URL for the config blob in the 'vpnsiteconfig' container. """
    from azure.storage.blob import BlockBlobService, ContainerPermissions

    # new file name
    blob_name = 'configfile'
    accountkey = list_keys()
    # from azure.keyvault import SecretId

    # create the blob sas definition template
    # the sas template uri for service sas definitions contains the storage entity url with the template token
    # this sample demonstrates constructing the template uri for a blob container, but a similar approach can
    # be used for all other storage services, i.e. File, Queue, Table

    # create a template sas token for the container
    service = BlockBlobService(account_name=get_straccount(), account_key=accountkey)
    '''service.create_blob_from_text(container_name='vpnsiteconfig', blob_name=blob_name, text=u'test blob data'),
    blobs = list(service.list_blobs(container_name='vpnsiteconfig'))'''
    permissions = ContainerPermissions(read=True, write=True, delete=True, list=True)
    temp_token1 = service.generate_blob_shared_access_signature(
        container_name='vpnsiteconfig',
        blob_name=blob_name,
        permission=permissions,
        expiry='2020-01-01')
    blob_sas_template_uri = service.make_blob_url(
        container_name='vpnsiteconfig',
        blob_name=blob_name,
        protocol='https',
        sas_token=temp_token1)
    return blob_sas_template_uri

def get_container_sas_url_service(
    container: Container,
    service: BlockBlobService,
    *,
    read: bool = False,
    add: bool = False,
    create: bool = False,
    write: bool = False,
    delete: bool = False,
    list: bool = False,
) -> str:
    expiry = datetime.datetime.utcnow() + datetime.timedelta(days=30)
    permission = ContainerPermissions(read, add, create, write, delete, list)
    sas_token = service.generate_container_shared_access_signature(
        container, permission=permission, expiry=expiry)
    url = service.make_container_url(container, sas_token=sas_token)
    url = url.replace("?restype=container&", "?")
    return str(url)

def get_container_sas_url(
    container: str,
    account_id: Optional[str] = None,
    read: bool = False,
    add: bool = False,
    create: bool = False,
    write: bool = False,
    delete: bool = False,
    list: bool = False,
) -> str:
    service = get_blob_service(account_id)
    expiry = datetime.datetime.utcnow() + datetime.timedelta(days=30)
    permission = ContainerPermissions(read, add, create, write, delete, list)
    sas_token = service.generate_container_shared_access_signature(
        container, permission=permission, expiry=expiry)
    url = service.make_container_url(container, sas_token=sas_token)
    url = url.replace("?restype=container&", "?")
    return str(url)

def create_container_sas_token(azure_storage_account_name,
                               azure_storage_account_key,
                               container_name,
                               sas_token_hours,
                               write_access=False,
                               list_access=True):
    """ Creates an Azure SAS token from the given name + key """
    max_retries = 3
    retry_count = 0
    while True:
        try:
            service = BlockBlobService(account_name=azure_storage_account_name,
                                       account_key=azure_storage_account_key)
            if write_access:
                service.create_container(container_name)
            today = datetime.utcnow()
            expiration_date = today + timedelta(hours=sas_token_hours)
            expiration_replaced_date = expiration_date.replace(microsecond=0)
            expiration_date_iso = expiration_replaced_date.isoformat() + "Z"
            service.create_container(container_name=container_name)
            permissions = ContainerPermissions(read=True,
                                               list=list_access,
                                               delete=write_access,
                                               write=write_access)
            sas_token = service.generate_container_shared_access_signature(
                container_name=container_name,
                expiry=expiration_date_iso,
                permission=permissions)
            return sas_token
        except Exception as exc:
            retry_count += 1
            if retry_count > max_retries:
                print("Azure storage error: " + str(exc))
                sys.exit(200)
            message = str(exc)
            print(message)
            sleep(1)

def create_blob_sas_defintion(self, storage_account_name, vault_url):
    """ Creates a service SAS definition with access to a blob container. """
    from azure.storage.blob import BlockBlobService, ContainerPermissions
    from azure.keyvault.models import SasTokenType, SasDefinitionAttributes
    from azure.keyvault import SecretId

    # create the blob sas definition template
    # the sas template uri for service sas definitions contains the storage entity url with the template token
    # this sample demonstrates constructing the template uri for a blob container, but a similar approach can
    # be used for all other storage services, i.e. File, Queue, Table

    # create a template sas token for the container
    service = BlockBlobService(
        account_name=storage_account_name,
        # don't sign the template with the storage account key, use key 00000000
        account_key='00000000')
    permissions = ContainerPermissions(read=True, write=True, delete=True, list=True)
    temp_token = service.generate_container_shared_access_signature(
        container_name='blobcontainer', permission=permissions, expiry='2020-01-01')

    # use the BlockBlobService to construct the template uri for the container sas definition
    blob_sas_template_uri = service.make_container_url(
        container_name='blobcontainer', protocol='https', sas_token=temp_token)

    # create the sas definition in the vault
    attributes = SasDefinitionAttributes(enabled=True)
    blob_sas_def = self.client.set_sas_definition(
        vault_base_url=vault_url,
        storage_account_name=storage_account_name,
        sas_definition_name='blobcontall',
        template_uri=blob_sas_template_uri,
        sas_type=SasTokenType.service,
        validity_period='PT2H',
        sas_definition_attributes=attributes)

    # use the sas definition to provision a sas token and use it to create a BlockBlobClient
    # which can interact with blobs in the container

    # get the secret_id of the container sas definition and get the token from the vault as a secret
    sas_secret_id = SecretId(uri=blob_sas_def.secret_id)
    blob_sas_token = self.client.get_secret(
        vault_base_url=sas_secret_id.vault,
        secret_name=sas_secret_id.name,
        secret_version=sas_secret_id.version).value

    service = BlockBlobService(account_name=storage_account_name, sas_token=blob_sas_token)
    service.create_blob_from_text(
        container_name='blobcontainer', blob_name='blob2', text=u'test blob2 data')

    blobs = list(service.list_blobs(container_name='blobcontainer'))
    for blob in blobs:
        service.delete_blob(container_name='blobcontainer', blob_name=blob.name)

from azure.storage.blob import (BlockBlobService, ContainerPermissions)
from azure.storage.models import CorsRule
from datetime import datetime, timedelta

ACCOUNT_NAME = "account_name"
ACCOUNT_KEY = "account_key"
CONTAINER_NAME = 'container_name'

block_blob_service = BlockBlobService(account_name=ACCOUNT_NAME, account_key=ACCOUNT_KEY)

sas_url = block_blob_service.generate_container_shared_access_signature(
    CONTAINER_NAME,
    ContainerPermissions(True, True, True, True, ''),
    datetime.utcnow() + timedelta(weeks=52),
)

print(sas_url)
print("---------------------------------------------")

url = ['*']
method = ['GET', 'POST', 'PUT', 'OPTIONS']
corslist = []
corslist.append(
    CorsRule(url, method, allowed_headers=['*'], max_age_in_seconds=20, exposed_headers=['*']))

block_blob_service.set_blob_service_properties(None, None, None, corslist)