def get_image_url(self, image_name: str):
    """Return a URL (including a SAS token) for reading *image_name* from the store."""
    sas = generate_container_sas(
        account_name=self.sa_name,
        account_key=self.sa_key,
        container_name=self.container_name,
        permission=ContainerSasPermissions(read=True, list=True),
        # Back-date the start by a day to tolerate clock skew between hosts.
        start=datetime.utcnow() - timedelta(days=1),
        expiry=datetime.utcnow() + timedelta(days=30))
    return f"https://{self.sa_name}.blob.core.windows.net/{self.container_name}/{image_name}?{sas}"
def generate_sas_url(self, container_name, permission):
    """Return *container_name*'s URL with a two-hour SAS token appended."""
    token = generate_container_sas(
        account_name=self.storage_name,
        container_name=container_name,
        account_key=self.storage_key,
        permission=permission,
        expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=2))
    return "{}{}?{}".format(self.storage_endpoint, container_name, token)
async def container_access_policy_async(self):
    """Sample: set and read a container access policy, then use it to generate
    a container SAS and authenticate a new client with that token.

    The container is always deleted at the end, even if a step fails.
    """
    # Instantiate a BlobServiceClient using a connection string
    from azure.storage.blob.aio import BlobServiceClient
    blob_service_client = BlobServiceClient.from_connection_string(self.connection_string)

    async with blob_service_client:
        # Instantiate a ContainerClient
        container_client = blob_service_client.get_container_client("myaccesscontainerasync")
        try:
            # Create new Container
            await container_client.create_container()

            # [START set_container_access_policy]
            # Create access policy
            from azure.storage.blob import AccessPolicy, ContainerSasPermissions
            access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True),
                                         expiry=datetime.utcnow() + timedelta(hours=1),
                                         start=datetime.utcnow() - timedelta(minutes=1))
            identifiers = {'my-access-policy-id': access_policy}

            # Set the access policy on the container
            await container_client.set_container_access_policy(signed_identifiers=identifiers)
            # [END set_container_access_policy]

            # [START get_container_access_policy]
            policy = await container_client.get_container_access_policy()
            # [END get_container_access_policy]

            # [START generate_sas_token]
            # Use access policy to generate a sas token
            from azure.storage.blob import generate_container_sas
            sas_token = generate_container_sas(
                container_client.account_name,
                container_client.container_name,
                account_key=container_client.credential.account_key,
                policy_id='my-access-policy-id'
            )
            # [END generate_sas_token]

            # Use the sas token to authenticate a new client
            # [START create_container_client_sastoken]
            from azure.storage.blob.aio import ContainerClient
            container = ContainerClient.from_container_url(
                container_url="https://account.blob.core.windows.net/mycontainerasync",
                credential=sas_token,
            )
            # [END create_container_client_sastoken]
        finally:
            # Delete container
            await container_client.delete_container()
def generate_container_sastoken(container_client):
    """Create, print and return a read-only container SAS token valid for 730 hours."""
    token = generate_container_sas(
        container_client.account_name,
        container_client.container_name,
        account_key=container_client.credential.account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=730),
    )
    print('SAS token for the storage container ?{0}'.format(token))
    return token
def generate_sas_url(self, container, permissions):
    """Return *container*'s URL with a one-hour SAS token granting *permissions*."""
    name = container.container_name
    token = generate_container_sas(
        account_name=self.storage_account_name,
        container_name=name,
        account_key=self.storage_key,
        permission=permissions,
        expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1))
    print("Generating {} SAS URL".format(name))
    return self.storage_endpoint + name + "?" + token
def get_service_sas_token(connection_string, storage_account, container_name):
    """Generate a one-hour read/write container SAS, signed with the account key
    taken from *connection_string*. Prints and returns the token."""
    client = BlobServiceClient.from_connection_string(connection_string)
    token = generate_container_sas(
        storage_account,
        container_name,
        account_key=client.credential.account_key,
        permission=ContainerSasPermissions(read=True, write=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    print(token)
    return token
def generate_container_sas_uri(container_name, policy_id):
    """Return the container URL with a SAS token bound to the stored access policy *policy_id*."""
    client = get_container_client(container_name)
    token = generate_container_sas(
        client.account_name,
        client.container_name,
        account_key=client.credential.account_key,
        policy_id=policy_id,
    )
    return (
        f"https://{client.account_name}.blob.core.windows.net/"
        f"{client.container_name}?{token}"
    )
def generate_sas_with_sdk(connection_string, azure_container):
    """Return a blob-URL template ('{}' placeholder for the blob name) with a
    one-day read-only container SAS appended."""
    account_name, account_key = build_creds(connection_string)
    token = generate_container_sas(
        account_name,
        azure_container,
        account_key=account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(days=1),
    )
    return 'https://{}.blob.core.windows.net/{}/{{}}?{}'.format(
        account_name, azure_container, token)
async def get_clips(self):
    """List clip blobs and download their contents, lazily creating a read-only SAS."""
    blobs = self.store_service.list_blobs(name_starts_with=self.clips_path)
    if not self._SAS_token:
        # Cache a 14-day read token so later calls skip regeneration.
        self._SAS_token = generate_container_sas(
            account_name=self._account_name,
            container_name=self._container,
            account_key=self._account_key,
            permission=ContainerSasPermissions(read=True),
            expiry=datetime.datetime.utcnow() + datetime.timedelta(days=14))
    await self.retrieve_contents(blobs)
def generate_sas_token_container(self, container_name, valid_for=7):
    """Generate a container SAS token for *container_name*.

    Args:
        container_name: name of the container to grant access to.
        valid_for: validity window in days (default 7).

    Returns:
        The SAS token string prefixed with '?', ready to append to a URL.
    """
    sas_token = generate_container_sas(
        account_name=self.account_name,
        account_key=self.account_key,
        container_name=container_name,
        # Fix: a container SAS takes ContainerSasPermissions, not
        # BlobSasPermissions ("racwd" = read/add/create/write/delete).
        permission=ContainerSasPermissions.from_string("racwd"),
        start=datetime.utcnow(),
        expiry=datetime.utcnow() + timedelta(days=valid_for),
    )
    sas_token = "?" + sas_token
    return sas_token
def getImgUrlWithContainerSasToken(blobName, containerName):
    """Build a blob URL for *blobName* with a one-hour read-only container SAS.

    Storage credentials are fetched from the remote login JSON file.
    """
    response = requests.get(loginJsonFile)
    credentials = json.loads(response.content)
    account = credentials["Login"]["Account"]
    key = credentials["Login"]["Key"]
    token = generate_container_sas(
        account_name=account,
        container_name=containerName,
        account_key=key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1))
    return f"https://{account}.blob.core.windows.net/{containerName}/{blobName}?{token}"
def getContainerClient(input_container_name):
    """Return a ContainerClient authenticated with a policy-based container SAS.

    Account name and key are read from the AZURE_BLOB_ACCOUNT_NAME /
    AZURE_BLOB_ACCOUNT_KEY environment variables.
    """
    token = azureblob.generate_container_sas(
        os.environ.get('AZURE_BLOB_ACCOUNT_NAME'),
        input_container_name,
        account_key=os.environ.get('AZURE_BLOB_ACCOUNT_KEY'),
        policy_id='container_policy')
    url = ("https://" + os.environ.get('AZURE_BLOB_ACCOUNT_NAME')
           + ".blob.core.windows.net/" + input_container_name)
    return azureblob.ContainerClient.from_container_url(
        container_url=url, credential=token)
def create_form_client_and_container_sas_url(self, **kwargs):
    """Create a Form Recognizer client plus a SAS URL to a container of training forms.

    In live mode the sample training forms are uploaded to a freshly created
    container and a read/list SAS URL is generated for it; in playback mode a
    placeholder URL is returned instead.
    """
    # Resolve each setting from client kwargs first, falling back to call kwargs.
    form_recognizer_account = self.client_kwargs.pop("form_recognizer_account", None)
    if form_recognizer_account is None:
        form_recognizer_account = kwargs.pop("form_recognizer_account")
    form_recognizer_account_key = self.client_kwargs.pop("form_recognizer_account_key", None)
    if form_recognizer_account_key is None:
        form_recognizer_account_key = kwargs.pop("form_recognizer_account_key")
    storage_account = self.client_kwargs.pop("storage_account", None)
    if storage_account is None:
        storage_account = kwargs.pop("storage_account")
    storage_account_key = self.client_kwargs.pop("storage_account_key", None)
    if storage_account_key is None:
        storage_account_key = kwargs.pop("storage_account_key")
    if self.is_live:
        container_name = self.resource_random_name.replace("_", "-")  # container names can't have underscore
        container_client = ContainerClient(storage_account.primary_endpoints.blob, container_name, storage_account_key)
        container_client.create_container()
        training_path = os.path.abspath(os.path.join(os.path.abspath(__file__), "..", "./sample_forms/training/"))
        for path, folder, files in os.walk(training_path):
            for document in files:
                with open(os.path.join(path, document), "rb") as data:
                    if document == "Form_6.jpg":
                        document = "subfolder/Form_6.jpg"  # create virtual subfolder in container
                    container_client.upload_blob(name=document, data=data)
        # Read+list is all the Form Recognizer training service needs.
        sas_token = generate_container_sas(
            storage_account.name,
            container_name,
            storage_account_key,
            permission=ContainerSasPermissions.from_string("rl"),
            expiry=datetime.utcnow() + timedelta(hours=1)
        )
        container_sas_url = storage_account.primary_endpoints.blob + container_name + "?" + sas_token
    else:
        container_sas_url = "containersasurl"
    return self.client_cls(
        form_recognizer_account,
        AzureKeyCredential(form_recognizer_account_key),
        **self.client_kwargs
    ), container_sas_url
def deploy(file, version, product, group, container, days):
    """Create an IoT Hub automatic deployment configuration for an OTA firmware file.

    Publishes the blob URL, size, SHA-256 digest and a read/list container SAS
    for *file*, targeted at devices matching the *product*/*group* tags.
    """
    file_size = os.stat(file).st_size
    # NOTE(review): "stroage_account_name" / "account_access_key" are
    # (misspelled) module-level names defined elsewhere in this file.
    file_url = f"https://{stroage_account_name}.blob.core.windows.net/{container}/{os.path.basename(file)}"
    file_sas = generate_container_sas(
        account_name=stroage_account_name,
        container_name=container,
        account_key=account_access_key,
        permission=ContainerSasPermissions(read=True, list=True),
        expiry=datetime.utcnow() + timedelta(days=days))
    with open(file, "rb") as f:
        # Devices verify the downloaded image against this digest.
        file_sha256 = hashlib.sha256(f.read()).hexdigest().upper()
    iothub_conn_str = os.environ["AZURE_IOTHUB_CONNECTIONSTRING"]
    iothub_configuration = IoTHubConfigurationManager(iothub_conn_str)
    config = models.Configuration()
    config.id = "ota_v" + str(version)
    config.content = models.ConfigurationContent(
        device_content={
            "properties.desired.extFwInfo": {
                "version": version,
                "size": file_size,
                "url": file_url,
                "sas": file_sas,
                "sha256": file_sha256
            }
        })
    # Rollout-progress metrics, one query per reported extFwInfo.Status value.
    config.metrics = models.ConfigurationMetrics(
        queries={
            "Downloading": f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='downloading'",
            "Interrupted": f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='interrupted'",
            "Applying": f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='applying'",
            "Applied": f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='applied'",
            "Error": f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='error'"
        })
    config.target_condition = f"tags.productType='{product}' AND tags.deviceGroup='{group}'"
    # Newer versions get higher priority, so they win over older configurations.
    config.priority = version
    iothub_configuration.create_configuration(config)
def create_sas_token(blob_service, storage_account, container,
                     permissions=None, expire_hours=1, start_hours=1):
    """Generate a container SAS token signed with *blob_service*'s account key.

    Args:
        blob_service: client whose ``credential.account_key`` signs the token.
        storage_account: storage account name.
        container: container name.
        permissions: ContainerSasPermissions; defaults to read + list.
        expire_hours: hours from now until the token expires.
        start_hours: hours before now at which the token becomes valid
            (back-dated to tolerate clock skew).

    Returns:
        The SAS token string.
    """
    if permissions is None:
        # Fix: avoid a shared default-argument object; build the default per call.
        permissions = ContainerSasPermissions(read=True, list=True)
    expiry_time = datetime.utcnow() + timedelta(hours=expire_hours)
    start_time = datetime.utcnow() - timedelta(hours=start_hours)
    return generate_container_sas(
        storage_account,
        container,
        permission=permissions,
        expiry=expiry_time,
        start=start_time,
        account_key=blob_service.credential.account_key)
def get_playback_sas(self) -> SharedAccessKey:
    """Gets (renewing if necessary) a shared access key for playing back tracks."""
    # Reuse the cached key while it still has more than an hour of life left.
    if self._sas.expires_at > datetime.utcnow() + timedelta(hours=1):
        return self._sas
    expires_at = datetime.utcnow() + playback_sas_ttl
    token = generate_container_sas(
        account_name=self.account_name,
        account_key=self._config["azure"]["account_key"],
        container_name=self.songs_container,
        permission=BlobSasPermissions(read=True),
        expiry=expires_at,
    )
    self._sas = SharedAccessKey(expires_at, token)
    return self._sas
def upload_instance_setup(self) -> None:
    """Upload instance-specific setup files to the 'instance-specific-setup'
    container via azcopy, creating the container if needed."""
    logger.info("uploading instance-specific-setup from %s", self.instance_specific)
    account_name = self.results["deploy"]["func-name"]["value"]
    key = self.results["deploy"]["func-key"]["value"]
    account_url = "https://%s.blob.core.windows.net" % account_name
    client = BlobServiceClient(account_url, credential=key)
    # Create the container only if it does not already exist.
    if "instance-specific-setup" not in [
        x["name"] for x in client.list_containers()
    ]:
        client.create_container("instance-specific-setup")

    # Short-lived SAS: it only needs to outlive the two azcopy runs below.
    expiry = datetime.utcnow() + timedelta(minutes=30)
    sas = generate_container_sas(
        account_name,
        "instance-specific-setup",
        account_key=key,
        permission=ContainerSasPermissions(
            read=True, write=True, delete=True, list=True
        ),
        expiry=expiry,
    )
    url = "%s/%s?%s" % (account_url, "instance-specific-setup", sas)
    # Copy everything up, then sync with deletion to drop stale remote files.
    subprocess.check_output(
        [
            self.azcopy,
            "copy",
            os.path.join(self.instance_specific, "*"),
            url,
            "--overwrite=true",
            "--recursive=true",
        ]
    )
    subprocess.check_output(
        [
            self.azcopy,
            "sync",
            self.instance_specific,
            url,
            "--delete-destination",
            "true",
        ]
    )
def get_container_url(
    account_name: str,
    account_key: str,
    container_name: str,
    access_policy: str = "read_only",
    expiry: datetime.timedelta = datetime.timedelta(days=365),
    **kwargs,
) -> str:
    """Return the container URL with a Shared Access Signature appended.

    Args:
        account_name: storage account for which to generate the URL.
        account_key: shared account key used to sign the SAS.
        container_name: container in the storage account to grant access to.
        access_policy: one of ``read_only``, ``append_only``, ``full``.
        expiry: how far in the future the signature expires.

    Returns:
        The full container URL including the shared access signature.
    """
    policies = {
        "read_only": ContainerSasPermissions(
            read=True, list=True, delete=False, write=False
        ),
        "append_only": ContainerSasPermissions(
            read=True, list=True, delete=False, write=True
        ),
        "full": ContainerSasPermissions(read=True, list=True, delete=True, write=True),
    }
    now = datetime.datetime.utcnow()
    # Back-date the start by a minute to tolerate clock skew.
    signature = generate_container_sas(
        account_name,
        container_name,
        account_key=account_key,
        permission=policies[access_policy],
        start=now - datetime.timedelta(minutes=1),
        expiry=now + expiry,
    )
    return f"https://{account_name}.blob.core.windows.net/{container_name}?{signature}"
def add_container_sas_url(container_url: str) -> str:
    """Append a full-access, one-hour SAS token to *container_url* unless the
    URL already carries a signature."""
    parsed = urllib.parse.urlparse(container_url)
    params = urllib.parse.parse_qs(parsed.query)
    # A 'sig' query parameter means the URL already has a SAS signature.
    if "sig" in params:
        return container_url
    account_name = parsed.netloc.split(".")[0]
    account_key = get_storage_account_name_key_by_name(account_name)
    token = generate_container_sas(
        account_name=account_name,
        container_name=parsed.path.split("/")[1],
        account_key=account_key,
        permission=ContainerSasPermissions(read=True, write=True, delete=True, list=True),
        expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
    )
    return f"{container_url}?{token}"
def generate_writable_container_sas(account_name: str, account_key: str,
                                    container_name: str, access_duration_hrs: float,
                                    account_url: Optional[str] = None) -> str:
    """Creates a container and returns a SAS URI with read/write/list permissions.

    Args:
        account_name: str, name of blob storage account
        account_key: str, account SAS token or account shared access key
        container_name: str, name of container to create, must not match an
            existing container in the given storage account
        access_duration_hrs: float
        account_url: str, optional, defaults to default Azure Storage URL

    Returns:
        str, URL to newly created container

    Raises:
        azure.core.exceptions.ResourceExistsError, if container already exists

    NOTE: This method currently fails on non-default Azure Storage URLs. The
    initializer for ContainerClient() assumes the default Azure Storage URL
    format, which is a bug that has been reported here:
        https://github.com/Azure/azure-sdk-for-python/issues/12568
    """
    if account_url is None:
        account_url = build_azure_storage_uri(account=account_name)
    # Fix: close the client when done (context manager), matching the other
    # variant of this function in the file and avoiding a leaked connection.
    with ContainerClient(account_url=account_url,
                         container_name=container_name,
                         credential=account_key) as container_client:
        container_client.create_container()

    permissions = ContainerSasPermissions(read=True, write=True, list=True)
    container_sas_token = generate_container_sas(
        account_name=account_name,
        container_name=container_name,
        account_key=account_key,
        permission=permissions,
        expiry=datetime.utcnow() + timedelta(hours=access_duration_hrs))

    return f'{account_url}/{container_name}?{container_sas_token}'
def upload_third_party(self) -> None:
    """Upload each third-party tool directory to its own blob container via azcopy."""
    logger.info("uploading third-party tools from %s", self.third_party)
    account_name = self.results["deploy"]["fuzz-name"]["value"]
    key = self.results["deploy"]["fuzz-key"]["value"]
    account_url = "https://%s.blob.core.windows.net" % account_name
    client = BlobServiceClient(account_url, credential=key)
    containers = [x["name"] for x in client.list_containers()]

    # One container per tool directory under self.third_party.
    for name in os.listdir(self.third_party):
        path = os.path.join(self.third_party, name)
        if not os.path.isdir(path):
            continue
        if name not in containers:
            client.create_container(name)

        # Short-lived SAS: it only needs to outlive the azcopy runs below.
        expiry = datetime.utcnow() + timedelta(minutes=30)
        sas = generate_container_sas(
            account_name,
            name,
            account_key=key,
            permission=ContainerSasPermissions(
                read=True, write=True, delete=True, list=True
            ),
            expiry=expiry,
        )
        url = "%s/%s?%s" % (account_url, name, sas)
        # Copy everything up, then sync with deletion to drop stale remote files.
        subprocess.check_output(
            [
                self.azcopy,
                "copy",
                os.path.join(path, "*"),
                url,
                "--overwrite=true",
                "--recursive=true",
            ]
        )
        subprocess.check_output(
            [self.azcopy, "sync", path, url, "--delete-destination", "true"]
        )
def build_output_file(self, output_file, container_path) -> azure.batch.models.ResourceFile:
    """
    Uploads a local file to an Azure Blob storage container.

    Args:
        output_file: the name of the file produced as the output by the task
        container_path: the name of the file in the container

    Returns:
        A ResourceFile initialized with a SAS URL appropriate for Batch tasks.
    """
    # where to store the outputs
    sas = generate_container_sas(
        self.container_client.account_name,
        self.container_client.container_name,
        permission=ContainerSasPermissions(
            read=True, write=True, delete=True, list=True),
        expiry=datetime.datetime.utcnow() + datetime.timedelta(
            hours=self.config.STORAGE_ACCESS_DURATION_HRS),
        account_key=self.config.STORAGE_ACCOUNT_KEY,
    )
    container_sas_url = "{}?{}".format(self.container_client.url, sas)

    destination = models.OutputFileDestination(
        container=models.OutputFileBlobContainerDestination(
            container_url=container_sas_url, path=container_path))

    # Under what conditions should Azure Batch attempt to extract the outputs?
    upload_options = models.OutputFileUploadOptions(
        upload_condition=models.OutputFileUploadCondition.task_success)

    # https://docs.microsoft.com/en-us/azure/batch/batch-task-output-files#specify-output-files-for-task-output
    out = models.OutputFile(
        file_pattern=output_file,
        destination=destination,
        upload_options=upload_options,
    )
    self.output_files.append(container_path)
    return out
def get_container_sas_url_service(
    client: ContainerClient,
    *,
    read: bool = False,
    write: bool = False,
    delete: bool = False,
    list_: bool = False,
    delete_previous_version: bool = False,
    tag: bool = False,
    days: int = 30,
    hours: int = 0,
    minutes: int = 0,
) -> str:
    """Return *client*'s container URL authenticated by a freshly generated SAS
    with the requested permissions and validity window."""
    account_name = client.account_name
    container_name = client.container_name
    key = get_storage_account_name_key_by_name(account_name)
    start, expiry = sas_time_window(days=days, hours=hours, minutes=minutes)

    permission = ContainerSasPermissions(
        read=read,
        write=write,
        delete=delete,
        list=list_,
        delete_previous_version=delete_previous_version,
        tag=tag,
    )
    token = generate_container_sas(
        account_name,
        container_name,
        account_key=key,
        permission=permission,
        start=start,
        expiry=expiry,
    )

    sas_client = ContainerClient(
        get_url(account_name),
        container_name=container_name,
        credential=token,
    )
    return cast(str, sas_client.url)
def add_container_sas_url(
    container_url: str, duration: datetime.timedelta = CONTAINER_SAS_DEFAULT_DURATION
) -> str:
    """Append a full-access SAS token (valid for *duration*) to *container_url*
    unless the URL already carries a signature."""
    parsed = urllib.parse.urlparse(container_url)
    params = urllib.parse.parse_qs(parsed.query)
    # A 'sig' query parameter means the URL already has a SAS signature.
    if "sig" in params:
        return container_url
    start, expiry = sas_time_window(duration)
    account_name = parsed.netloc.split(".")[0]
    account_key = get_storage_account_name_key_by_name(account_name)
    token = generate_container_sas(
        account_name=account_name,
        container_name=parsed.path.split("/")[1],
        account_key=account_key,
        permission=ContainerSasPermissions(
            read=True, write=True, delete=True, list=True
        ),
        expiry=expiry,
        start=start,
    )
    return f"{container_url}?{token}"
def get_container_uri(connection_string: str, container_name: str) -> str:
    """
    Creates and initialize a container; returns a URI with a SAS read/write token to access it.
    """
    container = create_container(connection_string, container_name)
    logger.info(
        f"Creating SAS token for container '{container_name}' on account: '{container.account_name}'"
    )
    token = generate_container_sas(
        container.account_name,
        container.container_name,
        account_key=container.credential.account_key,
        permission=BlobSasPermissions(read=True, add=True, write=True, create=True),
        expiry=datetime.utcnow() + timedelta(days=14))
    uri = "{}?{}".format(container.url, token)
    logger.debug(f" - container url: '{uri}'.")
    return uri
def create_writable_container_sas(self, account_name, account_key, container_name, access_duration_hrs):
    """Create *container_name* in the account and return its URL with a
    read/write/list SAS valid for *access_duration_hrs* hours."""
    account_url = "https://{}.blob.core.windows.net".format(account_name)
    service = BlobServiceClient(account_url=account_url, credential=account_key)
    service.create_container(container_name)

    permissions = ContainerSasPermissions(read=True, write=True, delete=False, list=True)
    expiry = datetime.utcnow() + timedelta(hours=access_duration_hrs)
    token = generate_container_sas(account_name,
                                   container_name,
                                   account_key=account_key,
                                   permission=permissions,
                                   expiry=expiry)
    return '{}/{}?{}'.format(account_url, container_name, token)
def get_container_sas_url_service(
    client: ContainerClient,
    *,
    read: bool = False,
    write: bool = False,
    delete: bool = False,
    list_: bool = False,
    delete_previous_version: bool = False,
    tag: bool = False,
    duration: datetime.timedelta = CONTAINER_SAS_DEFAULT_DURATION,
) -> str:
    """Return the container URL for *client*, authenticated by a new SAS token
    with the requested permissions, valid for *duration*."""
    name = client.account_name
    container = client.container_name
    account_key = get_storage_account_name_key_by_name(name)
    start, expiry = sas_time_window(duration)
    token = generate_container_sas(
        name,
        container,
        account_key=account_key,
        permission=ContainerSasPermissions(
            read=read,
            write=write,
            delete=delete,
            list=list_,
            delete_previous_version=delete_previous_version,
            tag=tag,
        ),
        start=start,
        expiry=expiry,
    )
    authed = ContainerClient(
        get_url(name),
        container_name=container,
        credential=token,
    )
    return cast(str, authed.url)
def generate_writable_container_sas(account_name: str, account_key: str,
                                    container_name: str, access_duration_hrs: float,
                                    account_url: Optional[str] = None) -> str:
    """
    Creates a container and returns a SAS URI with read/write/list permissions.

    Args:
        account_name: str, name of blob storage account
        account_key: str, account SAS token or account shared access key
        container_name: str, name of container to create, must not match an
            existing container in the given storage account
        access_duration_hrs: float
        account_url: str, optional, defaults to default Azure Storage URL

    Returns:
        str, URL to newly created container

    Raises:
        azure.core.exceptions.ResourceExistsError, if container already exists
    """
    url = account_url if account_url is not None else build_azure_storage_uri(account=account_name)
    # The client is only needed to create the container; close it promptly.
    with ContainerClient(account_url=url,
                         container_name=container_name,
                         credential=account_key) as client:
        client.create_container()
    token = generate_container_sas(
        account_name=account_name,
        container_name=container_name,
        account_key=account_key,
        permission=ContainerSasPermissions(read=True, write=True, list=True),
        expiry=datetime.utcnow() + timedelta(hours=access_duration_hrs))
    return f'{url}/{container_name}?{token}'
def generate_sas(self, guid, user_delegation_key = None):
    """Make a short-lived shared access signature for the cube *guid*.

    The signature is valid from five minutes in the past until five minutes
    in the future, so it is good for queries immediately but is not suitable
    for storing.

    Parameters
    ----------
    guid : str
        ID for a cube
    user_delegation_key
        User delegation key, or None. defaults to self.user_delegation_key()

    Returns
    -------
    sas : str
        Shared access signature

    Examples
    --------
    >>> bauth = simple_blobstore_auth(resource)
    >>> sas = bauth.generate_sas(guid)
    >>> requests.post(f'{host}/graphql?{sas}', body)
    """
    key = user_delegation_key if user_delegation_key is not None else self.user_delegation_key()
    now = datetime.datetime.utcnow()
    window = datetime.timedelta(minutes = 5)
    return azblob.generate_container_sas(
        account_name = self.acc,
        container_name = guid,
        user_delegation_key = key,
        permission = 'r',
        start = now - window,
        expiry = now + window,
    )
async def test_sas_signature_is_scrubbed_off(self, resource_group, location, storage_account, storage_account_key):
    """Verify the SAS 'sig' query parameter appears in logs with its value redacted."""
    # Test can only run live
    if not self.is_live:
        pytest.skip("live only")
    bsc = BlobServiceClient(self._account_url(storage_account.name), storage_account_key)
    await self._setup(bsc)
    # Arrange
    container = bsc.get_container_client(self.container_name)
    token = generate_container_sas(
        container.account_name,
        container.container_name,
        account_key=container.credential.account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    # parse out the signed signature
    token_components = parse_qs(token)
    signed_signature = quote(
        token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

    sas_service = ContainerClient.from_container_url(container.url, credential=token)

    # Act
    with LogCaptured(self) as log_captured:
        await sas_service.get_account_information(logging_enable=True)
        log_as_str = log_captured.getvalue()

        # Assert
        # make sure the query parameter 'sig' is logged, but its value is not
        self.assertTrue(
            QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
        self.assertFalse(signed_signature in log_as_str)