Example #1
    def upload_instance_setup(self):
        logger.info("uploading instance-specific-setup from %s", self.instance_specific)
        account_name = self.results["deploy"]["func-name"]["value"]
        key = self.results["deploy"]["func-key"]["value"]
        account_url = "https://%s.blob.core.windows.net" % account_name
        client = BlobServiceClient(account_url, credential=key)
        if "instance-specific-setup" not in [
            x["name"] for x in client.list_containers()
        ]:
            client.create_container("instance-specific-setup")

        expiry = datetime.utcnow() + timedelta(minutes=30)

        sas = generate_container_sas(
            account_name,
            "instance-specific-setup",
            account_key=key,
            permission=ContainerSasPermissions(
                read=True, write=True, delete=True, list=True
            ),
            expiry=expiry,
        )
        url = "%s/%s?%s" % (account_url, "instance-specific-setup", sas)

        subprocess.check_output(
            [
                self.azcopy,
                "sync",
                self.instance_specific,
                url,
                "--delete-destination",
                "true",
            ]
        )
Example #2
def get_container_sas_url_service(
    client: ContainerClient,
    *,
    read: bool = False,
    write: bool = False,
    delete: bool = False,
    list: bool = False,
    delete_previous_version: bool = False,
    tag: bool = False,
) -> str:
    account_name = client.account_name
    container_name = client.container_name
    account_key = get_storage_account_name_key_by_name(account_name)

    sas = generate_container_sas(
        account_name,
        container_name,
        account_key=account_key,
        permission=ContainerSasPermissions(
            read=read,
            write=write,
            delete=delete,
            list=list,
            delete_previous_version=delete_previous_version,
            tag=tag,
        ),
        expiry=datetime.datetime.utcnow() + datetime.timedelta(days=30),
    )

    with_sas = ContainerClient(
        get_url(account_name),
        container_name=container_name,
        credential=sas,
    )
    return cast(str, with_sas.url)
Example #3
    def upload_third_party(self):
        logger.info("uploading third-party tools from %s", self.third_party)
        account_name = self.results["deploy"]["fuzz-name"]["value"]
        key = self.results["deploy"]["fuzz-key"]["value"]
        account_url = "https://%s.blob.core.windows.net" % account_name

        client = BlobServiceClient(account_url, credential=key)
        containers = [x["name"] for x in client.list_containers()]

        for name in os.listdir(self.third_party):
            path = os.path.join(self.third_party, name)
            if not os.path.isdir(path):
                continue
            if name not in containers:
                client.create_container(name)

            expiry = datetime.utcnow() + timedelta(minutes=30)
            sas = generate_container_sas(
                account_name,
                name,
                account_key=key,
                permission=ContainerSasPermissions(
                    read=True, write=True, delete=True, list=True
                ),
                expiry=expiry,
            )
            url = "%s/%s?%s" % (account_url, name, sas)

            subprocess.check_output(
                [self.azcopy, "sync", path, url, "--delete-destination", "true"]
            )
Example #4
    def container_access_policy(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Instantiate a BlobServiceClient using a connection string
        from azure.storage.blob import BlobServiceClient
        blob_service_client = BlobServiceClient.from_connection_string(
            self.connection_string)

        # Instantiate a ContainerClient
        container_client = blob_service_client.get_container_client(
            "myaccesscontainer")

        try:
            # Create new Container
            container_client.create_container()

            # [START set_container_access_policy]
            # Create access policy
            from azure.storage.blob import AccessPolicy, ContainerSasPermissions
            access_policy = AccessPolicy(
                permission=ContainerSasPermissions(read=True),
                expiry=datetime.utcnow() + timedelta(hours=1),
                start=datetime.utcnow() - timedelta(minutes=1))

            identifiers = {'test': access_policy}

            # Set the access policy on the container
            container_client.set_container_access_policy(
                signed_identifiers=identifiers)
            # [END set_container_access_policy]

            # [START get_container_access_policy]
            policy = container_client.get_container_access_policy()
            # [END get_container_access_policy]

            # [START generate_sas_token]
            # Use access policy to generate a sas token
            from azure.storage.blob import generate_container_sas

            sas_token = generate_container_sas(
                container_client.account_name,
                container_client.container_name,
                account_key=container_client.credential.account_key,
                policy_id='my-access-policy-id')
            # [END generate_sas_token]

            # Use the sas token to authenticate a new client
            # [START create_container_client_sastoken]
            from azure.storage.blob import ContainerClient
            container = ContainerClient.from_container_url(
                container_url=
                "https://account.blob.core.windows.net/mycontainer",
                credential=sas_token)
            # [END create_container_client_sastoken]

        finally:
            # Delete container
            container_client.delete_container()
Example #5
def get_blob_url(blob_service,
                 blob_name,
                 storage_account,
                 container,
                 permissions=ContainerSasPermissions(read=True, list=True),
                 expire_hours=1,
                 start_hours=1):
    """
    Create a URL for the given blob with a shared access signature.

    The signature will expire based on expire_hours.
    """
    sas_token = create_sas_token(blob_service,
                                 storage_account,
                                 container,
                                 permissions=permissions,
                                 expire_hours=expire_hours,
                                 start_hours=start_hours)

    source_blob_url = ('https://{account}.blob.core.windows.net/'
                       '{container}/{blob}?{token}'.format(
                           account=storage_account,
                           container=container,
                           blob=blob_name,
                           token=sas_token))

    return source_blob_url
Example #6
    def test_shared_access_container(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        container = self._create_container()
        blob_name = 'blob1'
        data = b'hello world'

        blob = container.get_blob_client(blob_name)
        blob.upload_blob(data)

        token = generate_container_sas(
            container.account_name,
            container.container_name,
            account_key=container.credential.account_key,
            expiry=datetime.utcnow() + timedelta(hours=1),
            permission=ContainerSasPermissions(read=True),
        )
        blob = BlobClient.from_blob_url(blob.url, credential=token)

        # Act
        response = requests.get(blob.url)

        # Assert
        self.assertTrue(response.ok)
        self.assertEqual(data, response.content)
Example #7
    def test_user_delegation_sas_for_container(self):
        # SAS URL is calculated from storage key, so this test runs live only
        pytest.skip("Current Framework Cannot Support OAUTH")
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        token_credential = self.generate_oauth_token()
        service_client = BlobServiceClient(self._get_oauth_account_url(),
                                           credential=token_credential)
        user_delegation_key = service_client.get_user_delegation_key(
            datetime.utcnow(),
            datetime.utcnow() + timedelta(hours=1))

        container_client = service_client.create_container(
            self.get_resource_name('oauthcontainer'))
        token = container_client.generate_shared_access_signature(
            expiry=datetime.utcnow() + timedelta(hours=1),
            permission=ContainerSasPermissions(read=True),
            user_delegation_key=user_delegation_key,
            account_name='emilydevtest')

        blob_client = container_client.get_blob_client(
            self.get_resource_name('oauthblob'))
        blob_content = self.get_random_text_data(1024)
        blob_client.upload_blob(blob_content, length=len(blob_content))

        # Act
        new_blob_client = BlobClient.from_blob_url(blob_client.url,
                                                   credential=token)
        content = new_blob_client.download_blob()

        # Assert
        self.assertEqual(blob_content, b"".join(list(content)).decode('utf-8'))
Example #8
    def test_sas_signature_is_scrubbed_off(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        container = self.bsc.get_container_client(self.container_name)
        token = container.generate_shared_access_signature(
            permission=ContainerSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        # parse out the signed signature
        token_components = parse_qs(token)
        signed_signature = quote(
            token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

        sas_service = ContainerClient.from_container_url(container.url,
                                                         credential=token)

        # Act
        with LogCaptured(self) as log_captured:
            sas_service.get_account_information(logging_enable=True)
            log_as_str = log_captured.getvalue()

            # Assert
            # make sure the query parameter 'sig' is logged, but its value is not
            self.assertTrue(
                QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
            self.assertFalse(signed_signature in log_as_str)
Example #9
    async def test_sas_signature_is_scrubbed_off(self, storage_account_name,
                                                 storage_account_key):
        # Test can only run live

        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"),
                                storage_account_key)
        await self._setup(bsc)
        # Arrange
        container = bsc.get_container_client(self.container_name)
        token = generate_container_sas(
            container.account_name,
            container.container_name,
            account_key=container.credential.account_key,
            permission=ContainerSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
        )
        # parse out the signed signature
        token_components = parse_qs(token)
        signed_signature = quote(
            token_components[QueryStringConstants.SIGNED_SIGNATURE][0])

        sas_service = ContainerClient.from_container_url(container.url,
                                                         credential=token)

        # Act
        with LogCaptured(self) as log_captured:
            await sas_service.get_account_information(logging_enable=True)
            log_as_str = log_captured.getvalue()

            # Assert
            # make sure the query parameter 'sig' is logged, but its value is not
            self.assertTrue(
                QueryStringConstants.SIGNED_SIGNATURE in log_as_str)
            self.assertFalse(signed_signature in log_as_str)
Example #10
    def add_log_export(self) -> None:
        if not self.export_appinsights:
            logger.info("not exporting appinsights")
            return

        container_name = "app-insights"

        logger.info("adding appinsight log export")
        account_name = self.results["deploy"]["func_name"]["value"]
        key = self.results["deploy"]["func_key"]["value"]
        account_url = "https://%s.blob.core.windows.net" % account_name
        client = BlobServiceClient(account_url, credential=key)
        if container_name not in [x["name"] for x in client.list_containers()]:
            client.create_container(container_name)

        expiry = datetime.utcnow() + timedelta(days=2 * 365)

        # NOTE: as this is a long-lived SAS URL, it should not be logged and is
        # only used in the later export_configurations.create() call
        sas = generate_container_sas(
            account_name,
            container_name,
            account_key=key,
            permission=ContainerSasPermissions(write=True),
            expiry=expiry,
        )
        url = "%s/%s?%s" % (account_url, container_name, sas)

        record_types = (
            "Requests, Event, Exceptions, Metrics, PageViews, "
            "PageViewPerformance, Rdd, PerformanceCounters, Availability")

        req = ApplicationInsightsComponentExportRequest(
            record_types=record_types,
            destination_type="Blob",
            is_enabled="true",
            destination_address=url,
        )

        credential = AzureCliCredential()
        app_insight_client = ApplicationInsightsManagementClient(
            credential,
            subscription_id=self.get_subscription_id(),
        )

        to_delete = []
        for entry in app_insight_client.export_configurations.list(
                self.resource_group, self.application_name):
            if (entry.storage_name == account_name
                    and entry.container_name == container_name):
                to_delete.append(entry.export_id)

        for export_id in to_delete:
            logger.info("replacing existing export: %s", export_id)
            app_insight_client.export_configurations.delete(
                self.resource_group, self.application_name, export_id)

        app_insight_client.export_configurations.create(
            self.resource_group, self.application_name, req)
Example #11
 def getSasToken(self):
     container_sas_token = generate_container_sas(
         account_name=self.account_name,
         container_name=self.container_name,
         account_key=self.account_key,
         permission=ContainerSasPermissions(read=True),
         expiry=datetime.utcnow() + timedelta(hours=1))
     return container_sas_token, self.account_name, self.container_name
Example #12
    async def container_access_policy_async(self):
        # Instantiate a BlobServiceClient using a connection string
        from azure.storage.blob.aio import BlobServiceClient
        blob_service_client = BlobServiceClient.from_connection_string(
            self.connection_string)

        async with blob_service_client:
            # Instantiate a ContainerClient
            container_client = blob_service_client.get_container_client(
                "myaccesscontainerasync")

            try:
                # Create new Container
                await container_client.create_container()

                # [START set_container_access_policy]
                # Create access policy
                from azure.storage.blob import AccessPolicy, ContainerSasPermissions
                access_policy = AccessPolicy(
                    permission=ContainerSasPermissions(read=True),
                    expiry=datetime.utcnow() + timedelta(hours=1),
                    start=datetime.utcnow() - timedelta(minutes=1))

                identifiers = {'my-access-policy-id': access_policy}

                # Set the access policy on the container
                await container_client.set_container_access_policy(
                    signed_identifiers=identifiers)
                # [END set_container_access_policy]

                # [START get_container_access_policy]
                policy = await container_client.get_container_access_policy()
                # [END get_container_access_policy]

                # [START generate_sas_token]
                # Use access policy to generate a sas token
                from azure.storage.blob import generate_container_sas

                sas_token = generate_container_sas(
                    container_client.account_name,
                    container_client.container_name,
                    account_key=container_client.credential.account_key,
                    policy_id='my-access-policy-id')
                # [END generate_sas_token]

                # Use the sas token to authenticate a new client
                # [START create_container_client_sastoken]
                from azure.storage.blob.aio import ContainerClient
                container = ContainerClient.from_container_url(
                    container_url=
                    "https://account.blob.core.windows.net/mycontainerasync",
                    credential=sas_token,
                )
                # [END create_container_client_sastoken]

            finally:
                # Delete container
                await container_client.delete_container()
Example #13
def get_container_url(
    account_name: str,
    account_key: str,
    container_name: str,
    access_policy: str = "read_only",
    expiry: datetime.timedelta = datetime.timedelta(days=365),
    **kwargs,
) -> str:
    """Get the full url, for the given container on the given account, with a
    Shared Access Signature granting the specified access policy.

    Args:
      account_name: name of the storage account for which to generate the URL
      account_key: shared account key of the storage account used to generate the SAS
      container_name: name of the container for which to grant access in the storage
        account
      access_policy: one of ``read_only``, ``append_only``, ``full``
      expiry: the interval from now after which the signature will expire

    Returns:
      the full URL of the container, with the shared access signature.
    """

    access_policies = {
        "read_only": ContainerSasPermissions(
            read=True, list=True, delete=False, write=False
        ),
        "append_only": ContainerSasPermissions(
            read=True, list=True, delete=False, write=True
        ),
        "full": ContainerSasPermissions(read=True, list=True, delete=True, write=True),
    }

    current_time = datetime.datetime.utcnow()

    signature = generate_container_sas(
        account_name,
        container_name,
        account_key=account_key,
        permission=access_policies[access_policy],
        start=current_time + datetime.timedelta(minutes=-1),
        expiry=current_time + expiry,
    )

    return f"https://{account_name}.blob.core.windows.net/{container_name}?{signature}"
Example #14
def get_pdf_url_with_container_sas_token(blob_name, container_name):
    container_sas_token = generate_container_sas(
        account_name=ACCOUNT_NAME,
        container_name=container_name,
        account_key=ACCOUNT_KEY,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1))
    blob_url_with_container_sas_token = f"https://{ACCOUNT_NAME}.blob.core.windows.net/{container_name}/{blob_name}?{container_sas_token}"
    return blob_url_with_container_sas_token
Example #15
def get_img_url_with_blob_sas_token(blob_name):
    blob_sas_token = generate_blob_sas(
        account_name=account_name,
        container_name=container_name,
        blob_name=blob_name,
        account_key=account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1))
    blob_url_with_blob_sas_token = f"https://{account_name}.blob.core.windows.net/{container_name}/{blob_name}?{blob_sas_token}"
    return blob_url_with_blob_sas_token
Example #16
 def get_image_url(self, image_name: str):
     """Generate an url including sas token to access image in the store."""
     container_sas = generate_container_sas(
         account_name=self.sa_name,
         account_key=self.sa_key,
         container_name=self.container_name,
         permission=ContainerSasPermissions(read=True, list=True),
         start=datetime.utcnow() - timedelta(days=1),
         expiry=datetime.utcnow() + timedelta(days=30))
     return f"https://{self.sa_name}.blob.core.windows.net/{self.container_name}/{image_name}?{container_sas}"
Example #17
def generate_container_sastoken(container_client):
    sas_token = generate_container_sas(
        container_client.account_name,
        container_client.container_name,
        account_key=container_client.credential.account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=730),
    )
    print('SAS token for the storage container ?{0}'.format(sas_token))

    return sas_token
Example #18
def get_service_sas_token(connection_string, storage_account, container_name):
    blob_service_client = BlobServiceClient.from_connection_string(
        connection_string)
    service_sas_token = generate_container_sas(
        storage_account,
        container_name,
        account_key=blob_service_client.credential.account_key,
        permission=ContainerSasPermissions(read=True, write=True),
        expiry=datetime.utcnow() + timedelta(hours=1),
    )
    print(service_sas_token)
    return service_sas_token
Example #19
    async def get_clips(self):
        blobs = self.store_service.list_blobs(name_starts_with=self.clips_path)

        if not self._SAS_token:
            self._SAS_token = generate_container_sas(
                account_name=self._account_name,
                container_name=self._container,
                account_key=self._account_key,
                permission=ContainerSasPermissions(read=True),
                expiry=datetime.datetime.utcnow() +
                datetime.timedelta(days=14))

        await self.retrieve_contents(blobs)
Example #20
def generate_sas_with_sdk(connection_string, azure_container):
    account_name, account_key = build_creds(connection_string)

    sas_token = generate_container_sas(
        account_name,
        azure_container,
        account_key=account_key,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(days=1),
    )

    return 'https://{}.blob.core.windows.net/{}/{{}}?{}'.format(
        account_name, azure_container, sas_token)
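
A hypothetical use of the URL template returned above: fill in a blob name and fetch the blob over HTTP. The connection string and blob name are placeholders, not values from the example:

import requests

url_template = generate_sas_with_sdk(connection_string, "my-container")
blob_url = url_template.format("example.txt")   # insert the blob name
response = requests.get(blob_url)
response.raise_for_status()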
Example #21
def getImgUrlWithContainerSasToken(blobName, containerName):
    content = requests.get(loginJsonFile)
    data = json.loads(content.content)
    accountName = data["Login"]["Account"]
    accountKey = data["Login"]["Key"]
    containerSasToken = generate_container_sas(
        account_name=accountName,
        container_name=containerName,
        account_key=accountKey,
        permission=ContainerSasPermissions(read=True),
        expiry=datetime.utcnow() + timedelta(hours=1))
    blob_url_with_container_sas_token = f"https://{accountName}.blob.core.windows.net/{containerName}/{blobName}?{containerSasToken}"

    return blob_url_with_container_sas_token
Example #22
def deploy(file, version, product, group, container, days):

    file_size = os.stat(file).st_size
    file_url = f"https://{stroage_account_name}.blob.core.windows.net/{container}/{os.path.basename(file)}"
    file_sas = generate_container_sas(
        account_name=stroage_account_name,
        container_name=container,
        account_key=account_access_key,
        permission=ContainerSasPermissions(read=True, list=True),
        expiry=datetime.utcnow() + timedelta(days=days))

    with open(file, "rb") as f:
        file_sha256 = hashlib.sha256(f.read()).hexdigest().upper()

    iothub_conn_str = os.environ["AZURE_IOTHUB_CONNECTIONSTRING"]
    iothub_configuration = IoTHubConfigurationManager(iothub_conn_str)

    config = models.Configuration()

    config.id = "ota_v" + str(version)
    config.content = models.ConfigurationContent(
        device_content={
            "properties.desired.extFwInfo": {
                "version": version,
                "size": file_size,
                "url": file_url,
                "sas": file_sas,
                "sha256": file_sha256
            }
        })

    config.metrics = models.ConfigurationMetrics(
        queries={
            "Downloading":
            f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='downloading'",
            "Interrupted":
            f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='interrupted'",
            "Applying":
            f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='applying'",
            "Applied":
            f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='applied'",
            "Error":
            f"SELECT deviceId FROM devices WHERE configurations.[[{config.id}]].status='Applied' AND properties.reported.extFwInfo.Status='error'"
        })

    config.target_condition = f"tags.productType='{product}' AND tags.deviceGroup='{group}'"
    config.priority = version

    iothub_configuration.create_configuration(config)
Example #23
    def test_set_container_acl_with_one_signed_identifier(self):
        # Arrange
        container = self._create_container()

        # Act
        access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True),
                                     expiry=datetime.utcnow() + timedelta(hours=1),
                                     start=datetime.utcnow())
        signed_identifiers = {'testid': access_policy}

        response = container.set_container_access_policy(signed_identifiers)

        # Assert
        self.assertIsNotNone(response.get('etag'))
        self.assertIsNotNone(response.get('last_modified'))
Example #24
def create_sas_token(blob_service,
                     storage_account,
                     container,
                     permissions=ContainerSasPermissions(read=True, list=True),
                     expire_hours=1,
                     start_hours=1):
    expiry_time = datetime.utcnow() + timedelta(hours=expire_hours)
    start_time = datetime.utcnow() - timedelta(hours=start_hours)

    return generate_container_sas(
        storage_account,
        container,
        permission=permissions,
        expiry=expiry_time,
        start=start_time,
        account_key=blob_service.credential.account_key)
Example #25
    def test_set_container_acl_with_signed_identifiers(self):
        # Arrange
        container = self._create_container()

        # Act
        access_policy = AccessPolicy(permission=ContainerSasPermissions(read=True),
                                     expiry=datetime.utcnow() + timedelta(hours=1),
                                     start=datetime.utcnow() - timedelta(minutes=1))
        identifiers = {'testid': access_policy}
        container.set_container_access_policy(identifiers)

        # Assert
        acl = container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual('testid', acl.get('signed_identifiers')[0].id)
        self.assertIsNone(acl.get('public_access'))
Example #26
 def get_img_url_with_blob_sas_token(self, blob_name):
     """
     Generates the SAS token and builds the URL used to display the blob
     on the webpage.
     """
     blob_sas_token = generate_blob_sas(
         account_name=self.account_name,
         container_name=self.container_name,
         blob_name=blob_name,
         account_key=self.account_key,
         permission=ContainerSasPermissions(read=True),
         expiry=datetime.utcnow() + timedelta(hours=1)
     )
     blob_url_with_blob_sas_token = f"https://{self.account_name}.blob.core.windows.net/{self.container_name}/{blob_name}?{blob_sas_token}"
     return blob_url_with_blob_sas_token
Example #27
def add_container_sas_url(container_url: str) -> str:
    parsed = urllib.parse.urlparse(container_url)
    query = urllib.parse.parse_qs(parsed.query)
    if "sig" in query:
        return container_url
    else:
        account_name = parsed.netloc.split(".")[0]
        account_key = get_storage_account_name_key_by_name(account_name)
        sas_token = generate_container_sas(
            account_name=account_name,
            container_name=parsed.path.split("/")[1],
            account_key=account_key,
            permission=ContainerSasPermissions(read=True,
                                               write=True,
                                               delete=True,
                                               list=True),
            expiry=datetime.datetime.utcnow() + datetime.timedelta(hours=1),
        )
        return f"{container_url}?{sas_token}"
Example #28
    def test_set_container_acl_with_lease_id(self):
        # Arrange
        container = self._create_container()
        lease_id = container.acquire_lease()

        # Act
        access_policy = AccessPolicy(
            permission=ContainerSasPermissions(read=True),
            expiry=datetime.utcnow() + timedelta(hours=1),
            start=datetime.utcnow())
        signed_identifiers = {'testid': access_policy}

        container.set_container_access_policy(signed_identifiers,
                                              lease=lease_id)

        # Assert
        acl = container.get_container_access_policy()
        self.assertIsNotNone(acl)
        self.assertIsNone(acl.get('public_access'))
Example #29
    def build_output_file(self, output_file,
                          container_path) -> azure.batch.models.OutputFile:
        """
        Builds the output-file specification that tells Azure Batch to upload a
        task's output file to an Azure Blob storage container.

        Args:
            output_file: the name of the file produced as the output by the task
            container_path: the name of the file in the container

        Returns:
            An OutputFile initialized with a container SAS URL appropriate for Batch tasks.
        """

        # where to store the outputs
        container_sas_url = (
            self.container_client.url + "?" + generate_container_sas(
                self.container_client.account_name,
                self.container_client.container_name,
                permission=ContainerSasPermissions(
                    read=True, write=True, delete=True, list=True),
                expiry=datetime.datetime.utcnow() + datetime.timedelta(
                    hours=self.config.STORAGE_ACCESS_DURATION_HRS),
                account_key=self.config.STORAGE_ACCOUNT_KEY,
            ))

        destination = models.OutputFileDestination(
            container=models.OutputFileBlobContainerDestination(
                container_url=container_sas_url, path=container_path))

        # Under what conditions should Azure Batch attempt to upload the outputs?
        upload_options = models.OutputFileUploadOptions(
            upload_condition=models.OutputFileUploadCondition.task_success)

        # https://docs.microsoft.com/en-us/azure/batch/batch-task-output-files#specify-output-files-for-task-output
        out = models.OutputFile(
            file_pattern=output_file,
            destination=destination,
            upload_options=upload_options,
        )
        self.output_files.append(container_path)

        return out
Example #30
def generate_writable_container_sas(account_name: str,
                                    account_key: str,
                                    container_name: str,
                                    access_duration_hrs: float,
                                    account_url: Optional[str] = None) -> str:
    """Creates a container and returns a SAS URI with read/write/list
    permissions.

    Args:
        account_name: str, name of blob storage account
        account_key: str, account SAS token or account shared access key
        container_name: str, name of container to create, must not match an
            existing container in the given storage account
        access_duration_hrs: float
        account_url: str, optional, defaults to default Azure Storage URL

    Returns: str, URL to newly created container

    Raises: azure.core.exceptions.ResourceExistsError, if container already
        exists

    NOTE: This method currently fails on non-default Azure Storage URLs. The
    initializer for ContainerClient() assumes the default Azure Storage URL
    format, which is a bug that has been reported here:
        https://github.com/Azure/azure-sdk-for-python/issues/12568
    """
    if account_url is None:
        account_url = build_azure_storage_uri(account=account_name)
    container_client = ContainerClient(account_url=account_url,
                                       container_name=container_name,
                                       credential=account_key)
    container_client.create_container()

    permissions = ContainerSasPermissions(read=True, write=True, list=True)
    container_sas_token = generate_container_sas(
        account_name=account_name,
        container_name=container_name,
        account_key=account_key,
        permission=permissions,
        expiry=datetime.utcnow() + timedelta(hours=access_duration_hrs))

    return f'{account_url}/{container_name}?{container_sas_token}'
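
A short sketch of how the writable container SAS URL returned above might be used to upload a blob; the account, key, and container names are placeholders:

from azure.storage.blob import ContainerClient

sas_url = generate_writable_container_sas(
    account_name="mystorageaccount",   # placeholder account name
    account_key="<account-key>",       # placeholder shared key
    container_name="new-container",    # placeholder; must not already exist
    access_duration_hrs=1,
)
with ContainerClient.from_container_url(sas_url) as container:
    container.upload_blob("hello.txt", b"hello world")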