Example #1
    def test_set_cors(self, resource_group, location, storage_account,
                      storage_account_key):
        bsc = BlobServiceClient(self._account_url(storage_account.name),
                                credential=storage_account_key)
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])

        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = [
            "x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc",
            "x-ms-meta-bcd"
        ]
        allowed_headers = [
            "x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz",
            "x-ms-meta-foo"
        ]
        cors_rule2 = CorsRule(allowed_origins,
                              allowed_methods,
                              max_age_in_seconds=max_age_in_seconds,
                              exposed_headers=exposed_headers,
                              allowed_headers=allowed_headers)

        cors = [cors_rule1, cors_rule2]

        # Act
        bsc.set_service_properties(cors=cors)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_cors_equal(received_props['cors'], cors)
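Per the service documentation quoted in Example #16 below, an account accepts at most five CORS rules, and an empty list clears them all. A minimal sketch, assuming the `bsc` client from this example:

# Hedged sketch: an empty list deletes all CORS rules and disables CORS.
bsc.set_service_properties(cors=[])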
Example #2
    def test_empty_set_service_properties_exception(self, resource_group,
                                                    location, storage_account,
                                                    storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account, "blob"),
                                credential=storage_account_key)
        with self.assertRaises(ValueError):
            bsc.set_service_properties()
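The ValueError above comes from the client rejecting a call in which every property is None. A hypothetical standalone sketch of that guard (not the SDK's actual implementation):

def _require_at_least_one(**props):
    # Mirror the behavior the test pins down: reject an all-None call.
    if all(value is None for value in props.values()):
        raise ValueError("set_service_properties requires at least one property")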
Example #3
    def test_set_default_service_version(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key)
        # Act
        bsc.set_service_properties(target_version='2014-02-14')

        # Assert
        received_props = bsc.get_service_properties()
        self.assertEqual(received_props['target_version'], '2014-02-14')
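Note that the two snapshots of this suite in this listing read the result back differently: here as the dict key received_props['target_version'], and in Example #15 below as the received_props.default_service_version attribute. Which form applies depends on the SDK version in use.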
Example #4
    def test_set_hour_metrics(self, resource_group, location, storage_account, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key)
        hour_metrics = Metrics(enabled=True, include_apis=True, retention_policy=RetentionPolicy(enabled=True, days=5))

        # Act
        bsc.set_service_properties(hour_metrics=hour_metrics)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_metrics_equal(received_props['hour_metrics'], hour_metrics)
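RetentionPolicy requires days whenever enabled=True; constructing one without it raises ValueError (test_retention_no_days in Example #15 below pins this down). A quick sketch:

from azure.storage.blob import RetentionPolicy

RetentionPolicy(enabled=True, days=5)   # valid: 1 to 365 days
RetentionPolicy(enabled=False)          # valid: days not needed when disabled
# RetentionPolicy(enabled=True)         # raises ValueError: days required when enabled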
Example #5
    def test_set_disabled_delete_retention_policy(self, resource_group, location, storage_account, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key)
        delete_retention_policy = RetentionPolicy(enabled=False)

        # Act
        bsc.set_service_properties(delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy)
Example #6
    def test_set_logging(self, resource_group, location, storage_account, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key)
        logging = BlobAnalyticsLogging(read=True, write=True, delete=True, retention_policy=RetentionPolicy(enabled=True, days=5))

        # Act
        bsc.set_service_properties(analytics_logging=logging)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_logging_equal(received_props['analytics_logging'], logging)
Example #7
    def test_set_delete_retention_policy(self, storage_account_name, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key)
        delete_retention_policy = RetentionPolicy(enabled=True, days=2)

        # Act
        bsc.set_service_properties(delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(received_props['delete_retention_policy'], delete_retention_policy)
Example #8
    def test_set_minute_metrics(self, storage_account_name, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key)
        minute_metrics = Metrics(enabled=True, include_apis=True,
                                 retention_policy=RetentionPolicy(enabled=True, days=5))

        # Act
        bsc.set_service_properties(minute_metrics=minute_metrics)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_metrics_equal(received_props['minute_metrics'], minute_metrics)
Example #9
    def test_disabled_static_website_properties(self, resource_group, location, storage_account, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key)
        static_website = StaticWebsite(enabled=False, index_document="index.html",
                                       error_document404_path="errors/error/404error.html")

        # Act
        bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], StaticWebsite(enabled=False))
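The assertion is the interesting part here: with enabled=False, the service discards the index and error documents, so the round-tripped properties compare equal to a bare StaticWebsite(enabled=False) rather than to the object that was sent.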
Example #10
    def test_set_static_website_properties_with_default_index_document_path(self, storage_account_name, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account_name, "blob"), credential=storage_account_key)
        static_website = StaticWebsite(
            enabled=True,
            error_document404_path="errors/error/404error.html",
            default_index_document_path="index.html")

        # Act
        bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], static_website)
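StaticWebsite carries two different index settings: index_document names the default page served per directory, while default_index_document_path (newer service versions) points at a single absolute default page; the REST API treats them as mutually exclusive alternatives. A sketch of the two forms, assuming the imports from the examples above:

StaticWebsite(enabled=True, index_document="index.html")               # per-directory default document name
StaticWebsite(enabled=True, default_index_document_path="index.html")  # absolute path to one default page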
Example #11
    def test_set_static_website_props_dont_impact_other_props(
            self, resource_group, location, storage_account,
            storage_account_key):
        bsc = BlobServiceClient(self._account_url(storage_account.name),
                                credential=storage_account_key)
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])

        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = [
            "x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc",
            "x-ms-meta-bcd"
        ]
        allowed_headers = [
            "x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz",
            "x-ms-meta-foo"
        ]
        cors_rule2 = CorsRule(allowed_origins,
                              allowed_methods,
                              max_age_in_seconds=max_age_in_seconds,
                              exposed_headers=exposed_headers,
                              allowed_headers=allowed_headers)

        cors = [cors_rule1, cors_rule2]

        # Act to set cors
        bsc.set_service_properties(cors=cors)

        # Assert cors is updated
        received_props = bsc.get_service_properties()
        self._assert_cors_equal(received_props['cors'], cors)

        # Arrange to set static website properties
        static_website = StaticWebsite(
            enabled=True,
            index_document="index.html",
            error_document404_path="errors/error/404error.html")

        # Act to set static website
        bsc.set_service_properties(static_website=static_website)

        # Assert static website was updated and cors was unchanged
        received_props = bsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'],
                                          static_website)
        self._assert_cors_equal(received_props['cors'], cors)
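This works because set_service_properties leaves any property passed as None untouched on the service (also stated in the DataLake docstring in Example #16): setting only static_website implicitly preserves the CORS rules from the earlier call.

# Hedged sketch: None-valued properties are preserved, so this call changes
# static_website only and leaves the previously set CORS rules intact.
bsc.set_service_properties(static_website=static_website)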
Example #12
    def test_set_static_website_properties_missing_field(self, resource_group, location, storage_account, storage_account_key):
        bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key)

        # Case1: Arrange both missing
        static_website = StaticWebsite(enabled=True)

        # Act
        bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], static_website)

        # Case2: Arrange index document missing
        static_website = StaticWebsite(enabled=True, error_document404_path="errors/error/404error.html")

        # Act
        bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], static_website)

        # Case3: Arrange error document missing
        static_website = StaticWebsite(enabled=True, index_document="index.html")

        # Act
        bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_static_website_equal(received_props['static_website'], static_website)
Example #13
    def test_set_delete_retention_policy_edge_cases(self, resource_group,
                                                    location, storage_account,
                                                    storage_account_key):
        bsc = BlobServiceClient(self._account_url(storage_account.name),
                                credential=storage_account_key)
        delete_retention_policy = RetentionPolicy(enabled=True, days=1)
        bsc.set_service_properties(
            delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(
            received_props['delete_retention_policy'], delete_retention_policy)

        # Should work with maximum settings
        delete_retention_policy = RetentionPolicy(enabled=True, days=365)
        bsc.set_service_properties(
            delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(
            received_props['delete_retention_policy'], delete_retention_policy)

        # Should not work with 0 days
        delete_retention_policy = RetentionPolicy(enabled=True, days=0)

        with self.assertRaises(ValidationError):
            bsc.set_service_properties(
                delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_delete_retention_policy_not_equal(
            received_props['delete_retention_policy'], delete_retention_policy)

        # Should not work with 366 days
        delete_retention_policy = RetentionPolicy(enabled=True, days=366)

        with self.assertRaises(HttpResponseError):
            bsc.set_service_properties(
                delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = bsc.get_service_properties()
        self._assert_delete_retention_policy_not_equal(
            received_props['delete_retention_policy'], delete_retention_policy)
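A compact summary of the retention window this test pins down:

# days=1   -> accepted (minimum)
# days=365 -> accepted (maximum)
# days=0   -> rejected before the request is sent (ValidationError)
# days=366 -> rejected by the service (HttpResponseError)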
Example #14
    def test_blob_service_properties(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        bsc = BlobServiceClient(self.account_url(storage_account.name, "blob"), credential=storage_account_key)
        # Act
        resp = bsc.set_service_properties(
            analytics_logging=BlobAnalyticsLogging(),
            hour_metrics=Metrics(),
            minute_metrics=Metrics(),
            cors=list(),
            target_version='2014-02-14'
        )

        # Assert
        self.assertIsNone(resp)
        props = bsc.get_service_properties()
        self._assert_properties_default(props)
        self.assertEqual('2014-02-14', props['target_version'])
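Restated compactly: default-constructed settings objects disable each feature (BlobAnalyticsLogging() and Metrics() default to disabled), an empty cors list removes every rule, and a successful call returns None. A sketch assuming the same bsc:

bsc.set_service_properties(
    analytics_logging=BlobAnalyticsLogging(),  # read/write/delete default to False
    hour_metrics=Metrics(),                    # enabled defaults to False
    minute_metrics=Metrics(),
    cors=[],                                   # empty list deletes all CORS rules
)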
Example #15
class ServicePropertiesTest(StorageTestCase):
    def setUp(self):
        super(ServicePropertiesTest, self).setUp()

        url = self._get_account_url()
        credential = self._get_shared_key_credential()
        self.bsc = BlobServiceClient(url, credential=credential)

    # --Helpers-----------------------------------------------------------------
    def _assert_properties_default(self, prop):
        self.assertIsNotNone(prop)

        self._assert_logging_equal(prop.logging, BlobAnalyticsLogging())
        self._assert_metrics_equal(prop.hour_metrics, Metrics())
        self._assert_metrics_equal(prop.minute_metrics, Metrics())
        self._assert_cors_equal(prop.cors, list())

    def _assert_logging_equal(self, log1, log2):
        if log1 is None or log2 is None:
            self.assertEqual(log1, log2)
            return

        self.assertEqual(log1.version, log2.version)
        self.assertEqual(log1.read, log2.read)
        self.assertEqual(log1.write, log2.write)
        self.assertEqual(log1.delete, log2.delete)
        self._assert_retention_equal(log1.retention_policy,
                                     log2.retention_policy)

    def _assert_delete_retention_policy_equal(self, policy1, policy2):
        if policy1 is None or policy2 is None:
            self.assertEqual(policy1, policy2)
            return

        self.assertEqual(policy1.enabled, policy2.enabled)
        self.assertEqual(policy1.days, policy2.days)

    def _assert_static_website_equal(self, prop1, prop2):
        if prop1 is None or prop2 is None:
            self.assertEqual(prop1, prop2)
            return

        self.assertEqual(prop1.enabled, prop2.enabled)
        self.assertEqual(prop1.index_document, prop2.index_document)
        self.assertEqual(prop1.error_document404_path,
                         prop2.error_document404_path)

    def _assert_delete_retention_policy_not_equal(self, policy1, policy2):
        if policy1 is None or policy2 is None:
            self.assertNotEqual(policy1, policy2)
            return

        # The policies differ if they disagree on either field.
        self.assertFalse(policy1.enabled == policy2.enabled
                         and policy1.days == policy2.days)

    def _assert_metrics_equal(self, metrics1, metrics2):
        if metrics1 is None or metrics2 is None:
            self.assertEqual(metrics1, metrics2)
            return

        self.assertEqual(metrics1.version, metrics2.version)
        self.assertEqual(metrics1.enabled, metrics2.enabled)
        self.assertEqual(metrics1.include_apis, metrics2.include_apis)
        self._assert_retention_equal(metrics1.retention_policy,
                                     metrics2.retention_policy)

    def _assert_cors_equal(self, cors1, cors2):
        if cors1 is None or cors2 is None:
            self.assertEqual(cors1, cors2)
            return

        self.assertEqual(len(cors1), len(cors2))

        for i in range(0, len(cors1)):
            rule1 = cors1[i]
            rule2 = cors2[i]
            self.assertEqual(len(rule1.allowed_origins),
                             len(rule2.allowed_origins))
            self.assertEqual(len(rule1.allowed_methods),
                             len(rule2.allowed_methods))
            self.assertEqual(rule1.max_age_in_seconds,
                             rule2.max_age_in_seconds)
            self.assertEqual(len(rule1.exposed_headers),
                             len(rule2.exposed_headers))
            self.assertEqual(len(rule1.allowed_headers),
                             len(rule2.allowed_headers))

    def _assert_retention_equal(self, ret1, ret2):
        self.assertEqual(ret1.enabled, ret2.enabled)
        self.assertEqual(ret1.days, ret2.days)

    # --Test cases per service ---------------------------------------

    @record
    def test_blob_service_properties(self):
        # Arrange

        # Act
        resp = self.bsc.set_service_properties(
            analytics_logging=BlobAnalyticsLogging(),
            hour_metrics=Metrics(),
            minute_metrics=Metrics(),
            cors=list(),
            target_version='2014-02-14')

        # Assert
        self.assertIsNone(resp)
        props = self.bsc.get_service_properties()
        self._assert_properties_default(props)
        self.assertEqual('2014-02-14', props.default_service_version)

    # --Test cases per feature ---------------------------------------
    @record
    def test_set_default_service_version(self):
        # Arrange

        # Act
        self.bsc.set_service_properties(target_version='2014-02-14')

        # Assert
        received_props = self.bsc.get_service_properties()
        self.assertEqual(received_props.default_service_version, '2014-02-14')

    @record
    def test_set_delete_retention_policy(self):
        # Arrange
        delete_retention_policy = RetentionPolicy(enabled=True, days=2)

        # Act
        self.bsc.set_service_properties(
            delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(
            received_props.delete_retention_policy, delete_retention_policy)

    @record
    def test_set_delete_retention_policy_edge_cases(self):
        # Should work with minimum settings
        delete_retention_policy = RetentionPolicy(enabled=True, days=1)
        self.bsc.set_service_properties(
            delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(
            received_props.delete_retention_policy, delete_retention_policy)

        # Should work with maximum settings
        delete_retention_policy = RetentionPolicy(enabled=True, days=365)
        self.bsc.set_service_properties(
            delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(
            received_props.delete_retention_policy, delete_retention_policy)

        # Should not work with 0 days
        delete_retention_policy = RetentionPolicy(enabled=True, days=0)

        with self.assertRaises(ValidationError):
            self.bsc.set_service_properties(
                delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_delete_retention_policy_not_equal(
            received_props.delete_retention_policy, delete_retention_policy)

        # Should not work with 366 days
        delete_retention_policy = RetentionPolicy(enabled=True, days=366)

        with self.assertRaises(HttpResponseError):
            self.bsc.set_service_properties(
                delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_delete_retention_policy_not_equal(
            received_props.delete_retention_policy, delete_retention_policy)

    @record
    def test_set_disabled_delete_retention_policy(self):
        # Arrange
        delete_retention_policy = RetentionPolicy(enabled=False)

        # Act
        self.bsc.set_service_properties(
            delete_retention_policy=delete_retention_policy)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_delete_retention_policy_equal(
            received_props.delete_retention_policy, delete_retention_policy)

    @record
    def test_set_static_website_properties(self):
        # Arrange
        static_website = StaticWebsite(
            enabled=True,
            index_document="index.html",
            error_document404_path="errors/error/404error.html")

        # Act
        self.bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_static_website_equal(received_props.static_website,
                                          static_website)

    @record
    def test_set_static_website_properties_missing_field(self):
        # Case1: Arrange both missing
        static_website = StaticWebsite(enabled=True)

        # Act
        self.bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_static_website_equal(received_props.static_website,
                                          static_website)

        # Case2: Arrange index document missing
        static_website = StaticWebsite(
            enabled=True, error_document404_path="errors/error/404error.html")

        # Act
        self.bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_static_website_equal(received_props.static_website,
                                          static_website)

        # Case3: Arrange error document missing
        static_website = StaticWebsite(enabled=True,
                                       index_document="index.html")

        # Act
        self.bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_static_website_equal(received_props.static_website,
                                          static_website)

    @record
    def test_disabled_static_website_properties(self):
        # Arrange
        static_website = StaticWebsite(
            enabled=False,
            index_document="index.html",
            error_document404_path="errors/error/404error.html")

        # Act
        self.bsc.set_service_properties(static_website=static_website)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_static_website_equal(received_props.static_website,
                                          StaticWebsite(enabled=False))

    @record
    def test_set_static_website_properties_does_not_impact_other_properties(
            self):
        # Arrange to set cors
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])

        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = [
            "x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc",
            "x-ms-meta-bcd"
        ]
        allowed_headers = [
            "x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz",
            "x-ms-meta-foo"
        ]
        cors_rule2 = CorsRule(allowed_origins,
                              allowed_methods,
                              max_age_in_seconds=max_age_in_seconds,
                              exposed_headers=exposed_headers,
                              allowed_headers=allowed_headers)

        cors = [cors_rule1, cors_rule2]

        # Act to set cors
        self.bsc.set_service_properties(cors=cors)

        # Assert cors is updated
        received_props = self.bsc.get_service_properties()
        self._assert_cors_equal(received_props.cors, cors)

        # Arrange to set static website properties
        static_website = StaticWebsite(
            enabled=True,
            index_document="index.html",
            error_document404_path="errors/error/404error.html")

        # Act to set static website
        self.bsc.set_service_properties(static_website=static_website)

        # Assert static website was updated and cors was unchanged
        received_props = self.bsc.get_service_properties()
        self._assert_static_website_equal(received_props.static_website,
                                          static_website)
        self._assert_cors_equal(received_props.cors, cors)

    @record
    def test_set_logging(self):
        # Arrange
        logging = BlobAnalyticsLogging(read=True,
                                       write=True,
                                       delete=True,
                                       retention_policy=RetentionPolicy(
                                           enabled=True, days=5))

        # Act
        self.bsc.set_service_properties(analytics_logging=logging)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_logging_equal(received_props.logging, logging)

    @record
    def test_set_hour_metrics(self):
        # Arrange
        hour_metrics = Metrics(enabled=True,
                               include_apis=True,
                               retention_policy=RetentionPolicy(enabled=True,
                                                                days=5))

        # Act
        self.bsc.set_service_properties(hour_metrics=hour_metrics)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_metrics_equal(received_props.hour_metrics, hour_metrics)

    @record
    def test_set_minute_metrics(self):
        # Arrange
        minute_metrics = Metrics(enabled=True,
                                 include_apis=True,
                                 retention_policy=RetentionPolicy(enabled=True,
                                                                  days=5))

        # Act
        self.bsc.set_service_properties(minute_metrics=minute_metrics)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_metrics_equal(received_props.minute_metrics,
                                   minute_metrics)

    @record
    def test_set_cors(self):
        # Arrange
        cors_rule1 = CorsRule(['www.xyz.com'], ['GET'])

        allowed_origins = ['www.xyz.com', "www.ab.com", "www.bc.com"]
        allowed_methods = ['GET', 'PUT']
        max_age_in_seconds = 500
        exposed_headers = [
            "x-ms-meta-data*", "x-ms-meta-source*", "x-ms-meta-abc",
            "x-ms-meta-bcd"
        ]
        allowed_headers = [
            "x-ms-meta-data*", "x-ms-meta-target*", "x-ms-meta-xyz",
            "x-ms-meta-foo"
        ]
        cors_rule2 = CorsRule(allowed_origins,
                              allowed_methods,
                              max_age_in_seconds=max_age_in_seconds,
                              exposed_headers=exposed_headers,
                              allowed_headers=allowed_headers)

        cors = [cors_rule1, cors_rule2]

        # Act
        self.bsc.set_service_properties(cors=cors)

        # Assert
        received_props = self.bsc.get_service_properties()
        self._assert_cors_equal(received_props.cors, cors)

    # --Test cases for errors ---------------------------------------
    @record
    def test_retention_no_days(self):
        # Assert
        self.assertRaises(ValueError, RetentionPolicy, True, None)

    @record
    def test_too_many_cors_rules(self):
        # Arrange
        cors = []
        for i in range(0, 6):
            cors.append(CorsRule(['www.xyz.com'], ['GET']))

        # Assert
        self.assertRaises(HttpResponseError, self.bsc.set_service_properties,
                          None, None, None, cors)

    @record
    def test_retention_too_long(self):
        # Arrange
        minute_metrics = Metrics(enabled=True,
                                 include_apis=True,
                                 retention_policy=RetentionPolicy(enabled=True,
                                                                  days=366))

        # Assert
        self.assertRaises(HttpResponseError, self.bsc.set_service_properties,
                          None, None, minute_metrics)
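The two error tests at the end pin down service-side limits: metrics retention beyond 365 days and a sixth CORS rule are both rejected with HttpResponseError. A minimal sketch of the CORS limit, assuming a configured bsc:

cors = [CorsRule(['www.xyz.com'], ['GET']) for _ in range(6)]  # one over the limit of five
bsc.set_service_properties(cors=cors)                          # expected: HttpResponseError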
Example #16
class DataLakeServiceClient(StorageAccountHostsMixin):
    """A client to interact with the DataLake Service at the account level.

    This client provides operations to retrieve and configure the account properties
    as well as list, create and delete file systems within the account.
    For operations relating to a specific file system, directory or file, clients for those entities
    can also be retrieved using the `get_client` functions.

    :ivar str url:
        The full endpoint URL to the datalake service endpoint.
    :ivar str primary_endpoint:
        The full primary endpoint URL.
    :ivar str primary_hostname:
        The hostname of the primary endpoint.
    :param str account_url:
        The URL to the DataLake storage account. Any other entities included
        in the URL path (e.g. file system or file) will be discarded. This URL can be optionally
        authenticated with a SAS token.
    :param credential:
        The credentials with which to authenticate. This is optional if the
        account URL already has a SAS token. The value can be a SAS token string,
        an instance of a AzureSasCredential from azure.core.credentials, an account
        shared access key, or an instance of a TokenCredentials class from azure.identity.
        If the resource URI already contains a SAS token, this will be ignored in favor of an explicit credential
        - except in the case of AzureSasCredential, where the conflicting SAS tokens will raise a ValueError.

    .. admonition:: Example:

        .. literalinclude:: ../samples/datalake_samples_service.py
            :start-after: [START create_datalake_service_client]
            :end-before: [END create_datalake_service_client]
            :language: python
            :dedent: 8
            :caption: Creating the DataLakeServiceClient from connection string.

        .. literalinclude:: ../samples/datalake_samples_service.py
            :start-after: [START create_datalake_service_client_oauth]
            :end-before: [END create_datalake_service_client_oauth]
            :language: python
            :dedent: 8
            :caption: Creating the DataLakeServiceClient with Azure Identity credentials.
    """
    def __init__(
            self,
            account_url,  # type: str
            credential=None,  # type: Optional[Any]
            **kwargs  # type: Any
    ):
        # type: (...) -> None
        try:
            if not account_url.lower().startswith('http'):
                account_url = "https://" + account_url
        except AttributeError:
            raise ValueError("Account URL must be a string.")
        parsed_url = urlparse(account_url.rstrip('/'))
        if not parsed_url.netloc:
            raise ValueError("Invalid URL: {}".format(account_url))

        # ADLS Gen2 exposes the same account through parallel dfs and blob
        # endpoints, so keep a companion BlobServiceClient for operations that
        # are only implemented against the blob endpoint.
        blob_account_url = convert_dfs_url_to_blob_url(account_url)
        self._blob_account_url = blob_account_url
        self._blob_service_client = BlobServiceClient(blob_account_url,
                                                      credential, **kwargs)
        self._blob_service_client._hosts[LocationMode.SECONDARY] = ""  # pylint: disable=protected-access

        _, sas_token = parse_query(parsed_url.query)
        self._query_str, self._raw_credential = self._format_query_string(
            sas_token, credential)

        super(DataLakeServiceClient,
              self).__init__(parsed_url,
                             service='dfs',
                             credential=self._raw_credential,
                             **kwargs)
        # ADLS doesn't support secondary endpoint, make sure it's empty
        self._hosts[LocationMode.SECONDARY] = ""

    def __enter__(self):
        self._blob_service_client.__enter__()
        return self

    def __exit__(self, *args):
        self._blob_service_client.close()

    def close(self):
        # type: () -> None
        """ This method is to close the sockets opened by the client.
        It need not be used when using with a context manager.
        """
        self._blob_service_client.close()

    def _format_url(self, hostname):
        """Format the endpoint URL according to hostname."""
        formatted_url = "{}://{}/{}".format(self.scheme, hostname,
                                            self._query_str)
        return formatted_url

    @classmethod
    def from_connection_string(
        cls,
        conn_str,  # type: str
        credential=None,  # type: Optional[Any]
        **kwargs  # type: Any
    ):  # type: (...) -> DataLakeServiceClient
        """
        Create DataLakeServiceClient from a Connection String.

        :param str conn_str:
            A connection string to an Azure Storage account.
        :param credential:
            The credentials with which to authenticate. This is optional if the
            account URL already has a SAS token, or the connection string already has shared
            access key values. The value can be a SAS token string,
            an instance of a AzureSasCredential from azure.core.credentials, an account shared access
            key, or an instance of a TokenCredentials class from azure.identity.
            Credentials provided here will take precedence over those in the connection string.
        :return: A DataLakeServiceClient.
        :rtype: ~azure.storage.filedatalake.DataLakeServiceClient

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_file_system.py
                :start-after: [START create_data_lake_service_client_from_conn_str]
                :end-before: [END create_data_lake_service_client_from_conn_str]
                :language: python
                :dedent: 8
                :caption: Creating the DataLakeServiceClient from a connection string.
        """
        account_url, _, credential = parse_connection_str(
            conn_str, credential, 'dfs')
        return cls(account_url, credential=credential, **kwargs)

    def get_user_delegation_key(
            self,
            key_start_time,  # type: datetime
            key_expiry_time,  # type: datetime
            **kwargs  # type: Any
    ):
        # type: (...) -> UserDelegationKey
        """
        Obtain a user delegation key for the purpose of signing SAS tokens.
        A token credential must be present on the service object for this request to succeed.

        :param ~datetime.datetime key_start_time:
            A DateTime value. Indicates when the key becomes valid.
        :param ~datetime.datetime key_expiry_time:
            A DateTime value. Indicates when the key stops being valid.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :return: The user delegation key.
        :rtype: ~azure.storage.filedatalake.UserDelegationKey

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_service.py
                :start-after: [START get_user_delegation_key]
                :end-before: [END get_user_delegation_key]
                :language: python
                :dedent: 8
                :caption: Get user delegation key from datalake service client.
        """
        delegation_key = self._blob_service_client.get_user_delegation_key(
            key_start_time=key_start_time,
            key_expiry_time=key_expiry_time,
            **kwargs)  # pylint: disable=protected-access
        return UserDelegationKey._from_generated(delegation_key)  # pylint: disable=protected-access

    def list_file_systems(
            self,
            name_starts_with=None,  # type: Optional[str]
            include_metadata=None,  # type: Optional[bool]
            **kwargs):
        # type: (...) -> ItemPaged[FileSystemProperties]
        """Returns a generator to list the file systems under the specified account.

        The generator will lazily follow the continuation tokens returned by
        the service and stop when all file systems have been returned.

        :param str name_starts_with:
            Filters the results to return only file systems whose names
            begin with the specified prefix.
        :param bool include_metadata:
            Specifies that file system metadata be returned in the response.
            The default value is `False`.
        :keyword int results_per_page:
            The maximum number of file system names to retrieve per API
            call. If the request does not specify, the server will return up to 5,000 items per page.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :keyword bool include_deleted:
            Specifies that deleted file systems be returned in the response. This is only for
            accounts enabled for file system soft delete (restore). The default value is `False`.

            .. versionadded:: 12.3.0
        :returns: An iterable (auto-paging) of FileSystemProperties.
        :rtype: ~azure.core.paging.ItemPaged[~azure.storage.filedatalake.FileSystemProperties]

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_service.py
                :start-after: [START list_file_systems]
                :end-before: [END list_file_systems]
                :language: python
                :dedent: 8
                :caption: Listing the file systems in the datalake service.
        """
        item_paged = self._blob_service_client.list_containers(
            name_starts_with=name_starts_with,
            include_metadata=include_metadata,
            **kwargs)  # pylint: disable=protected-access
        item_paged._page_iterator_class = FileSystemPropertiesPaged  # pylint: disable=protected-access
        return item_paged

    def create_file_system(
            self,
            file_system,  # type: Union[FileSystemProperties, str]
            metadata=None,  # type: Optional[Dict[str, str]]
            public_access=None,  # type: Optional[PublicAccess]
            **kwargs):
        # type: (...) -> FileSystemClient
        """Creates a new file system under the specified account.

        If the file system with the same name already exists, a ResourceExistsError will
        be raised. This method returns a client with which to interact with the newly
        created file system.

        :param str file_system:
            The name of the file system to create.
        :param metadata:
            A dict with name-value pairs to associate with the
            file system as metadata. Example: `{'Category':'test'}`
        :type metadata: dict(str, str)
        :param public_access:
            Possible values include: file system, file.
        :type public_access: ~azure.storage.filedatalake.PublicAccess
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: ~azure.storage.filedatalake.FileSystemClient

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_service.py
                :start-after: [START create_file_system_from_service_client]
                :end-before: [END create_file_system_from_service_client]
                :language: python
                :dedent: 8
                :caption: Creating a file system in the datalake service.
        """
        file_system_client = self.get_file_system_client(file_system)
        file_system_client.create_file_system(metadata=metadata,
                                              public_access=public_access,
                                              **kwargs)
        return file_system_client

    def _rename_file_system(self, name, new_name, **kwargs):
        # type: (str, str, **Any) -> FileSystemClient
        """Renames a filesystem.

        Operation is successful only if the source filesystem exists.

        :param str name:
            The name of the filesystem to rename.
        :param str new_name:
            The new filesystem name the user wants to rename to.
        :keyword lease:
            Specify this to perform only if the lease ID given
            matches the active lease ID of the source filesystem.
        :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: ~azure.storage.filedatalake.FileSystemClient
        """
        self._blob_service_client._rename_container(name, new_name, **kwargs)  # pylint: disable=protected-access
        renamed_file_system = self.get_file_system_client(new_name)
        return renamed_file_system

    def undelete_file_system(self, name, deleted_version, **kwargs):
        # type: (str, str, **Any) -> FileSystemClient
        """Restores soft-deleted filesystem.

        Operation will only be successful if used within the specified number of days
        set in the delete retention policy.

        .. versionadded:: 12.3.0
            This operation was introduced in API version '2019-12-12'.

        :param str name:
            Specifies the name of the deleted filesystem to restore.
        :param str deleted_version:
            Specifies the version of the deleted filesystem to restore.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: ~azure.storage.filedatalake.FileSystemClient
        """
        new_name = kwargs.pop('new_name', None)
        file_system = self.get_file_system_client(new_name or name)
        self._blob_service_client.undelete_container(name,
                                                     deleted_version,
                                                     new_name=new_name,
                                                     **kwargs)  # pylint: disable=protected-access
        return file_system

    def delete_file_system(
            self,
            file_system,  # type: Union[FileSystemProperties, str]
            **kwargs):
        # type: (...) -> FileSystemClient
        """Marks the specified file system for deletion.

        The file system and any files contained within it are later deleted during garbage collection.
        If the file system is not found, a ResourceNotFoundError will be raised.

        :param file_system:
            The file system to delete. This can either be the name of the file system,
            or an instance of FileSystemProperties.
        :type file_system: str or ~azure.storage.filedatalake.FileSystemProperties
        :keyword lease:
            If specified, delete_file_system only succeeds if the
            file system's lease is active and matches this ID.
            Required if the file system has an active lease.
        :paramtype lease: ~azure.storage.filedatalake.DataLakeLeaseClient or str
        :keyword ~datetime.datetime if_modified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only
            if the resource has been modified since the specified time.
        :keyword ~datetime.datetime if_unmodified_since:
            A DateTime value. Azure expects the date value passed in to be UTC.
            If timezone is included, any non-UTC datetimes will be converted to UTC.
            If a date is passed in without timezone info, it is assumed to be UTC.
            Specify this header to perform the operation only if
            the resource has not been modified since the specified date/time.
        :keyword str etag:
            An ETag value, or the wildcard character (*). Used to check if the resource has changed,
            and act according to the condition specified by the `match_condition` parameter.
        :keyword ~azure.core.MatchConditions match_condition:
            The match condition to use upon the etag.
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: None

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_service.py
                :start-after: [START delete_file_system_from_service_client]
                :end-before: [END delete_file_system_from_service_client]
                :language: python
                :dedent: 8
                :caption: Deleting a file system in the datalake service.
        """
        file_system_client = self.get_file_system_client(file_system)
        file_system_client.delete_file_system(**kwargs)
        return file_system_client

    def get_file_system_client(
            self,
            file_system  # type: Union[FileSystemProperties, str]
    ):
        # type: (...) -> FileSystemClient
        """Get a client to interact with the specified file system.

        The file system need not already exist.

        :param file_system:
            The file system. This can either be the name of the file system,
            or an instance of FileSystemProperties.
        :type file_system: str or ~azure.storage.filedatalake.FileSystemProperties
        :returns: A FileSystemClient.
        :rtype: ~azure.storage.filedatalake.FileSystemClient

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_file_system.py
                :start-after: [START create_file_system_client_from_service]
                :end-before: [END create_file_system_client_from_service]
                :language: python
                :dedent: 8
                :caption: Getting the file system client to interact with a specific file system.
        """
        try:
            file_system_name = file_system.name
        except AttributeError:
            file_system_name = file_system

        _pipeline = Pipeline(
            transport=TransportWrapper(self._pipeline._transport),  # pylint: disable=protected-access
            policies=self._pipeline._impl_policies  # pylint: disable=protected-access
        )
        return FileSystemClient(
            self.url,
            file_system_name,
            credential=self._raw_credential,
            _configuration=self._config,
            _pipeline=_pipeline,
            _hosts=self._hosts,
            require_encryption=self.require_encryption,
            key_encryption_key=self.key_encryption_key,
            key_resolver_function=self.key_resolver_function)

    def get_directory_client(
        self,
        file_system,  # type: Union[FileSystemProperties, str]
        directory  # type: Union[DirectoryProperties, str]
    ):
        # type: (...) -> DataLakeDirectoryClient
        """Get a client to interact with the specified directory.

        The directory need not already exist.

        :param file_system:
            The file system that the directory is in. This can either be the name of the file system,
            or an instance of FileSystemProperties.
        :type file_system: str or ~azure.storage.filedatalake.FileSystemProperties
        :param directory:
            The directory with which to interact. This can either be the name of the directory,
            or an instance of DirectoryProperties.
        :type directory: str or ~azure.storage.filedatalake.DirectoryProperties
        :returns: A DataLakeDirectoryClient.
        :rtype: ~azure.storage.filedatalake.DataLakeDirectoryClient

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_service.py
                :start-after: [START get_directory_client_from_service_client]
                :end-before: [END get_directory_client_from_service_client]
                :language: python
                :dedent: 8
                :caption: Getting the directory client to interact with a specific directory.
        """
        try:
            file_system_name = file_system.name
        except AttributeError:
            file_system_name = file_system
        try:
            directory_name = directory.name
        except AttributeError:
            directory_name = directory

        _pipeline = Pipeline(
            transport=TransportWrapper(self._pipeline._transport),  # pylint: disable=protected-access
            policies=self._pipeline._impl_policies  # pylint: disable=protected-access
        )
        return DataLakeDirectoryClient(
            self.url,
            file_system_name,
            directory_name=directory_name,
            credential=self._raw_credential,
            _configuration=self._config,
            _pipeline=_pipeline,
            _hosts=self._hosts,
            require_encryption=self.require_encryption,
            key_encryption_key=self.key_encryption_key,
            key_resolver_function=self.key_resolver_function)

    def get_file_client(
        self,
        file_system,  # type: Union[FileSystemProperties, str]
        file_path  # type: Union[FileProperties, str]
    ):
        # type: (...) -> DataLakeFileClient
        """Get a client to interact with the specified file.

        The file need not already exist.

        :param file_system:
            The file system that the file is in. This can either be the name of the file system,
            or an instance of FileSystemProperties.
        :type file_system: str or ~azure.storage.filedatalake.FileSystemProperties
        :param file_path:
            The file with which to interact. This can either be the full path of the file
            (from the root directory), e.g. directory/subdirectory/file, or an instance of FileProperties.
        :type file_path: str or ~azure.storage.filedatalake.FileProperties
        :returns: A DataLakeFileClient.
        :rtype: ~azure.storage.filedatalake.DataLakeFileClient

        .. admonition:: Example:

            .. literalinclude:: ../samples/datalake_samples_service.py
                :start-after: [START get_file_client_from_service_client]
                :end-before: [END get_file_client_from_service_client]
                :language: python
                :dedent: 8
                :caption: Getting the file client to interact with a specific file.
        """
        try:
            file_system_name = file_system.name
        except AttributeError:
            file_system_name = file_system
        try:
            file_path = file_path.name
        except AttributeError:
            pass

        _pipeline = Pipeline(
            transport=TransportWrapper(self._pipeline._transport),  # pylint: disable=protected-access
            policies=self._pipeline._impl_policies  # pylint: disable=protected-access
        )
        return DataLakeFileClient(
            self.url,
            file_system_name,
            file_path=file_path,
            credential=self._raw_credential,
            _hosts=self._hosts,
            _configuration=self._config,
            _pipeline=_pipeline,
            require_encryption=self.require_encryption,
            key_encryption_key=self.key_encryption_key,
            key_resolver_function=self.key_resolver_function)

    def set_service_properties(self, **kwargs):
        # type: (**Any) -> None
        """Sets the properties of a storage account's Datalake service, including
        Azure Storage Analytics.

        .. versionadded:: 12.4.0
            This operation was introduced in API version '2020-06-12'.

        If an element (e.g. analytics_logging) is left as None, the
        existing settings on the service for that functionality are preserved.

        :keyword analytics_logging:
            Groups the Azure Analytics Logging settings.
        :type analytics_logging: ~azure.storage.filedatalake.AnalyticsLogging
        :keyword hour_metrics:
            The hour metrics settings provide a summary of request
            statistics grouped by API in hourly aggregates.
        :type hour_metrics: ~azure.storage.filedatalake.Metrics
        :keyword minute_metrics:
            The minute metrics settings provide request statistics
            for each minute.
        :type minute_metrics: ~azure.storage.filedatalake.Metrics
        :keyword cors:
            You can include up to five CorsRule elements in the
            list. If an empty list is specified, all CORS rules will be deleted,
            and CORS will be disabled for the service.
        :type cors: list[~azure.storage.filedatalake.CorsRule]
        :keyword str target_version:
            Indicates the default version to use for requests if an incoming
            request's version is not specified.
        :keyword delete_retention_policy:
            The delete retention policy specifies whether to retain deleted files/directories.
            It also specifies the number of days and versions of file/directory to keep.
        :type delete_retention_policy: ~azure.storage.filedatalake.RetentionPolicy
        :keyword static_website:
            Specifies whether the static website feature is enabled,
            and if yes, indicates the index document and 404 error document to use.
        :type static_website: ~azure.storage.filedatalake.StaticWebsite
        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :rtype: None
        """
        return self._blob_service_client.set_service_properties(**kwargs)  # pylint: disable=protected-access

    def get_service_properties(self, **kwargs):
        # type: (**Any) -> Dict[str, Any]
        """Gets the properties of a storage account's datalake service, including
        Azure Storage Analytics.

        .. versionadded:: 12.4.0
            This operation was introduced in API version '2020-06-12'.

        :keyword int timeout:
            The timeout parameter is expressed in seconds.
        :returns: An object containing datalake service properties such as
            analytics logging, hour/minute metrics, cors rules, etc.
        :rtype: Dict[str, Any]
        """
        props = self._blob_service_client.get_service_properties(**kwargs)  # pylint: disable=protected-access
        return get_datalake_service_properties(props)
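A hedged end-to-end sketch of the client above; the account URL, key, and file system name are placeholders:

from azure.storage.filedatalake import DataLakeServiceClient

service = DataLakeServiceClient(
    "https://myaccount.dfs.core.windows.net",  # hypothetical account URL
    credential="<account-key>")                # or a SAS token / azure.identity credential

file_system = service.create_file_system("myfilesystem")  # returns a FileSystemClient
for props in service.list_file_systems(name_starts_with="my"):
    print(props.name)
service.delete_file_system("myfilesystem")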
Example #17
File: azurestorage.py Project: wjjmjh/quay
class AzureStorage(BaseStorage):
    def __init__(
        self,
        context,
        azure_container,
        storage_path,
        azure_account_name,
        azure_account_key=None,
        sas_token=None,
        connection_string=None,
    ):
        super(AzureStorage, self).__init__()
        self._context = context
        self._storage_path = storage_path.lstrip("/")

        self._azure_account_name = azure_account_name
        self._azure_account_key = azure_account_key
        self._azure_sas_token = sas_token
        self._azure_container = azure_container
        self._azure_connection_string = connection_string

        self._blob_service_client = BlobServiceClient(
            AZURE_STORAGE_URL_STRING.format(self._azure_account_name),
            credential=self._azure_account_key,
        )

        # https://docs.microsoft.com/en-us/rest/api/storageservices/understanding-block-blobs--append-blobs--and-page-blobs
        # Pick the maximum block size for the negotiated service API version;
        # older API versions cap block uploads at smaller sizes.
        api_version = self._blob_service_client.api_version
        api_version_dt = datetime.strptime(api_version, "%Y-%m-%d")
        if api_version_dt < _API_VERSION_LIMITS["2016-05-31"][0]:
            self._max_block_size = _API_VERSION_LIMITS["2016-05-31"][1]
        elif api_version_dt <= _API_VERSION_LIMITS["2019-07-07"][0]:
            self._max_block_size = _API_VERSION_LIMITS["2019-07-07"][1]
        elif api_version_dt >= _API_VERSION_LIMITS["2019-12-12"][0]:
            self._max_block_size = _API_VERSION_LIMITS["2019-12-12"][1]
        else:
            raise Exception("Unknown Azure api version %s" % api_version)

    def _blob_name_from_path(self, object_path):
        if ".." in object_path:
            raise Exception("Relative paths are not allowed; found %s" %
                            object_path)

        return os.path.join(self._storage_path, object_path).rstrip("/")

    def _upload_blob_path_from_uuid(self, uuid):
        return self._blob_name_from_path(
            self._upload_blob_name_from_uuid(uuid))

    def _upload_blob_name_from_uuid(self, uuid):
        return "uploads/{0}".format(uuid)

    def _blob(self, blob_name):
        return self._blob_service_client.get_blob_client(
            self._azure_container, blob_name)

    @property
    def _container(self):
        return self._blob_service_client.get_container_client(
            self._azure_container)

    def get_direct_download_url(self,
                                object_path,
                                request_ip=None,
                                expires_in=60,
                                requires_cors=False,
                                head=False):
        blob_name = self._blob_name_from_path(object_path)

        try:
            sas_token = generate_blob_sas(
                self._azure_account_name,
                self._azure_container,
                blob_name,
                account_key=self._azure_account_key,
                permission=BlobSasPermissions.from_string("r"),  # blob-level SAS
                expiry=datetime.utcnow() + timedelta(seconds=expires_in),
            )

            blob_url = "{}?{}".format(self._blob(blob_name).url, sas_token)

        except AzureError:
            logger.exception(
                "Exception when trying to get direct download for path %s",
                object_path)
            raise IOError("Exception when trying to get direct download")

        return blob_url

    def validate(self, client):
        super(AzureStorage, self).validate(client)

    def get_content(self, path):
        blob_name = self._blob_name_from_path(path)
        try:
            blob_stream = self._blob(blob_name).download_blob()
        except AzureError:
            logger.exception("Exception when trying to get path %s", path)
            raise IOError("Exception when trying to get path")

        return blob_stream.content_as_bytes()

    def put_content(self, path, content):
        blob_name = self._blob_name_from_path(path)
        try:
            self._blob(blob_name).upload_blob(content,
                                              blob_type=BlobType.BlockBlob,
                                              overwrite=True)
        except AzureError:
            logger.exception("Exception when trying to put path %s", path)
            raise IOError("Exception when trying to put path")

    def stream_read(self, path):
        with self.stream_read_file(path) as f:
            while True:
                buf = f.read(self.buffer_size)
                if not buf:
                    break
                yield buf

    def stream_read_file(self, path):
        blob_name = self._blob_name_from_path(path)

        try:
            output_stream = io.BytesIO()
            self._blob(blob_name).download_blob().download_to_stream(
                output_stream)
            output_stream.seek(0)
        except AzureError:
            logger.exception(
                "Exception when trying to stream_file_read path %s", path)
            raise IOError("Exception when trying to stream_file_read path")

        return output_stream
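    # Note: stream_read_file buffers the entire blob into memory before
    # returning it. For very large blobs, an incremental variant could iterate
    # the downloader's chunks() generator (available on the azure-storage-blob
    # 12.x StorageStreamDownloader). A sketch, not part of the original class:
    #
    #   def stream_read_chunked(self, path):
    #       blob_name = self._blob_name_from_path(path)
    #       for chunk in self._blob(blob_name).download_blob().chunks():
    #           yield chunk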

    def stream_write(self, path, fp, content_type=None, content_encoding=None):
        blob_name = self._blob_name_from_path(path)
        content_settings = ContentSettings(
            content_type=content_type,
            content_encoding=content_encoding,
        )

        try:
            self._blob(blob_name).upload_blob(
                fp, content_settings=content_settings, overwrite=True)
        except AzureError as ae:
            logger.exception("Exception when trying to stream_write path %s",
                             path)
            raise IOError("Exception when trying to stream_write path", ae)

    def exists(self, path):
        blob_name = self._blob_name_from_path(path)

        try:
            self._blob(blob_name).get_blob_properties()
        except ResourceNotFoundError:
            return False
        except AzureError:
            logger.exception("Exception when trying to check exists path %s",
                             path)
            raise IOError("Exception when trying to check exists path")

        return True
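    # Note: newer azure-storage-blob 12.x releases add BlobClient.exists(),
    # which could replace the get_blob_properties() probe above; the
    # try/except form is kept here for compatibility with older releases.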

    def remove(self, path):
        blob_name = self._blob_name_from_path(path)
        try:
            self._blob(blob_name).delete_blob()
        except AzureError:
            logger.exception("Exception when trying to remove path %s", path)
            raise IOError("Exception when trying to remove path")

    def get_checksum(self, path):
        blob_name = self._blob_name_from_path(path)
        try:
            blob_properties = self._blob(blob_name).get_blob_properties()
        except AzureError:
            logger.exception(
                "Exception when trying to get_checksum for path %s", path)
            raise IOError("Exception when trying to get_checksum path")
        return blob_properties.etag

    def initiate_chunked_upload(self):
        random_uuid = str(uuid.uuid4())
        metadata = {
            _BLOCKS_KEY: [],
            _CONTENT_TYPE_KEY: None,
        }
        return random_uuid, metadata
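    # _BLOCKS_KEY and _CONTENT_TYPE_KEY are module-level constants not shown
    # in this snippet; plausible definitions, for illustration only:
    #   _BLOCKS_KEY = "blocks"
    #   _CONTENT_TYPE_KEY = "content-type"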

    def stream_upload_chunk(self,
                            uuid,
                            offset,
                            length,
                            in_fp,
                            storage_metadata,
                            content_type=None):
        if length == 0:
            return 0, storage_metadata, None

        upload_blob_path = self._upload_blob_path_from_uuid(uuid)
        new_metadata = copy.deepcopy(storage_metadata)

        total_bytes_written = 0

        while True:
            current_length = length - total_bytes_written
            max_length = (min(current_length, self._max_block_size) if
                          length != READ_UNTIL_END else self._max_block_size)
            if max_length <= 0:
                break

            limited = LimitingStream(in_fp, max_length, seekable=False)

            # Note: Azure fails if a zero-length block is uploaded, so we read all the data here,
            # and, if there is none, terminate early.
            block_data = b""
            for chunk in iter(lambda: limited.read(31457280), b""):  # 30 MiB reads
                block_data += chunk

            if len(block_data) == 0:
                break

            block_index = len(new_metadata[_BLOCKS_KEY])
            block_id = format(block_index, "05")
            new_metadata[_BLOCKS_KEY].append(block_id)

            try:
                self._blob(upload_blob_path).stage_block(block_id,
                                                         block_data,
                                                         validate_content=True)
            except AzureError as ae:
                logger.exception(
                    "Exception when trying to stream_upload_chunk block %s for %s",
                    block_id, uuid)
                return total_bytes_written, new_metadata, ae

            bytes_written = len(block_data)
            total_bytes_written += bytes_written
            if bytes_written < max_length:
                # A short read means the input stream is exhausted.
                break

        if content_type is not None:
            new_metadata[_CONTENT_TYPE_KEY] = content_type

        return total_bytes_written, new_metadata, None
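    # The chunked-upload flow above maps directly onto the Azure block-blob
    # primitives: each chunk is staged with stage_block(), and the staged
    # blocks are assembled later by commit_block_list() in
    # complete_chunked_upload(). A minimal standalone sketch of the same
    # pattern (container and blob names are placeholders):
    #
    #   blob = blob_service_client.get_blob_client("container", "name")
    #   blob.stage_block("00000", b"first chunk")
    #   blob.stage_block("00001", b"second chunk")
    #   blob.commit_block_list([BlobBlock("00000"), BlobBlock("00001")])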

    def complete_chunked_upload(self, uuid, final_path, storage_metadata):
        """
        Complete the chunked upload and store the final results in the path indicated.

        Returns nothing.
        """
        # Commit the blob's blocks.
        upload_blob_name = self._upload_blob_name_from_uuid(
            uuid)  # uploads/<uuid>
        upload_blob_path = self._upload_blob_path_from_uuid(
            uuid)  # <storage_path>/uploads/<uuid>
        block_list = [
            BlobBlock(block_id) for block_id in storage_metadata[_BLOCKS_KEY]
        ]

        try:
            if storage_metadata[_CONTENT_TYPE_KEY] is not None:
                content_settings = ContentSettings(
                    content_type=storage_metadata[_CONTENT_TYPE_KEY])
                self._blob(upload_blob_path).commit_block_list(
                    block_list, content_settings=content_settings)
            else:
                self._blob(upload_blob_path).commit_block_list(block_list)
        except AzureError:
            logger.exception(
                "Exception when trying to put block list for path %s from upload %s",
                final_path,
                uuid,
            )
            raise IOError("Exception when trying to put block list")

        # Copy the blob to its final location.
        copy_source_url = self.get_direct_download_url(upload_blob_name,
                                                       expires_in=300)

        try:
            final_blob_name = self._blob_name_from_path(final_path)
            self._blob(final_blob_name).start_copy_from_url(copy_source_url)
        except AzureError:
            logger.exception(
                "Exception when trying to copy uploaded blob %s to path %s",
                uuid, final_path)
            raise IOError("Exception when trying to copy uploaded blob")

        self._await_copy(final_blob_name)

        # Delete the original blob.
        logger.debug("Deleting chunked upload %s at path %s", uuid,
                     upload_blob_path)
        try:
            self._blob(upload_blob_path).delete_blob()
        except AzureError:
            logger.exception(
                "Exception when trying to delete uploaded blob %s", uuid)
            raise IOError("Exception when trying to delete uploaded blob")

    def cancel_chunked_upload(self, uuid, storage_metadata):
        """
        Cancel the chunked upload and clean up any outstanding partially uploaded data.

        Returns nothing.
        """
        upload_blob_path = self._upload_blob_path_from_uuid(uuid)
        logger.debug("Canceling chunked upload %s at path %s", uuid,
                     upload_blob_path)
        try:
            self._blob(upload_blob_path).delete_blob()
        except ResourceNotFoundError:
            pass

    def _await_copy(self, blob_name):
        # Poll for copy completion.
        blob = self._blob(blob_name)
        copy_prop = blob.get_blob_properties().copy

        count = 0
        while copy_prop.status == "pending":
            props = blob.get_blob_properties()
            copy_prop = props.copy

            if copy_prop.status == "success":
                return

            if copy_prop.status == "failed" or copy_prop.status == "aborted":
                raise IOError("Copy of blob %s failed with status %s" %
                              (blob_name, copy_prop.status))

            count = count + 1
            if count > _MAX_COPY_POLL_COUNT:
                raise IOError("Timed out waiting for copy to complete")

            time.sleep(_COPY_POLL_SLEEP)
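    # _MAX_COPY_POLL_COUNT and _COPY_POLL_SLEEP are module-level constants not
    # shown in this snippet; plausible values, for illustration only:
    #   _COPY_POLL_SLEEP = 0.25       # seconds between polls
    #   _MAX_COPY_POLL_COUNT = 120    # give up after roughly 30 seconds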

    def copy_to(self, destination, path):
        if self.__class__ == destination.__class__:
            logger.debug(
                "Starting copying file from Azure %s to Azure %s via an Azure copy",
                self._azure_container,
                destination._azure_container,
            )
            copy_source_url = self.get_direct_download_url(path)
            blob_name = destination._blob_name_from_path(path)
            dest_blob = destination._blob(blob_name)

            dest_blob.start_copy_from_url(copy_source_url)
            destination._await_copy(blob_name)
            logger.debug(
                "Finished copying file from Azure %s to Azure %s via an Azure copy",
                self._azure_container,
                destination._azure_container,
            )
            return

        # Fallback to a slower, default copy.
        logger.debug(
            "Copying file from Azure container %s to %s via a streamed copy",
            self._azure_container,
            destination,
        )
        with self.stream_read_file(path) as fp:
            destination.stream_write(path, fp)

    def setup(self):
        # From: https://docs.microsoft.com/en-us/rest/api/storageservices/cross-origin-resource-sharing--cors--support-for-the-azure-storage-services
        cors = [
            CorsRule(
                allowed_origins=["*"],
                allowed_methods=["GET", "PUT"],
                max_age_in_seconds=3000,
                exposed_headers=["x-ms-meta-*"],
                allowed_headers=[
                    "x-ms-meta-data*",
                    "x-ms-meta-target*",
                    "x-ms-meta-abc",
                    "Content-Type",
                ],
            )
        ]

        self._blob_service_client.set_service_properties(cors=cors)
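A short usage sketch for this storage driver, with placeholder values throughout; the container, account credentials, and context below are illustrative only (context is whatever application context the caller normally supplies):

    storage = AzureStorage(
        context=None,                       # placeholder app context
        azure_container="my-container",     # placeholder container name
        storage_path="/registry",
        azure_account_name="myaccount",     # placeholder account name
        azure_account_key="<account-key>",  # placeholder key
    )
    storage.put_content("some/object", b"hello")
    assert storage.exists("some/object")
    print(storage.get_content("some/object"))  # b'hello'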