Ejemplo n.º 1
0
    def test_notify_though_storage_queue(self):
        """End-to-end check: the notify action delivers one message to an ASQ queue."""
        account = self.setup_account()

        # Target queue for the notify transport; drain it so the final count is exact.
        queue_url = "https://" + account.name + ".queue.core.windows.net/testnotify"
        queue, name = StorageUtilities.get_queue_client_by_uri(queue_url, self.session)
        queue.clear_messages(name)

        policy = self.load_policy({
            'name': 'test-notify-for-keyvault',
            'resource': 'azure.keyvault',
            'actions': [{
                'type': 'notify',
                'template': 'default',
                'priority_header': '2',
                'subject': 'testing notify action',
                'to': ['*****@*****.**'],
                'transport': {'type': 'asq', 'queue': queue_url},
            }],
        })
        resources = policy.run()
        self.assertEqual(len(resources), 1)

        # Exactly one notification should now be waiting on the queue.
        messages = StorageUtilities.get_queue_messages(queue, name)
        self.assertEqual(len(messages), 1)
 def setUp(self):
     """Provision the storage queue and event subscription the tests rely on."""
     super(AzureEventSubscriptionsTest, self).setUp()
     account = self.setup_account()
     queue_name = 'cctesteventsub'
     StorageUtilities.create_queue_from_storage_account(account, queue_name)
     destination = StorageQueueEventSubscriptionDestination(
         resource_id=account.id,
         queue_name=queue_name)
     AzureEventSubscription.create(destination, self.event_sub_name)
 def test_create_azure_event_subscription(self):
     """Creating an event subscription should target the storage-queue destination."""
     account = self.setup_account()
     queue_name = 'cctestevensub'
     StorageUtilities.create_queue_from_storage_account(account, queue_name)
     destination = StorageQueueEventSubscriptionDestination(
         resource_id=account.id, queue_name=queue_name)
     name = 'custodiantestsubscription'
     subscription = AzureEventSubscription.create(destination, name)
     self.assertEqual(subscription.name, name)
     self.assertEqual(subscription.destination.endpoint_type, 'StorageQueue')
Ejemplo n.º 4
0
    def _create_storage_queue(self, queue_name, session):
        """Create *queue_name* in the function's configured storage account.

        Looks up the storage account named in ``self.function_params`` and
        creates the queue there.

        :param queue_name: name of the queue to create.
        :param session: session used for both the mgmt client and queue creation.
        :returns: the storage account object on success.
        :raises SystemExit: if queue creation fails for any reason.
        """
        self.log.info("Creating storage queue")
        storage_client = session.client('azure.mgmt.storage.StorageManagementClient')
        storage_account = storage_client.storage_accounts.get_properties(
            self.function_params.storage_account['resource_group_name'],
            self.function_params.storage_account['name'])

        try:
            StorageUtilities.create_queue_from_storage_account(storage_account, queue_name, session)
            self.log.info("Storage queue creation succeeded")
            return storage_account
        except Exception as e:
            # Lazy %-args: the message is only rendered if the record is emitted.
            self.log.error('Queue creation failed with error: %s', e)
            raise SystemExit
Ejemplo n.º 5
0
 def test_get_storage_client_by_uri(self):
     """A blob URI should resolve to a client plus container name and key prefix."""
     account = self.setup_account()
     url = "https://" + account.name + ".blob.core.windows.net/testcontainer/extrafolder"
     service, container, prefix = StorageUtilities.get_blob_client_by_uri(url)
     self.assertIsNotNone(service)
     self.assertEqual(container, "testcontainer")
     self.assertEqual(prefix, "extrafolder")
    def test_get_blob_client_from_storage_account_without_sas(self):
        """A blob client can be built from an account without SAS generation."""
        account = self.setup_account()
        group = ResourceIdParser.get_resource_group(account.id)
        client = StorageUtilities.get_blob_client_from_storage_account(
            group, account.name, self.session)
        self.assertIsNotNone(client)
Ejemplo n.º 7
0
 def process(self, resources, event=None):
     """Process resources in parallel, sharing one session and storage token."""
     session = local_session(self.manager.session_factory)
     token = StorageUtilities.get_storage_token(session)
     result, _errors = ThreadHelper.execute_in_parallel(
         resources=resources,
         event=event,
         execution_method=self.process_resource_set,
         executor_factory=self.executor_factory,
         log=self.log,
         session=session,
         token=token)
     return result
    def test_get_blob_client_from_storage_account_without_sas_fails_sas_generation(self):
        """Without SAS support, generating a shared access signature must raise."""
        with self.assertRaises(ValueError):
            account = self.setup_account()
            group = ResourceIdParser.get_resource_group(account.id)
            client = StorageUtilities.get_blob_client_from_storage_account(
                group, account.name, self.session)

            # Upload a blob, then attempt SAS generation (expected to fail).
            client.create_container('test')
            client.create_blob_from_text('test', 'test.txt', 'My test contents.')
            client.generate_blob_shared_access_signature('test', 'test.txt')
    def run(self, parallel=False):
        """Drain the Azure Storage queue, processing each message until it is empty."""
        if parallel:
            self.logger.info("Parallel processing with Azure Queue is not yet implemented.")

        self.logger.info("Downloading messages from the Azure Storage queue.")
        queue_settings = StorageUtilities.get_queue_client_by_uri(self.receive_queue, self.session)

        while True:
            batch = StorageUtilities.get_queue_messages(
                *queue_settings, num_messages=self.batch_size)
            if not batch:
                break

            for message in batch:
                self.logger.debug("Message id: %s received" % message.id)

                handled = self.process_azure_queue_message(message)
                retries_exhausted = message.dequeue_count > self.max_message_retry
                if handled or retries_exhausted:
                    # If message handled successfully or max retry hit, delete
                    StorageUtilities.delete_queue_message(*queue_settings, message=message)

        self.logger.info('No messages left on the azure storage queue, exiting c7n_mailer.')
Ejemplo n.º 10
0
    def test_notify_though_storage_queue(self):
        """Notify on glob-filtered keyvaults should queue exactly one message."""
        account = self.setup_account()

        # Create queue, make sure it is empty
        queue_url = "https://" + account.name + ".queue.core.windows.net/testnotify"
        queue, name = StorageUtilities.get_queue_client_by_uri(queue_url, self.session)
        queue.clear_messages(name)

        policy = self.load_policy({
            'name': 'test-notify-for-keyvault',
            'resource': 'azure.keyvault',
            'filters': [
                {'type': 'value',
                 'key': 'name',
                 'op': 'glob',
                 'value_type': 'normalize',
                 'value': 'cckeyvault1*'}],
            'actions': [
                {'type': 'notify',
                 'template': 'default',
                 'priority_header': '2',
                 'subject': 'testing notify action',
                 'to': ['*****@*****.**'],
                 'transport': {'type': 'asq', 'queue': queue_url}}],
        })
        resources = policy.run()
        self.assertEqual(len(resources), 1)

        # Pull messages, should be 1
        messages = StorageUtilities.get_queue_messages(queue, name)
        self.assertEqual(len(messages), 1)
Ejemplo n.º 11
0
    def run(self, parallel=False):
        """Poll the Azure Storage queue and process messages until none remain.

        :param parallel: accepted for interface parity; parallel draining is
            not implemented for Azure queues, so a notice is logged instead.
        """
        if parallel:
            self.logger.info("Parallel processing with Azure Queue is not yet implemented.")

        self.logger.info("Downloading messages from the Azure Storage queue.")
        # (service, queue_name) tuple reused for every queue operation below.
        queue_settings = StorageUtilities.get_queue_client_by_uri(self.receive_queue, self.session)
        queue_messages = StorageUtilities.get_queue_messages(
            *queue_settings, num_messages=self.batch_size)

        while len(queue_messages) > 0:
            for queue_message in queue_messages:
                self.logger.debug("Message id: %s received" % queue_message.id)

                if (self.process_azure_queue_message(queue_message) or
                        queue_message.dequeue_count > self.max_message_retry):
                    # If message handled successfully or max retry hit, delete
                    StorageUtilities.delete_queue_message(*queue_settings, message=queue_message)

            # Fetch the next batch; messages left undeleted above will
            # reappear once their visibility timeout expires.
            queue_messages = StorageUtilities.get_queue_messages(
                *queue_settings, num_messages=self.batch_size)

        self.logger.info('No messages left on the azure storage queue, exiting c7n_mailer.')
Ejemplo n.º 12
0
    def test_disable_log_settings(self):
        """set-log-settings with log=['delete'] should leave only delete
        logging enabled across blob, queue and table services."""
        p = self.load_policy(
            {
                'name':
                'test-azure-storage',
                'resource':
                'azure.storage',
                'filters': [{
                    'type': 'value',
                    'key': 'name',
                    'op': 'glob',
                    'value_type': 'normalize',
                    'value': 'cclgstorage*'
                }],
                'actions': [{
                    'type': 'set-log-settings',
                    'storage-types': ['blob', 'queue', 'table'],
                    'retention': 5,
                    'log': ['delete']
                }]
            },
            validate=True)

        resources = p.run()

        # Give the service time to apply settings when running live.
        self.sleep_in_live_mode(30)

        session = local_session(p.session_factory)
        token = StorageUtilities.get_storage_token(session)
        # Blob/queue settings are read with a token; table settings with the session.
        blob_settings = StorageSettingsUtilities.get_settings(BLOB_TYPE,
                                                              resources[0],
                                                              token=token)
        queue_settings = StorageSettingsUtilities.get_settings(QUEUE_TYPE,
                                                               resources[0],
                                                               token=token)
        table_settings = StorageSettingsUtilities.get_settings(TABLE_TYPE,
                                                               resources[0],
                                                               session=session)

        # assert read and write logging settings are disabled
        self.assertFalse(blob_settings.logging.read
                         and blob_settings.logging.write)
        self.assertFalse(queue_settings.logging.read
                         and queue_settings.logging.write)
        self.assertFalse(table_settings.logging.read
                         and table_settings.logging.write)

        # assert delete logging settings are enabled
        self.assertTrue(blob_settings.logging.delete)
        self.assertTrue(queue_settings.logging.delete)
        self.assertTrue(table_settings.logging.delete)
Ejemplo n.º 13
0
    def test_get_storage_primary_key(self):
        """The primary key is the value of the first key returned by list_keys."""
        key = StorageAccountKey()
        key.key_name = "key1"
        key.value = "mock_storage_key"

        keys_result = StorageAccountListKeysResult()
        keys_result.keys = [key]

        with patch(self._get_storage_client_string() + '.list_keys',
                   return_value=keys_result) as list_keys_mock:
            primary = StorageUtilities.get_storage_primary_key(
                'mock_rg_group', 'mock_account', self.session)
            list_keys_mock.assert_called_with('mock_rg_group', 'mock_account')
            self.assertEqual(primary, keys_result.keys[0].value)
Ejemplo n.º 14
0
    def test_get_blob_client_from_storage_account_without_sas_fails_sas_generation(self):
        """SAS generation must raise ValueError for a client created without SAS."""
        with self.assertRaises(ValueError):
            account = self.setup_account()
            group = ResourceIdParser.get_resource_group(account.id)
            client = StorageUtilities.get_blob_client_from_storage_account(
                group, account.name, self.session)

            # create container for package
            client.create_container('test')
            client.create_blob_from_text('test', 'test.txt', 'My test contents.')
            client.generate_blob_shared_access_signature('test', 'test.txt')
Ejemplo n.º 15
0
    def test_get_blob_client_from_storage_account_with_sas(self):
        """A SAS-enabled blob client should produce a shared access signature."""
        account = self.setup_account()
        group = ResourceIdParser.get_resource_group(account.id)
        client = StorageUtilities.get_blob_client_from_storage_account(
            group, account.name, self.session, True)

        # create sas token for blob
        client.create_container('test')
        client.create_blob_from_text('test', 'test.txt', 'My test contents.')
        sas = client.generate_blob_shared_access_signature('test', 'test.txt')

        self.assertIsNotNone(sas)
Ejemplo n.º 16
0
    def test_get_blob_client_from_storage_account_with_sas(self):
        """Requesting SAS generation yields a client able to mint SAS tokens."""
        account = self.setup_account()
        resource_group = ResourceIdParser.get_resource_group(account.id)
        blob_client = StorageUtilities.get_blob_client_from_storage_account(
            resource_group, account.name, self.session, True)

        # Upload a blob and mint a SAS token for it.
        blob_client.create_container('test')
        blob_client.create_blob_from_text('test', 'test.txt', 'My test contents.')
        sas = blob_client.generate_blob_shared_access_signature('test', 'test.txt')

        self.assertIsNotNone(sas)
Ejemplo n.º 17
0
    def prepare_queue_storage(self, queue_resource_id, queue_name):
        """
        Create a storage client using unusual ID/group reference
        as this is what we require for event subscriptions

        :param queue_resource_id: full ARM resource id of the storage account.
        :param queue_name: name of the queue to create in that account.
        :returns: the storage account hosting the queue.
        """

        # Use a different session object if the queue is in a different subscription
        queue_subscription_id = ResourceIdParser.get_subscription_id(
            queue_resource_id)
        if queue_subscription_id != self.session.subscription_id:
            session = Session(queue_subscription_id)
        else:
            session = self.session

        storage_client = session.client(
            'azure.mgmt.storage.StorageManagementClient')

        account = storage_client.storage_accounts.get_properties(
            ResourceIdParser.get_resource_group(queue_resource_id),
            ResourceIdParser.get_resource_name(queue_resource_id))

        # Bug fix: use the subscription-specific session here. Previously
        # self.session was passed, which targets the wrong subscription when
        # the queue lives outside the policy's subscription.
        Storage.create_queue_from_storage_account(account, queue_name,
                                                  session)
        return account
Ejemplo n.º 18
0
    def test_cycle_queue_message_by_uri(self):
        """Round-trip a message: put, read, delete, then confirm the queue is empty."""
        account = self.setup_account()
        url = "https://" + account.name + ".queue.core.windows.net/testcyclemessage"

        settings = StorageUtilities.get_queue_client_by_uri(url, self.session)
        StorageUtilities.put_queue_message(*settings, content=u"hello queue")

        # Exactly one message should be waiting.
        messages = StorageUtilities.get_queue_messages(*settings)
        self.assertEqual(len(messages), 1)

        # Read message and delete it from queue
        for message in messages:
            self.assertEqual(message.content, u"hello queue")
            StorageUtilities.delete_queue_message(*settings, message=message)

        # The queue must now be empty.
        messages = StorageUtilities.get_queue_messages(*settings)
        self.assertEqual(len(messages), 0)
Ejemplo n.º 19
0
    def poll_queue(self):
        """
        Poll the Azure queue and loop until
        there are no visible messages remaining.
        """
        # Exit if we don't have any policies
        if not self.policies:
            return

        # Lazily (re)build the queue client; it is discarded on HTTP errors below.
        if not self.queue_service:
            self.queue_service = Storage.get_queue_client_by_storage_account(
                self.queue_storage_account,
                self.storage_session)

        while True:
            try:
                # queue_message_count / queue_timeout_seconds come from module scope.
                messages = Storage.get_queue_messages(
                    self.queue_service,
                    self.event_queue_name,
                    num_messages=queue_message_count,
                    visibility_timeout=queue_timeout_seconds)
            except AzureHttpError:
                # Drop the cached client so the next poll rebuilds it, then surface.
                self.queue_service = None
                raise

            if len(messages) == 0:
                break

            log.info('Pulled %s events to process while polling queue.' % len(messages))

            for message in messages:
                if message.dequeue_count > max_dequeue_count:
                    # Poison message: delete rather than retrying forever.
                    Storage.delete_queue_message(self.queue_service,
                                                 self.event_queue_name,
                                                 message=message)
                    log.warning("Event deleted due to reaching maximum retry count.")
                else:
                    # Run matching policies
                    self.run_policies_for_event(message)

                    # We delete events regardless of policy result
                    Storage.delete_queue_message(
                        self.queue_service,
                        self.event_queue_name,
                        message=message)
Ejemplo n.º 20
0
 def test_create_queue_from_storage_account(self):
     """Queue creation against a storage account should report success."""
     account = self.setup_account()
     queue_name = 'testqueuecc'
     created = StorageUtilities.create_queue_from_storage_account(
         account, queue_name, self.session)
     self.assertTrue(created)
Ejemplo n.º 21
0
    def publish_functions_package(cls, function_params, package):
        """Deploy the packaged Function application.

        Dedicated (App Service) plans deploy via Kudu zip-deploy; consumption
        plans upload the package to blob storage and point the app at it via
        the WEBSITE_RUN_FROM_PACKAGE application setting.
        """
        session = local_session(Session)
        web_client = session.client('azure.mgmt.web.WebSiteManagementClient')

        cls.log.info('Publishing Function application')

        # provision using Kudu Zip-Deploy
        if not cls.is_consumption_plan(function_params):
            publish_creds = web_client.web_apps.list_publishing_credentials(
                function_params.function_app_resource_group_name,
                function_params.function_app_name).result()

            if package.wait_for_status(publish_creds):
                package.publish(publish_creds)
            else:
                cls.log.error("Aborted deployment, ensure Application Service is healthy.")
        # provision using WEBSITE_RUN_FROM_PACKAGE
        else:
            # fetch blob client
            blob_client = StorageUtilities.get_blob_client_from_storage_account(
                function_params.storage_account['resource_group_name'],
                function_params.storage_account['name'],
                session,
                sas_generation=True
            )

            # create container for package
            blob_client.create_container(FUNCTION_CONSUMPTION_BLOB_CONTAINER)

            # upload package
            blob_name = '%s.zip' % function_params.function_app_name
            packageToPublish = package.pkg.get_stream()
            count = os.path.getsize(package.pkg.path)

            blob_client.create_blob_from_stream(
                FUNCTION_CONSUMPTION_BLOB_CONTAINER, blob_name, packageToPublish, count)
            packageToPublish.close()

            # create blob url for package
            sas = blob_client.generate_blob_shared_access_signature(
                FUNCTION_CONSUMPTION_BLOB_CONTAINER,
                blob_name,
                BlobPermissions.READ,
                datetime.datetime.utcnow() +
                datetime.timedelta(days=FUNCTION_PACKAGE_SAS_EXPIRY_DAYS)
                # SAS lifetime is FUNCTION_PACKAGE_SAS_EXPIRY_DAYS days
            )
            blob_url = blob_client.make_blob_url(
                FUNCTION_CONSUMPTION_BLOB_CONTAINER,
                blob_name,
                sas_token=sas)

            # update application settings function package
            app_settings = web_client.web_apps.list_application_settings(
                function_params.function_app_resource_group_name,
                function_params.function_app_name)
            app_settings.properties['WEBSITE_RUN_FROM_PACKAGE'] = blob_url
            web_client.web_apps.update_application_settings(
                function_params.function_app_resource_group_name,
                function_params.function_app_name,
                # NOTE(review): kind=str passes the builtin type object, not a
                # string value — looks suspicious; confirm intended.
                kind=str,
                properties=app_settings.properties
            )

        # sync the scale controller for the Function App
        if not cls._sync_function_triggers(function_params):
            cls.log.error("Unable to sync triggers...")

        cls.log.info('Finished publishing Function application')
Ejemplo n.º 22
0
 def get_blob_client_wrapper(output_path, ctx):
     """Resolve *output_path* to a blob client using the context's session."""
     # Thin indirection kept so tests can patch it (easier test isolation).
     session = local_session(ctx.session_factory)
     return StorageUtilities.get_blob_client_by_uri(output_path, session)
Ejemplo n.º 23
0
 def test_get_queue_client_by_uri(self):
     """A queue URI should resolve to a service client and the queue's name."""
     account = self.setup_account()
     url = "https://" + account.name + ".queue.core.windows.net/testcc"
     service, name = StorageUtilities.get_queue_client_by_uri(url, self.session)
     self.assertIsNotNone(service)
     self.assertEqual(name, "testcc")
Ejemplo n.º 24
0
 def test_get_storage_token(self):
     """A storage token fetched for the session must carry a token value."""
     self.assertIsNotNone(StorageUtilities.get_storage_token(self.session).token)
Ejemplo n.º 25
0
 def test_get_account_by_name(self):
     """Looking up a storage account by name returns that same account."""
     account = self.setup_account()
     match = StorageUtilities.get_storage_account_by_name(account.name)
     self.assertEqual(match.id, account.id)
Ejemplo n.º 26
0
 def process_in_parallel(self, resources, event):
     """Fetch a storage token once, then defer to the base-class parallel driver."""
     # Token stored on the instance; presumably read by the per-resource
     # processing method — confirm against the rest of SetLogSettingsAction.
     self.token = StorageUtilities.get_storage_token(self.session)
     return super(SetLogSettingsAction,
                  self).process_in_parallel(resources, event)
Ejemplo n.º 27
0
 def get_blob_client_wrapper(output_path):
     # Thin indirection over StorageUtilities.get_blob_client_by_uri;
     # provides easier test isolation (tests patch this wrapper).
     return StorageUtilities.get_blob_client_by_uri(output_path)
Ejemplo n.º 28
0
    def update_policies(self):
        """
        Enumerate all policies from storage.
        Use the MD5 hashes in the enumerated policies
        and a local dictionary to decide if we should
        bother downloading/updating each blob.
        We maintain an on-disk policy cache for future
        features.
        """
        # Lazily build and cache the (client, container, prefix) tuple.
        if not self.policy_blob_client:
            self.policy_blob_client = Storage.get_blob_client_by_uri(
                self.policy_storage_uri, self.storage_session)
        (client, container, prefix) = self.policy_blob_client

        try:
            # All blobs with YAML extension
            blobs = [
                b for b in client.list_blobs(container)
                if Host.has_yaml_ext(b.name)
            ]
        except AzureHttpError as e:
            # If blob methods are failing don't keep
            # a cached client
            self.policy_blob_client = None
            raise e

        # Filter to hashes we have not seen before
        new_blobs = self._get_new_blobs(blobs)

        # Get all YAML files on disk that are no longer in blob storage
        cached_policy_files = [
            f for f in os.listdir(self.policy_cache) if Host.has_yaml_ext(f)
        ]

        removed_files = [
            f for f in cached_policy_files if f not in [b.name for b in blobs]
        ]

        # Nothing changed: skip the copy-and-swap below entirely.
        if not (removed_files or new_blobs):
            return

        # Update a copy so we don't interfere with
        # iterations on other threads
        policies_copy = self.policies.copy()

        # Drop policies whose backing blob disappeared.
        for f in removed_files:
            path = os.path.join(self.policy_cache, f)
            self.unload_policy_file(path, policies_copy)

        # Get updated YML files
        for blob in new_blobs:
            policy_path = os.path.join(self.policy_cache, blob.name)
            # Unload a stale cached copy first, or create the parent directory
            # if this blob has never been cached.
            if os.path.exists(policy_path):
                self.unload_policy_file(policy_path, policies_copy)
            elif not os.path.isdir(os.path.dirname(policy_path)):
                os.makedirs(os.path.dirname(policy_path))

            client.get_blob_to_path(container, blob.name, policy_path)
            self.load_policy(policy_path, policies_copy)
            # Remember the blob's MD5 so unchanged blobs are skipped next pass.
            self.blob_cache.update(
                {blob.name: blob.properties.content_settings.content_md5})

        # Assign our copy back over the original
        self.policies = policies_copy
Ejemplo n.º 29
0
 def test_get_account_by_name_not_exists(self):
     """Lookup of a non-existent account name should return None."""
     account = self.setup_account()
     missing = StorageUtilities.get_storage_account_by_name(account.name + "break")
     self.assertIsNone(missing)
Ejemplo n.º 30
0
    def publish_functions_package(cls, function_params, package):
        """Deploy the packaged Function application.

        Dedicated plans use Kudu zip-deploy; consumption plans upload the
        package to blob storage and reference it via WEBSITE_RUN_FROM_PACKAGE.
        """
        session = local_session(Session)
        web_client = session.client('azure.mgmt.web.WebSiteManagementClient')

        cls.log.info('Publishing Function application')

        # provision using Kudu Zip-Deploy
        if not cls.is_consumption_plan(function_params):
            publish_creds = web_client.web_apps.list_publishing_credentials(
                function_params.function_app_resource_group_name,
                function_params.function_app_name).result()

            if package.wait_for_status(publish_creds):
                package.publish(publish_creds)
            else:
                cls.log.error(
                    "Aborted deployment, ensure Application Service is healthy."
                )
        # provision using WEBSITE_RUN_FROM_PACKAGE
        else:
            # fetch blob client
            blob_client = StorageUtilities.get_blob_client_from_storage_account(
                function_params.storage_account['resource_group_name'],
                function_params.storage_account['name'],
                session,
                sas_generation=True)

            # create container for package
            blob_client.create_container(FUNCTION_CONSUMPTION_BLOB_CONTAINER)

            # upload package
            blob_name = '%s.zip' % function_params.function_app_name
            blob_client.create_blob_from_path(
                FUNCTION_CONSUMPTION_BLOB_CONTAINER, blob_name,
                package.pkg.path)

            # create blob url for package
            sas = blob_client.generate_blob_shared_access_signature(
                FUNCTION_CONSUMPTION_BLOB_CONTAINER, blob_name,
                BlobPermissions.READ,
                datetime.datetime.utcnow() +
                datetime.timedelta(days=FUNCTION_PACKAGE_SAS_EXPIRY_DAYS)
                # SAS lifetime is FUNCTION_PACKAGE_SAS_EXPIRY_DAYS days
            )
            blob_url = blob_client.make_blob_url(
                FUNCTION_CONSUMPTION_BLOB_CONTAINER, blob_name, sas_token=sas)

            # update application settings function package
            app_settings = web_client.web_apps.list_application_settings(
                function_params.function_app_resource_group_name,
                function_params.function_app_name)
            app_settings.properties['WEBSITE_RUN_FROM_PACKAGE'] = blob_url
            web_client.web_apps.update_application_settings(
                function_params.function_app_resource_group_name,
                function_params.function_app_name,
                # NOTE(review): kind=str passes the builtin type object, not a
                # string value — looks suspicious; confirm intended.
                kind=str,
                properties=app_settings.properties)

        # sync the scale controller for the Function App
        if not cls._sync_function_triggers(function_params):
            cls.log.error("Unable to sync triggers...")

        cls.log.info('Finished publishing Function application')
Ejemplo n.º 31
0
 def test_get_keys(self):
     """A storage account should expose exactly two access keys."""
     account = self.setup_account()
     self.assertEqual(len(StorageUtilities.get_storage_keys(account.id)), 2)
Ejemplo n.º 32
0
 def test_get_storage_client_by_uri(self):
     """A container URI resolves to a blob service and a container name."""
     account = self.setup_account()
     url = "https://" + account.name + ".blob.core.windows.net/testcontainer"
     service, container = StorageUtilities.get_blob_client_by_uri(url)
     self.assertIsNotNone(service)
     self.assertIsNotNone(container)
Ejemplo n.º 33
0
 def test_get_storage_token(self):
     """The session should yield a storage token with a non-empty value."""
     result = StorageUtilities.get_storage_token(self.session)
     self.assertIsNotNone(result.token)
Ejemplo n.º 34
0
    def _get_table_client_from_storage_account(storage_account, session):
        """Build a TableService for the account using its primary access key."""
        key = StorageUtilities.get_storage_primary_key(
            storage_account['resourceGroup'],
            storage_account['name'],
            session)
        return TableService(
            account_name=storage_account['name'],
            account_key=key)
Ejemplo n.º 35
0
 def send_to_azure_queue(self, queue_uri, message, session):
     """Pack *message* and enqueue it at *queue_uri*; return the message id."""
     service, name = StorageUtilities.get_queue_client_by_uri(queue_uri, session)
     return StorageUtilities.put_queue_message(service, name, self.pack(message)).id
Ejemplo n.º 36
0
 def send_to_azure_queue(self, queue_uri, message, session):
     """Enqueue the packed *message*, returning the id of the queued message."""
     client, queue = StorageUtilities.get_queue_client_by_uri(queue_uri, session)
     queued = StorageUtilities.put_queue_message(client, queue, self.pack(message))
     return queued.id
Ejemplo n.º 37
0
 def get_blob_client_wrapper(output_path, ctx):
     # provides easier test isolation
     return StorageUtilities.get_blob_client_by_uri(
         output_path, local_session(ctx.session_factory))
Ejemplo n.º 38
0
 def test_get_account_by_name(self):
     """A name lookup should round-trip to the original account's id."""
     account = self.setup_account()
     found = StorageUtilities.get_storage_account_by_name(account.name)
     self.assertEqual(account.id, found.id)
Ejemplo n.º 39
0
 def test_get_keys(self):
     """Each storage account is expected to have two keys."""
     account = self.setup_account()
     keys = StorageUtilities.get_storage_keys(account.id)
     self.assertEqual(2, len(keys))
Ejemplo n.º 40
0
 def test_get_account_by_name_not_exists(self):
     """A mangled account name must not resolve to any account."""
     account = self.setup_account()
     self.assertIsNone(
         StorageUtilities.get_storage_account_by_name(account.name + "break"))