Esempio n. 1
0
    def test_account_sas(self, resource_group, location, storage_account, storage_account_key):
        # SAS URL is calculated from storage key, so this test runs live only

        # Arrange: a queue with one message, plus a read-only account SAS token.
        qsc = QueueServiceClient(self.account_url(storage_account, "queue"), storage_account_key)
        queue_client = self._get_queue_reference(qsc)
        queue_client.create_queue()
        queue_client.send_message(u'message1')
        sas_token = generate_account_sas(
            qsc.account_name,
            qsc.credential.account_key,
            ResourceTypes(object=True),
            AccountSasPermissions(read=True),
            datetime.utcnow() + timedelta(hours=1),
            datetime.utcnow() - timedelta(minutes=5),
        )

        # Act: reach the same queue through a client authenticated only by the SAS.
        sas_service = QueueServiceClient(account_url=qsc.url, credential=sas_token)
        peeked = sas_service.get_queue_client(queue_client.queue_name).peek_messages()

        # Assert: exactly the enqueued message is visible via the SAS client.
        self.assertIsNotNone(peeked)
        self.assertEqual(1, len(peeked))
        first = peeked[0]
        self.assertIsNotNone(first)
        self.assertNotEqual('', first.id)
        self.assertEqual(u'message1', first.content)
Esempio n. 2
0
 def test_queue_not_exists(self, resource_group, location, storage_account, storage_account_key):
     # Arrange: point a client at a queue name that was never created.
     qsc = QueueServiceClient(self.account_url(storage_account, "queue"), storage_account_key)
     missing_queue = qsc.get_queue_client(self.get_resource_name('missing'))
     # Act / Assert: property lookup on an absent queue raises.
     with self.assertRaises(ResourceNotFoundError):
         missing_queue.get_queue_properties()
    def test_account_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange: one queued message and a read-only account-level SAS token.
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        sas_token = self.qsc.generate_shared_access_signature(
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
            datetime.utcnow() - timedelta(minutes=5))

        # Act: peek at the queue through a SAS-authenticated service client.
        sas_service = QueueServiceClient(account_url=self.qsc.url, credential=sas_token)
        peeked = sas_service.get_queue_client(queue_client.queue_name).peek_messages()

        # Assert: the message enqueued with the key credential is readable via SAS.
        self.assertIsNotNone(peeked)
        self.assertEqual(1, len(peeked))
        first = peeked[0]
        self.assertIsNotNone(first)
        self.assertNotEqual('', first.id)
        self.assertEqual(u'message1', first.content)
    def ingest_from_blob(self, blob_descriptor: BlobDescriptor,
                         ingestion_properties: IngestionProperties):
        """
        Enqueue an ingest command from azure blobs.
        To learn more about ingestion methods go to:
        https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-overview#ingestion-methods
        :param azure.kusto.ingest.BlobDescriptor blob_descriptor: An object that contains a description of the blob to be ingested.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        """
        try:
            queues = self._resource_manager.get_ingestion_queues()
        except KustoServiceError:
            # Surface a more actionable endpoint-type error first; bare `raise`
            # re-raises the original exception with its traceback intact
            # (the previous `raise ex` needlessly re-raised by name).
            self._validate_endpoint_service_type()
            raise

        # Pick one of the available ingestion queues at random.
        random_queue = random.choice(queues)
        queue_service = QueueServiceClient(random_queue.account_uri)
        authorization_context = self._resource_manager.get_authorization_context(
        )
        ingestion_blob_info = _IngestionBlobInfo(
            blob_descriptor,
            ingestion_properties=ingestion_properties,
            auth_context=authorization_context)
        ingestion_blob_info_json = ingestion_blob_info.to_json()
        # TODO: perhaps this needs to be more visible
        content = ingestion_blob_info_json
        # The message is sent base64-encoded via TextBase64EncodePolicy.
        queue_client = queue_service.get_queue_client(
            queue=random_queue.object_name,
            message_encode_policy=TextBase64EncodePolicy())
        queue_client.send_message(content=content)
Esempio n. 5
0
    def ingest_from_blob(
            self, blob_descriptor: BlobDescriptor,
            ingestion_properties: IngestionProperties) -> IngestionResult:
        """Enqueue an ingest command from azure blobs.
        To learn more about ingestion methods go to:
        https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-overview#ingestion-methods
        :param azure.kusto.ingest.BlobDescriptor blob_descriptor: An object that contains a description of the blob to be ingested.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        :return: IngestionResult with QUEUED status for the submitted blob.
        """
        try:
            queues = self._resource_manager.get_ingestion_queues()
        except KustoServiceError:
            # Surface a more actionable endpoint-type error first; bare `raise`
            # re-raises the original exception with its traceback intact
            # (the previous `raise ex` needlessly re-raised by name).
            self._validate_endpoint_service_type()
            raise

        # Pick one of the available ingestion queues at random.
        random_queue = random.choice(queues)
        queue_service = QueueServiceClient(random_queue.account_uri,
                                           proxies=self._proxy_dict)
        authorization_context = self._resource_manager.get_authorization_context(
        )
        ingestion_blob_info = IngestionBlobInfo(
            blob_descriptor,
            ingestion_properties=ingestion_properties,
            auth_context=authorization_context)
        ingestion_blob_info_json = ingestion_blob_info.to_json()
        # The message is sent base64-encoded via TextBase64EncodePolicy.
        queue_client = queue_service.get_queue_client(
            queue=random_queue.object_name,
            message_encode_policy=TextBase64EncodePolicy())
        queue_client.send_message(content=ingestion_blob_info_json,
                                  timeout=self._SERVICE_CLIENT_TIMEOUT_SECONDS)

        return IngestionResult(IngestionStatus.QUEUED,
                               ingestion_properties.database,
                               ingestion_properties.table,
                               blob_descriptor.source_id, blob_descriptor.path)
Esempio n. 6
0
    def test_account_sas(self, resource_group, location, storage_account,
                         storage_account_key):
        # SAS URL is calculated from storage key, so this test runs live only
        if not self.is_live:
            return

        # Arrange: a queue with a single message and a read-only account SAS.
        qsc = QueueServiceClient(self._account_url(storage_account.name),
                                 storage_account_key)
        queue_client = self._get_queue_reference(qsc)
        queue_client.create_queue()
        queue_client.enqueue_message(u'message1')
        sas_token = qsc.generate_shared_access_signature(
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
            datetime.utcnow() - timedelta(minutes=5))

        # Act: peek through a service client that authenticates with the SAS only.
        sas_service = QueueServiceClient(account_url=qsc.url, credential=sas_token)
        peeked = sas_service.get_queue_client(queue_client.queue_name).peek_messages()

        # Assert: exactly the enqueued message comes back.
        self.assertIsNotNone(peeked)
        self.assertEqual(1, len(peeked))
        first = peeked[0]
        self.assertIsNotNone(first)
        self.assertNotEqual('', first.id)
        self.assertEqual(u'message1', first.content)
    def test_message_text_xml_whitespace(self, storage_account_name, storage_account_key):
        # Arrange: a payload whose tabs, leading blanks and trailing newline
        # must survive XML transport verbatim.
        qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key)
        payload = u'  mess\t age1\n'
        queue = qsc.get_queue_client(self.get_resource_name(TEST_QUEUE_PREFIX))

        # Assert: the round trip preserves the whitespace exactly.
        self._validate_encoding(queue, payload)
Esempio n. 8
0
    def test_unicode_create_queue_unicode_name(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        qsc = QueueServiceClient(self.account_url(storage_account, "queue"), storage_account_key)
        invalid_name = u'啊齄丂狛狜'

        # Act / Assert: queue names must be lowercase alphanumeric, so a
        # unicode name is rejected by the service at creation time.
        with self.assertRaises(HttpResponseError):
            qsc.get_queue_client(invalid_name).create_queue()
    def test_message_text_xml(self, storage_account_name, storage_account_key):
        # Arrange: XML-special characters in the payload.
        qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key)
        payload = u'<message1>'
        queue = qsc.get_queue_client(self.get_resource_name(TEST_QUEUE_PREFIX))

        # Assert: the default client applies no encode/decode policy, and the
        # angle-bracket payload still round-trips intact.
        assert isinstance(queue._config.message_encode_policy, NoEncodePolicy)
        assert isinstance(queue._config.message_decode_policy, NoDecodePolicy)
        self._validate_encoding(queue, payload)
    def test_message_text_xml(self, resource_group, location, storage_account,
                              storage_account_key):
        # Arrange: a payload containing XML-special angle brackets.
        qsc = QueueServiceClient(self._account_url(storage_account.name),
                                 storage_account_key)
        payload = u'<message1>'
        queue = qsc.get_queue_client(self.get_resource_name(TEST_QUEUE_PREFIX))

        # Assert: the payload round-trips through the queue unchanged.
        self._validate_encoding(queue, payload)
Esempio n. 11
0
    def test_response_callback(self, resource_group, location, storage_account, storage_account_key):
        # Arrange
        service = QueueServiceClient(self.account_url(storage_account, "queue"), credential=storage_account_key)
        queue = service.get_queue_client(self.get_resource_name('cont'))

        # Act: a raw-response hook that rewrites the response to a bare 200.
        def rewrite_response(response):
            response.http_response.status_code = 200
            response.http_response.headers.clear()

        # Assert: with the hook in place the properties call reports success.
        self.assertTrue(queue.get_queue_properties(raw_response_hook=rewrite_response))
    def test_message_bytes_fails(self, storage_account_name, storage_account_key):
        """Sending raw bytes without a binary encode policy must raise TypeError."""
        # Arrange
        qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key)
        queue = qsc.get_queue_client(self.get_resource_name(TEST_QUEUE_PREFIX))
        queue.create_queue()

        # Action.
        with self.assertRaises(TypeError) as e:
            message = b'xyz'
            queue.send_message(message)

        # Asserts
        # BUGFIX: this assertion used to sit inside the `with` block after the
        # raising call, so it was unreachable; it must run after the context
        # manager exits, when `e.exception` is populated.
        self.assertTrue(str(e.exception).startswith('Message content must not be bytes. Use the BinaryBase64EncodePolicy to send bytes.'))
    def test_service_client_api_version_property(self):
        # Without an explicit api_version, the client uses the newer default.
        default_client = QueueServiceClient(
            "https://foo.queue.core.windows.net/account",
            credential="fake_key")
        self.assertEqual(default_client.api_version, self.api_version_2)
        self.assertEqual(default_client._client._config.version,
                         self.api_version_2)

        # The property is read-only.
        with pytest.raises(AttributeError):
            default_client.api_version = "foo"

        # An explicitly pinned version is honoured...
        pinned_client = QueueServiceClient(
            "https://foo.queue.core.windows.net/account",
            credential="fake_key",
            api_version=self.api_version_1)
        self.assertEqual(pinned_client.api_version, self.api_version_1)
        self.assertEqual(pinned_client._client._config.version,
                         self.api_version_1)

        # ...and inherited by queue clients created from the service client.
        child_client = pinned_client.get_queue_client("foo")
        self.assertEqual(child_client.api_version, self.api_version_1)
        self.assertEqual(child_client._client._config.version,
                         self.api_version_1)
Esempio n. 14
0
# Module-level wiring of all Azure service clients used by this app.
# NOTE(review): DotMap is imported but not used in the visible portion of this
# file -- confirm before removing.
from smart_getenv import getenv
from dotmap import DotMap
from dotenv import load_dotenv, find_dotenv

# Load variables from a .env file into the process environment, if present.
load_dotenv(find_dotenv())

# NOTE(review): `type=bool` with `default=1` looks inconsistent for a value
# named *_RECEIVE_SLEEP -- a numeric poll interval presumably wants type=int;
# confirm against the consumer of `sleep`.
sleep = getenv("AZURE_STORAGE_QUEUE_RECEIVE_SLEEP", type=bool, default=1)

# Credential chain: try Azure CLI login first, then environment variables,
# then a managed identity (order matters -- first success wins).
credential = ChainedTokenCredential(AzureCliCredential(),
                                    EnvironmentCredential(),
                                    ManagedIdentityCredential())

queue_service_client = QueueServiceClient(
    account_url=getenv("AZURE_STORAGE_QUEUE_ENDPOINT"), credential=credential)

queue_client = queue_service_client.get_queue_client(
    queue=getenv("AZURE_STORAGE_QUEUE_NAME", default="messages"))

fr_client = FormRecognizerClient(
    endpoint=getenv("AZURE_FORM_RECOGNIZER_ENDPOINT"), credential=credential)

ta_client = TextAnalyticsClient(
    endpoint=getenv("AZURE_TEXT_ANALYTICS_ENDPOINT"), credential=credential)

# Cosmos DB uses a key credential rather than the AAD token chain above.
cosmos_client = CosmosClient(url=getenv("AZURE_COSMOS_ENDPOINT"),
                             credential=getenv("AZURE_COSMOS_KEY"))

cosmos_database_client = cosmos_client.get_database_client(
    getenv("AZURE_COSMOS_DB", default="azimageai"))

cosmos_container_client = cosmos_database_client.get_container_client(
    getenv("AZURE_COSMOS_CONTAINER", default="images"))
Esempio n. 15
0
import base64
from azure.storage.queue import QueueServiceClient

# NOTE(review): Flask is not imported in this view -- presumably imported
# earlier in the file; confirm.
app = Flask(__name__)

# PID of the currently running background task, if any.
task_pid = None
# Maps session id -> latest status dict (written by update_status below).
session_statuses = {}

# SECURITY NOTE(review): a storage account key is hard-coded below. It should
# be treated as leaked -- rotate the key and load it from configuration or a
# secret store instead of source code.
service = QueueServiceClient(
    account_url="https://genomicsdatalake.queue.core.windows.net/",
    credential=
    "gfHgyeOy7iWjXKi9p2ijKCgnzVR1Da8/pXtQTkHUHGqhWVrCOGhqfI1ifbDxTZ/sFCg/u/JnQIstA/BcUD8mUA=="
)

queue_name = "amissqueue"
queue_client = service.get_queue_client(queue_name)


def update_status(sessionid, pid, status, pipe, task, message=''):
    # Record the latest state for this session in the module-level registry,
    # stamping it with the current local date/time.
    session_statuses[sessionid] = {
        'sessionid': sessionid,
        'pid': pid,
        'status': status,
        'pipe': pipe,
        'task': task,
        'message': message,
        'last_updated': datetime.now().strftime('%x %X'),
    }
Esempio n. 16
0
class AzureFunctionAppBackend:
    """
    A wrap-up around Azure Function Apps backend.

    Invocation transport is a pair of storage queues per runtime: payloads go
    onto the IN queue and, when a result is requested, replies are polled from
    the OUT queue (see invoke()).
    """

    def __init__(self, config, storage_config):
        logger.debug("Creating Azure Functions client")
        self.name = 'azure_fa'
        self.azure_config = config
        self.resource_group = self.azure_config['resource_group']
        self.storage_account = self.azure_config['storage_account']
        self.account_key = self.azure_config['storage_account_key']
        self.location = self.azure_config['location']
        self.functions_version = self.azure_config['functions_version']

        # Storage-queue client used as the invocation transport.
        self.queue_service_url = 'https://{}.queue.core.windows.net'.format(self.storage_account)
        self.queue_service = QueueServiceClient(account_url=self.queue_service_url,
                                                credential=self.account_key)

        msg = COMPUTE_CLI_MSG.format('Azure Functions')
        logger.info("{} - Location: {}".format(msg, self.location))

    def _format_action_name(self, runtime_name, runtime_memory=None):
        """Make a docker image name usable as a function app name."""
        # '/' and ':' are not valid in function app names.
        runtime_name = runtime_name.replace('/', '--').replace(':', '--')
        return runtime_name

    def _format_queue_name(self, action_name, q_type):
        """Derive the IN/OUT queue name for a function app (q_type is the suffix)."""
        runtime_name = action_name.replace('--', '-')
        return runtime_name+'-'+q_type

    def _get_default_runtime_image_name(self):
        """Default runtime image name: account + runtime + python version + revision."""
        py_version = version_str(sys.version_info).replace('.', '')
        revision = 'latest' if 'dev' in __version__ else __version__.replace('.', '')
        runtime_name = '{}-{}-v{}-{}'.format(self.storage_account, az_config.RUNTIME_NAME,
                                             py_version, revision)
        return runtime_name

    def create_runtime(self, docker_image_name, memory=None, timeout=az_config.RUNTIME_TIMEOUT):
        """
        Creates a new runtime into Azure Function Apps
        from the provided Linux image for consumption plan.
        Returns the extracted runtime metadata (preinstalled modules).
        """
        default_runtime_img_name = self._get_default_runtime_image_name()
        if docker_image_name in ['default', default_runtime_img_name]:
            # We only build the default image. rest of images must already exist
            # in the docker registry.
            docker_image_name = default_runtime_img_name
            self._build_default_runtime(default_runtime_img_name)

        logger.info('Creating new Lithops runtime for Azure Function Apps')
        self._create_function(docker_image_name, memory, timeout)
        metadata = self._generate_runtime_meta(docker_image_name, memory)

        return metadata

    def _build_default_runtime(self, default_runtime_img_name):
        """
        Builds the default runtime from sources via build_runtime().
        """
        # NOTE: a docker-based build path used to follow this return statement;
        # it was unreachable dead code and has been removed.
        return self.build_runtime(default_runtime_img_name)

    def build_runtime(self, runtime_name, requirements_file=None):
        """
        Assemble the function-app project directory under az_config.BUILD_DIR:
        requirements, host.json, queue-trigger bindings, the entry point and
        vendored dependencies (including lithops itself).

        NOTE(review): `requirements_file` is currently unused -- the fixed
        az_config.REQUIREMENTS_FILE is always written; confirm intent.
        """
        try:
            shutil.rmtree(az_config.BUILD_DIR)
        except Exception:
            # Best-effort: the build dir may not exist yet.
            pass

        action_name = self._format_action_name(runtime_name)

        build_dir = os.path.join(az_config.BUILD_DIR, action_name)
        os.makedirs(build_dir, exist_ok=True)

        logger.info('Building default runtime in {}'.format(build_dir))

        action_dir = os.path.join(build_dir, az_config.ACTION_DIR)
        os.makedirs(action_dir, exist_ok=True)

        req_file = os.path.join(build_dir, 'requirements.txt')
        with open(req_file, 'w') as reqf:
            reqf.write(az_config.REQUIREMENTS_FILE)

        host_file = os.path.join(build_dir, 'host.json')
        with open(host_file, 'w') as hstf:
            hstf.write(az_config.HOST_FILE)

        # Point the function's queue-trigger bindings at this runtime's queues.
        fn_file = os.path.join(action_dir, 'function.json')
        with open(fn_file, 'w') as fnf:
            in_q_name = self._format_queue_name(action_name, az_config.IN_QUEUE)
            az_config.BINDINGS['bindings'][0]['queueName'] = in_q_name
            out_q_name = self._format_queue_name(action_name, az_config.OUT_QUEUE)
            az_config.BINDINGS['bindings'][1]['queueName'] = out_q_name
            fnf.write(json.dumps(az_config.BINDINGS))

        entry_point = os.path.join(os.path.dirname(__file__), 'entry_point.py')
        main_file = os.path.join(action_dir, '__init__.py')
        shutil.copy(entry_point, main_file)

        # Vendor the requirements plus the local lithops package into the project.
        mod_dir = os.path.join(build_dir, az_config.ACTION_MODULES_DIR)
        os.chdir(build_dir)
        cmd = 'pip3 install -U -t {} -r requirements.txt'.format(mod_dir)
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd = cmd + " >{} 2>&1".format(os.devnull)
        os.system(cmd)
        lithops_location = os.path.dirname(os.path.abspath(lithops.__file__))
        shutil.copytree(lithops_location, os.path.join(mod_dir, 'lithops'))

    def build_runtime_docker(self, docker_image_name, dockerfile):
        """
        Builds a new runtime from a Docker file and pushes it to the Docker hub
        """
        logger.debug('Building new docker image from Dockerfile')
        logger.debug('Docker image name: {}'.format(docker_image_name))

        entry_point = os.path.join(os.path.dirname(__file__), 'entry_point.py')
        create_handler_zip(az_config.FH_ZIP_LOCATION, entry_point, '__init__.py')

        if dockerfile:
            cmd = '{} build -t {} -f {} .'.format(az_config.DOCKER_PATH,
                                                  docker_image_name,
                                                  dockerfile)
        else:
            cmd = '{} build -t {} .'.format(az_config.DOCKER_PATH, docker_image_name)

        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd = cmd + " >{} 2>&1".format(os.devnull)

        logger.info('Building default runtime')
        res = os.system(cmd)
        if res != 0:
            raise Exception('There was an error building the runtime')

        cmd = '{} push {}'.format(az_config.DOCKER_PATH, docker_image_name)
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd = cmd + " >{} 2>&1".format(os.devnull)
        res = os.system(cmd)
        if res != 0:
            raise Exception('There was an error pushing the runtime to the container registry')
        logger.debug('Done!')

    def _create_function(self, docker_image_name, memory=None,
                         timeout=az_config.RUNTIME_TIMEOUT):
        """
        Create and publish an Azure Function App
        """
        action_name = self._format_action_name(docker_image_name, memory)

        # Ensure clean IN/OUT queues: create them, or empty them if they exist.
        try:
            in_q_name = self._format_queue_name(action_name, az_config.IN_QUEUE)
            self.queue_service.create_queue(in_q_name)
        except Exception:
            in_queue = self.queue_service.get_queue_client(in_q_name)
            in_queue.clear_messages()
        try:
            out_q_name = self._format_queue_name(action_name, az_config.OUT_QUEUE)
            self.queue_service.create_queue(out_q_name)
        except Exception:
            out_queue = self.queue_service.get_queue_client(out_q_name)
            out_queue.clear_messages()

        logger.debug('Creating function app')
        logger.debug('Function name: {}'.format(action_name))
        python_version = version_str(sys.version_info)
        cmd = ('az functionapp create --name {} --storage-account {} '
               '--resource-group {} --os-type Linux  --runtime python '
               '--runtime-version {} --functions-version {} --consumption-plan-location {}'
               .format(action_name, self.storage_account, self.resource_group,
                       python_version, self.functions_version, self.location))
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd = cmd + " >{} 2>&1".format(os.devnull)
        res = os.system(cmd)
        if res != 0:
            raise Exception('There was an error creating the function in Azure. cmd: {}'.format(cmd))

        logger.debug('Publishing function app')
        build_dir = os.path.join(az_config.BUILD_DIR, action_name)
        os.chdir(build_dir)
        # Publishing can fail while the app is still provisioning; retry until
        # the CLI reports success.
        res = 1
        while res != 0:
            time.sleep(5)
            cmd = 'func azure functionapp publish {} --python --no-build'.format(action_name)
            if logger.getEffectiveLevel() != logging.DEBUG:
                cmd = cmd + " >{} 2>&1".format(os.devnull)
            res = os.system(cmd)

        time.sleep(10)

    def delete_runtime(self, runtime_name, memory):
        """
        Deletes a runtime: the function app and its IN/OUT queues.
        """
        action_name = self._format_action_name(runtime_name, memory)

        logger.debug('Deleting function app: {}'.format(action_name))
        cmd = ('az functionapp delete --name {} --resource-group {}'
               .format(action_name, self.resource_group))
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd = cmd + " >{} 2>&1".format(os.devnull)
        # BUGFIX: the delete command was built but never executed, so the
        # function app was left behind while only the queues were removed.
        os.system(cmd)

        try:
            in_q_name = self._format_queue_name(action_name, az_config.IN_QUEUE)
            self.queue_service.delete_queue(in_q_name)
        except Exception:
            pass  # best-effort: queue may already be gone
        try:
            out_q_name = self._format_queue_name(action_name, az_config.OUT_QUEUE)
            self.queue_service.delete_queue(out_q_name)
        except Exception:
            pass  # best-effort: queue may already be gone

    def invoke(self, docker_image_name, memory=None, payload=None, return_result=False):
        """
        Invoke function by posting the payload on its IN queue.

        :param payload: dict serialized (base64) into the queue message.
        :param return_result: when True, block polling the OUT queue and return
            the decoded reply instead of the activation id.
        """
        # Avoid the shared-mutable-default pitfall of `payload={}`.
        payload = {} if payload is None else payload

        action_name = self._format_action_name(docker_image_name)
        in_q_name = self._format_queue_name(action_name, az_config.IN_QUEUE)
        in_queue = self.queue_service.get_queue_client(in_q_name)
        msg = in_queue.send_message(dict_to_b64str(payload))
        activation_id = msg.id

        if return_result:
            out_q_name = self._format_queue_name(action_name, az_config.OUT_QUEUE)
            out_queue = self.queue_service.get_queue_client(out_q_name)
            # Poll once per second until a reply message shows up.
            msg = []
            while not msg:
                time.sleep(1)
                msg = out_queue.receive_message()
            out_queue.clear_messages()
            return b64str_to_dict(msg.content)

        return activation_id

    def get_runtime_key(self, docker_image_name, runtime_memory):
        """
        Method that creates and returns the runtime key.
        Runtime keys are used to uniquely identify runtimes within the storage,
        in order to know which runtimes are installed and which not.
        """
        action_name = self._format_action_name(docker_image_name, runtime_memory)
        runtime_key = os.path.join(self.name, action_name)

        return runtime_key

    def clean(self):
        # TODO
        pass

    def _generate_runtime_meta(self, docker_image_name, memory):
        """
        Extract installed Python modules from Azure runtime
        """
        logger.info("Extracting Python modules from: {}".format(docker_image_name))
        payload = {'log_level': logger.getEffectiveLevel(), 'get_preinstalls': True}

        try:
            runtime_meta = self.invoke(docker_image_name, memory=memory,
                                       payload=payload, return_result=True)
        except Exception:
            raise Exception("Unable to invoke 'extract-preinstalls' action")

        if not runtime_meta or 'preinstalls' not in runtime_meta:
            raise Exception(runtime_meta)

        logger.debug("Extracted metadata succesfully")
        return runtime_meta
Esempio n. 17
0
class StorageQueueTest(QueueTestCase):
    def setUp(self):
        super(StorageQueueTest, self).setUp()

        # One shared service client per test; queues obtained through the
        # helpers are tracked in self.test_queues so tearDown can remove them.
        self.qsc = QueueServiceClient(account_url=self._get_queue_url(),
                                      credential=self._get_shared_key_credential())
        self.test_queues = []

    def tearDown(self):
        """Best-effort cleanup of queues created during a live test run."""
        if not self.is_playback():
            for queue in self.test_queues:
                try:
                    queue.delete_queue()
                # Narrowed from a bare `except:` (which also swallows
                # KeyboardInterrupt/SystemExit); the delete remains deliberate
                # best-effort -- the queue may never have been created.
                except Exception:
                    pass
        return super(StorageQueueTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_queue_reference(self, prefix=TEST_QUEUE_PREFIX):
        # Build a client for a uniquely named queue and register it for cleanup.
        queue = self.qsc.get_queue_client(self.get_resource_name(prefix))
        self.test_queues.append(queue)
        return queue

    def _create_queue(self, prefix=TEST_QUEUE_PREFIX):
        # Like _get_queue_reference, but the queue actually exists on return.
        queue = self._get_queue_reference(prefix)
        queue.create_queue()
        return queue

    # --Test cases for queues ----------------------------------------------
    @record
    def test_create_queue(self):
        # Creating a brand-new queue reports success.
        self.assertTrue(self._get_queue_reference().create_queue())

    @record
    def test_create_queue_fail_on_exist(self):
        # The first create succeeds; a second create of the same queue raises.
        queue_client = self._get_queue_reference()
        first_created = queue_client.create_queue()
        with self.assertRaises(ResourceExistsError):
            queue_client.create_queue()
        self.assertTrue(first_created)

    @record
    def test_create_queue_fail_on_exist_different_metadata(self):
        # Re-creating an existing queue fails even when the metadata differs.
        queue_client = self._get_queue_reference()
        first_created = queue_client.create_queue()
        with self.assertRaises(ResourceExistsError):
            queue_client.create_queue(metadata={"val": "value"})
        self.assertTrue(first_created)

    @record
    def test_create_queue_with_options(self):
        # Metadata supplied at creation time must round-trip through the service.
        queue_client = self._get_queue_reference()
        queue_client.create_queue(metadata={'val1': 'test', 'val2': 'blah'})
        properties = queue_client.get_queue_properties()

        # Fresh queue: no messages, and exactly the metadata we set.
        self.assertEqual(0, properties.approximate_message_count)
        self.assertEqual(2, len(properties.metadata))
        self.assertEqual('test', properties.metadata['val1'])
        self.assertEqual('blah', properties.metadata['val2'])

    @record
    def test_delete_non_existing_queue(self):
        # Deleting a queue that was never created raises.
        queue_client = self._get_queue_reference()
        with self.assertRaises(ResourceNotFoundError):
            queue_client.delete_queue()

    @record
    def test_delete_existing_queue_fail_not_exist(self):
        # delete_queue on an existing queue succeeds and returns None.
        queue_client = self._get_queue_reference()
        queue_client.create_queue()
        self.assertIsNone(queue_client.delete_queue())

    @record
    def test_list_queues(self):
        # Every queue registered by the helpers should appear in the listing.
        all_queues = list(self.qsc.list_queues())

        self.assertIsNotNone(all_queues)
        self.assertTrue(len(self.test_queues) <= len(all_queues))

    @record
    def test_list_queues_with_options(self):
        # Arrange: four queues sharing a common prefix.
        prefix = 'listqueue'
        for i in range(0, 4):
            self._create_queue(prefix + str(i))

        # Act: fetch a first page of three, then resume from its continuation
        # token with metadata included.
        pager1 = self.qsc.list_queues(name_starts_with=prefix,
                                      results_per_page=3).by_page()
        first_page = list(next(pager1))

        pager2 = self.qsc.list_queues(name_starts_with=prefix,
                                      include_metadata=True).by_page(
                                          pager1.continuation_token)
        second_page = list(next(pager2))

        # Assert: first page is exactly the requested size, without metadata.
        self.assertIsNotNone(first_page)
        self.assertEqual(3, len(first_page))
        self.assertIsNotNone(first_page[0])
        self.assertIsNone(first_page[0].metadata)
        self.assertNotEqual('', first_page[0].name)
        assert pager1.location_mode is not None
        # Assert: the remaining queues show up on the resumed listing.
        self.assertIsNotNone(second_page)
        self.assertTrue(len(self.test_queues) - 3 <= len(second_page))
        self.assertIsNotNone(second_page[0])
        self.assertNotEqual('', second_page[0].name)

    @record
    def test_list_queues_with_metadata(self):
        # Arrange: a queue with known metadata.
        queue = self._create_queue()
        queue.set_queue_metadata(metadata={'val1': 'test', 'val2': 'blah'})

        # Act: list just this queue, asking the service for its metadata.
        listed = list(
            self.qsc.list_queues(name_starts_with=queue.queue_name,
                                 results_per_page=1,
                                 include_metadata=True))[0]

        # Assert: the listing entry carries the metadata we set.
        self.assertIsNotNone(listed)
        self.assertEqual(queue.queue_name, listed.name)
        self.assertIsNotNone(listed.metadata)
        self.assertEqual(len(listed.metadata), 2)
        self.assertEqual(listed.metadata['val1'], 'test')

    @record
    def test_set_queue_metadata(self):
        # Metadata written via set_queue_metadata is returned verbatim.
        expected = {'hello': 'world', 'number': '43'}
        queue = self._create_queue()

        queue.set_queue_metadata(expected)
        self.assertDictEqual(queue.get_queue_properties().metadata, expected)

    @record
    def test_get_queue_metadata_message_count(self):
        # A queue holding one message reports a count of at least one
        # (approximate_message_count is not exact) and no metadata.
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        properties = queue_client.get_queue_properties()

        self.assertTrue(properties.approximate_message_count >= 1)
        self.assertEqual(0, len(properties.metadata))

    @record
    def test_queue_exists(self):
        # get_queue_properties returns a truthy result for an existing queue.
        queue = self._create_queue()
        self.assertTrue(queue.get_queue_properties())

    @record
    def test_queue_not_exists(self):
        # Property lookup on a never-created queue raises.
        queue = self.qsc.get_queue_client(self.get_resource_name('missing'))
        with self.assertRaises(ResourceNotFoundError):
            queue.get_queue_properties()

    @record
    def test_put_message(self):
        """An enqueue response carries populated id, times, receipt and content."""
        # Arrange
        client = self._create_queue()

        # Act - enqueue several messages and inspect the last response.
        for text in (u'message1', u'message2', u'message3'):
            client.enqueue_message(text)
        message = client.enqueue_message(u'message4')

        # Assert
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertIsInstance(message.insertion_time, datetime)
        self.assertIsInstance(message.expiration_time, datetime)
        self.assertNotEqual('', message.pop_receipt)
        self.assertEqual(u'message4', message.content)

    @record
    def test_put_message_large_time_to_live(self):
        """A time_to_live far above the default is accepted without error."""
        # Arrange
        queue_client = self._create_queue()
        # There should be no upper bound on a queue message's time to live
        queue_client.enqueue_message(u'message1',
                                     time_to_live=1024 * 1024 * 1024)

        # Act
        messages = queue_client.peek_messages()

        # Assert
        # Allow up to an hour of slack between insertion and expiration
        # to tolerate clock differences between client and service.
        self.assertGreaterEqual(
            messages[0].expiration_time, messages[0].insertion_time +
            timedelta(seconds=1024 * 1024 * 1024 - 3600))

    @record
    def test_put_message_infinite_time_to_live(self):
        """time_to_live=-1 means the message never expires."""
        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1', time_to_live=-1)

        # Act
        messages = queue_client.peek_messages()

        # Assert
        # "Never expires" is reported as the maximum representable date.
        self.assertEqual(messages[0].expiration_time.year, date.max.year)

    @record
    def test_get_messages(self):
        """Receiving returns the oldest message with full metadata populated."""
        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        queue_client.enqueue_message(u'message2')
        queue_client.enqueue_message(u'message3')
        queue_client.enqueue_message(u'message4')

        # Act - receive the first (oldest) message.
        message = next(queue_client.receive_messages())

        # Asserts (a duplicated assertIsNotNone was removed)
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1', message.content)
        self.assertNotEqual('', message.pop_receipt)
        self.assertEqual(1, message.dequeue_count)

        self.assertIsInstance(message.insertion_time, datetime)
        self.assertIsInstance(message.expiration_time, datetime)
        self.assertIsInstance(message.time_next_visible, datetime)

    @record
    def test_get_messages_with_options(self):
        """receive_messages honors messages_per_page and visibility_timeout."""
        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        queue_client.enqueue_message(u'message2')
        queue_client.enqueue_message(u'message3')
        queue_client.enqueue_message(u'message4')

        # Act
        pager = queue_client.receive_messages(messages_per_page=4,
                                              visibility_timeout=20)
        result = list(pager)

        # Asserts
        self.assertIsNotNone(result)
        self.assertEqual(4, len(result))

        for message in result:
            self.assertIsNotNone(message)
            self.assertNotEqual('', message.id)
            self.assertNotEqual('', message.content)
            self.assertNotEqual('', message.pop_receipt)
            self.assertEqual(1, message.dequeue_count)
            # The original compared these datetimes to '' which is always
            # true and therefore asserted nothing; check the type instead,
            # matching test_get_messages.
            self.assertIsInstance(message.insertion_time, datetime)
            self.assertIsInstance(message.expiration_time, datetime)
            self.assertIsInstance(message.time_next_visible, datetime)

    @record
    def test_peek_messages(self):
        """peek_messages returns one message by default without dequeuing it."""
        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        queue_client.enqueue_message(u'message2')
        queue_client.enqueue_message(u'message3')
        queue_client.enqueue_message(u'message4')

        # Act
        result = queue_client.peek_messages()

        # Asserts
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertNotEqual('', message.content)
        # Peeked messages are not dequeued: no pop receipt, no next-visible
        # time, and a dequeue count of zero.
        self.assertIsNone(message.pop_receipt)
        self.assertEqual(0, message.dequeue_count)
        # The original compared these datetimes to '' which is always true
        # and therefore asserted nothing; check the type instead.
        self.assertIsInstance(message.insertion_time, datetime)
        self.assertIsInstance(message.expiration_time, datetime)
        self.assertIsNone(message.time_next_visible)

    @record
    def test_peek_messages_with_options(self):
        """peek_messages(max_messages=4) returns up to four messages."""
        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        queue_client.enqueue_message(u'message2')
        queue_client.enqueue_message(u'message3')
        queue_client.enqueue_message(u'message4')

        # Act
        result = queue_client.peek_messages(max_messages=4)

        # Asserts
        self.assertIsNotNone(result)
        self.assertEqual(4, len(result))
        for message in result:
            self.assertIsNotNone(message)
            self.assertNotEqual('', message.id)
            self.assertNotEqual('', message.content)
            # Peeked messages are never dequeued.
            self.assertIsNone(message.pop_receipt)
            self.assertEqual(0, message.dequeue_count)
            # The original compared these datetimes to '' which is always
            # true and therefore asserted nothing; check the type instead.
            self.assertIsInstance(message.insertion_time, datetime)
            self.assertIsInstance(message.expiration_time, datetime)
            self.assertIsNone(message.time_next_visible)

    @record
    def test_clear_messages(self):
        """clear_messages empties the queue; a subsequent peek returns nothing."""
        # Arrange
        client = self._create_queue()
        for text in (u'message1', u'message2', u'message3', u'message4'):
            client.enqueue_message(text)

        # Act
        client.clear_messages()
        peeked = client.peek_messages()

        # Assert
        self.assertIsNotNone(peeked)
        self.assertEqual(0, len(peeked))

    @record
    def test_delete_message(self):
        """Deleting a received message removes it; three of four remain."""
        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        queue_client.enqueue_message(u'message2')
        queue_client.enqueue_message(u'message3')
        queue_client.enqueue_message(u'message4')

        # Act
        message = next(queue_client.receive_messages())
        queue_client.delete_message(message)

        messages_pager = queue_client.receive_messages(messages_per_page=32)
        messages = list(messages_pager)

        # Asserts - unittest assertions for consistency with the other tests
        # (the original mixed in bare `assert` statements here).
        self.assertIsNotNone(messages)
        self.assertEqual(3, len(messages))

    @record
    def test_update_message(self):
        """Updating a message with visibility_timeout=0 makes it immediately
        receivable again with an incremented dequeue count."""
        # Action
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        messages = queue_client.receive_messages()
        list_result1 = next(messages)
        message = queue_client.update_message(
            list_result1.id,
            pop_receipt=list_result1.pop_receipt,
            visibility_timeout=0)
        # visibility_timeout=0 made the message visible again, so the same
        # iterator yields it a second time.
        list_result2 = next(messages)

        # Asserts
        # Update response
        self.assertIsNotNone(message)
        self.assertIsNotNone(message.pop_receipt)
        self.assertIsNotNone(message.time_next_visible)
        self.assertIsInstance(message.time_next_visible, datetime)

        # Get response
        self.assertIsNotNone(list_result2)
        message = list_result2
        self.assertIsNotNone(message)
        self.assertEqual(list_result1.id, message.id)
        self.assertEqual(u'message1', message.content)
        self.assertEqual(2, message.dequeue_count)
        self.assertIsNotNone(message.pop_receipt)
        self.assertIsNotNone(message.insertion_time)
        self.assertIsNotNone(message.expiration_time)
        self.assertIsNotNone(message.time_next_visible)

    @record
    def test_update_message_content(self):
        """update_message can replace a message's content; the new text is
        returned when the message is received again."""
        # Action
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')

        messages = queue_client.receive_messages()
        list_result1 = next(messages)
        message = queue_client.update_message(
            list_result1.id,
            pop_receipt=list_result1.pop_receipt,
            visibility_timeout=0,
            content=u'new text')
        # visibility_timeout=0 makes the updated message immediately receivable.
        list_result2 = next(messages)

        # Asserts
        # Update response
        self.assertIsNotNone(message)
        self.assertIsNotNone(message.pop_receipt)
        self.assertIsNotNone(message.time_next_visible)
        self.assertIsInstance(message.time_next_visible, datetime)

        # Get response
        self.assertIsNotNone(list_result2)
        message = list_result2
        self.assertIsNotNone(message)
        self.assertEqual(list_result1.id, message.id)
        self.assertEqual(u'new text', message.content)
        self.assertEqual(2, message.dequeue_count)
        self.assertIsNotNone(message.pop_receipt)
        self.assertIsNotNone(message.insertion_time)
        self.assertIsNotNone(message.expiration_time)
        self.assertIsNotNone(message.time_next_visible)

    def test_account_sas(self):
        """An account-level SAS with object read rights can peek messages."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        # Start time is backdated to tolerate clock skew between client and service.
        token = self.qsc.generate_shared_access_signature(
            ResourceTypes.OBJECT, AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
            datetime.utcnow() - timedelta(minutes=5))

        # Act - a fresh client authenticated only by the SAS token.
        service = QueueServiceClient(
            account_url=self.qsc.url,
            credential=token,
        )
        new_queue_client = service.get_queue_client(queue_client.queue_name)
        result = new_queue_client.peek_messages()

        # Assert
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1', message.content)

    @record
    def test_token_credential(self):
        """OAuth token credential authenticates; a fake token is rejected."""
        # Give the skip a reason so test reports explain why it did not run
        # (the original passed an empty string).
        pytest.skip("OAuth token credential test is disabled")
        token_credential = self.generate_oauth_token()

        # Action 1: make sure token works
        service = QueueServiceClient(self._get_oauth_queue_url(),
                                     credential=token_credential)
        queues = service.get_service_properties()
        self.assertIsNotNone(queues)

        # Action 2: change token value to make request fail
        fake_credential = self.generate_fake_token()
        service = QueueServiceClient(self._get_oauth_queue_url(),
                                     credential=fake_credential)
        with self.assertRaises(ClientAuthenticationError):
            list(service.list_queues())

        # Action 3: update token to make it working again
        service = QueueServiceClient(self._get_oauth_queue_url(),
                                     credential=token_credential)
        queues = list(service.list_queues())
        self.assertIsNotNone(queues)

    def test_sas_read(self):
        """A read-only queue SAS token permits peeking messages."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        # Start time is backdated to tolerate clock skew between client and service.
        token = queue_client.generate_shared_access_signature(
            QueuePermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
            datetime.utcnow() - timedelta(minutes=5))

        # Act - a fresh client authenticated only by the SAS token.
        service = QueueClient(
            queue_url=queue_client.url,
            credential=token,
        )
        result = service.peek_messages()

        # Assert
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1', message.content)

    def test_sas_add(self):
        """An add-only queue SAS token permits enqueueing messages."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_client = self._create_queue()
        token = queue_client.generate_shared_access_signature(
            QueuePermissions.ADD,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act - enqueue through a client authenticated only by the SAS token.
        service = QueueClient(
            queue_url=queue_client.url,
            credential=token,
        )
        result = service.enqueue_message(u'addedmessage')

        # Assert - receive with the key-authenticated client to verify delivery.
        result = next(queue_client.receive_messages())
        self.assertEqual(u'addedmessage', result.content)

    def test_sas_update(self):
        """An update-only queue SAS token permits updating a received message."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        token = queue_client.generate_shared_access_signature(
            QueuePermissions.UPDATE,
            datetime.utcnow() + timedelta(hours=1),
        )
        messages = queue_client.receive_messages()
        result = next(messages)

        # Act - update through a client authenticated only by the SAS token.
        service = QueueClient(
            queue_url=queue_client.url,
            credential=token,
        )
        service.update_message(
            result.id,
            pop_receipt=result.pop_receipt,
            visibility_timeout=0,
            content=u'updatedmessage1',
        )

        # Assert - visibility_timeout=0 makes the updated message immediately
        # receivable from the same iterator.
        result = next(messages)
        self.assertEqual(u'updatedmessage1', result.content)

    def test_sas_process(self):
        """A process-only queue SAS token permits receiving (dequeuing) messages."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        token = queue_client.generate_shared_access_signature(
            QueuePermissions.PROCESS,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act - receive through a client authenticated only by the SAS token.
        service = QueueClient(
            queue_url=queue_client.url,
            credential=token,
        )
        message = next(service.receive_messages())

        # Assert
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1', message.content)

    def test_sas_signed_identifier(self):
        """A SAS referencing a stored access policy grants that policy's rights."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange - store a read policy on the queue, then issue a SAS that
        # refers to it by id rather than embedding permissions directly.
        access_policy = AccessPolicy()
        access_policy.start = datetime.utcnow() - timedelta(hours=1)
        access_policy.expiry = datetime.utcnow() + timedelta(hours=1)
        access_policy.permission = QueuePermissions.READ

        identifiers = {'testid': access_policy}

        queue_client = self._create_queue()
        # Response is not inspected (the original bound it to an unused local).
        queue_client.set_queue_access_policy(identifiers)

        queue_client.enqueue_message(u'message1')

        token = queue_client.generate_shared_access_signature(
            policy_id='testid')

        # Act
        service = QueueClient(
            queue_url=queue_client.url,
            credential=token,
        )
        result = service.peek_messages()

        # Assert
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1', message.content)

    @record
    def test_get_queue_acl(self):
        """A freshly created queue has an empty access policy list."""
        # Arrange
        client = self._create_queue()

        # Act
        access_policies = client.get_queue_access_policy()

        # Assert
        self.assertIsNotNone(access_policies)
        self.assertEqual(len(access_policies), 0)

    @record
    def test_get_queue_acl_iter(self):
        """The returned access policy collection is iterable (empty here)."""
        # Arrange
        client = self._create_queue()

        # Act - iterating must not raise even when the ACL is empty.
        access_policies = client.get_queue_access_policy()
        for _identifier in access_policies:
            pass

        # Assert
        self.assertIsNotNone(access_policies)
        self.assertEqual(len(access_policies), 0)

    @record
    def test_get_queue_acl_with_non_existing_queue(self):
        """Reading the ACL of a queue that does not exist raises."""
        # Arrange
        queue_client = self._get_queue_reference()

        # Act / Assert
        with self.assertRaises(ResourceNotFoundError):
            queue_client.get_queue_access_policy()

    @record
    def test_set_queue_acl(self):
        """Setting an empty ACL succeeds and the ACL stays readable."""
        # Arrange
        client = self._create_queue()

        # Act
        response = client.set_queue_access_policy()

        # Assert
        self.assertIsNone(response)
        access_policies = client.get_queue_access_policy()
        self.assertIsNotNone(access_policies)

    @record
    def test_set_queue_acl_with_empty_signed_identifiers(self):
        """An empty signed_identifiers mapping clears the queue's ACL."""
        # Arrange
        client = self._create_queue()

        # Act
        client.set_queue_access_policy(signed_identifiers={})

        # Assert
        access_policies = client.get_queue_access_policy()
        self.assertIsNotNone(access_policies)
        self.assertEqual(len(access_policies), 0)

    @record
    def test_set_queue_acl_with_empty_signed_identifier(self):
        """A signed identifier with an empty AccessPolicy round-trips with
        all of its fields unset."""
        # Arrange
        queue_client = self._create_queue()

        # Act
        queue_client.set_queue_access_policy(
            signed_identifiers={'empty': AccessPolicy()})

        # Assert
        acl = queue_client.get_queue_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual(len(acl), 1)
        self.assertIsNotNone(acl['empty'])
        self.assertIsNone(acl['empty'].permission)
        self.assertIsNone(acl['empty'].expiry)
        self.assertIsNone(acl['empty'].start)

    @record
    def test_set_queue_acl_with_signed_identifiers(self):
        """A fully populated signed identifier is stored and returned by id."""
        # Arrange
        queue_client = self._create_queue()

        # Act
        access_policy = AccessPolicy(
            permission=QueuePermissions.READ,
            expiry=datetime.utcnow() + timedelta(hours=1),
            start=datetime.utcnow() - timedelta(minutes=5))
        identifiers = {'testid': access_policy}

        resp = queue_client.set_queue_access_policy(
            signed_identifiers=identifiers)

        # Assert
        self.assertIsNone(resp)
        acl = queue_client.get_queue_access_policy()
        self.assertIsNotNone(acl)
        self.assertEqual(len(acl), 1)
        self.assertTrue('testid' in acl)

    @record
    def test_set_queue_acl_too_many_ids(self):
        """Setting more signed identifiers than allowed raises ValueError."""
        # Arrange
        queue_client = self._create_queue()

        # Act - build 16 identifiers, one more than the SDK permits
        # (dict comprehension replaces the original dict()+loop).
        identifiers = {'id{}'.format(i): AccessPolicy() for i in range(16)}

        # Assert
        with self.assertRaises(ValueError):
            queue_client.set_queue_access_policy(identifiers)

    @record
    def test_set_queue_acl_with_non_existing_queue(self):
        """Setting an ACL on a queue that does not exist raises."""
        # Arrange
        queue_client = self._get_queue_reference()

        # Act / Assert
        with self.assertRaises(ResourceNotFoundError):
            queue_client.set_queue_access_policy()

    @record
    def test_unicode_create_queue_unicode_name(self):
        """Queue names must be lowercase alphanumeric; unicode names are rejected."""
        # Action
        queue_name = u'啊齄丂狛狜'

        with self.assertRaises(HttpResponseError):
            # not supported - queue name must be alphanumeric, lowercase
            client = self.qsc.get_queue_client(queue_name)
            client.create_queue()

    @record
    def test_unicode_get_messages_unicode_data(self):
        """Unicode message content survives an enqueue/receive round trip."""
        # Action
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1㚈')
        message = next(queue_client.receive_messages())

        # Asserts
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1㚈', message.content)
        self.assertNotEqual('', message.pop_receipt)
        self.assertEqual(1, message.dequeue_count)
        self.assertIsInstance(message.insertion_time, datetime)
        self.assertIsInstance(message.expiration_time, datetime)
        self.assertIsInstance(message.time_next_visible, datetime)

    @record
    def test_unicode_update_message_unicode_data(self):
        """A received message can be updated in place with unicode content."""
        # Action
        queue_client = self._create_queue()
        queue_client.enqueue_message(u'message1')
        messages = queue_client.receive_messages()

        list_result1 = next(messages)
        list_result1.content = u'啊齄丂狛狜'
        queue_client.update_message(list_result1, visibility_timeout=0)

        # Asserts
        # visibility_timeout=0 makes the updated message immediately receivable.
        message = next(messages)
        self.assertIsNotNone(message)
        self.assertEqual(list_result1.id, message.id)
        self.assertEqual(u'啊齄丂狛狜', message.content)
        self.assertNotEqual('', message.pop_receipt)
        self.assertEqual(2, message.dequeue_count)
        self.assertIsInstance(message.insertion_time, datetime)
        self.assertIsInstance(message.expiration_time, datetime)
        self.assertIsInstance(message.time_next_visible, datetime)
class StorageQueueEncryptionTest(QueueTestCase):
    """Tests for client-side encryption of queue messages: key-encryption-key
    (KEK) wrap/unwrap, key resolvers, strict require_encryption mode, and
    validation of the encrypted on-the-wire payload."""

    def setUp(self):
        super(StorageQueueEncryptionTest, self).setUp()

        queue_url = self._get_queue_url()
        credentials = self._get_shared_key_credential()
        self.qsc = QueueServiceClient(account_url=queue_url,
                                      credential=credentials)
        # Queues created during a test; deleted (best effort) in tearDown.
        self.test_queues = []

    def tearDown(self):
        if not self.is_playback():
            for queue in self.test_queues:
                try:
                    self.qsc.delete_queue(queue.queue_name)
                except:
                    # Best-effort cleanup; ignore queues already removed.
                    pass
        return super(StorageQueueEncryptionTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_queue_reference(self, prefix=TEST_QUEUE_PREFIX):
        """Return a client for a uniquely named test queue (not yet created)."""
        queue_name = self.get_resource_name(prefix)
        queue = self.qsc.get_queue_client(queue_name)
        self.test_queues.append(queue)
        return queue

    def _create_queue(self, prefix=TEST_QUEUE_PREFIX):
        """Create a test queue (tolerating prior existence) and return its client."""
        queue = self._get_queue_reference(prefix)
        try:
            created = queue.create_queue()
        except ResourceExistsError:
            pass
        return queue

    # --------------------------------------------------------------------------

    @record
    def test_get_messages_encrypted_kek(self):
        """A message encrypted with a KEK decrypts transparently on receive."""
        # Arrange
        self.qsc.key_encryption_key = KeyWrapper('key1')
        queue = self._create_queue()
        queue.enqueue_message(u'encrypted_message_2')

        # Act
        li = next(queue.receive_messages())

        # Assert
        self.assertEqual(li.content, u'encrypted_message_2')

    @record
    def test_get_messages_encrypted_resolver(self):
        """With no KEK set, a key resolver function supplies the unwrap key."""
        # Arrange
        self.qsc.key_encryption_key = KeyWrapper('key1')
        queue = self._create_queue()
        queue.enqueue_message(u'encrypted_message_2')
        key_resolver = KeyResolver()
        key_resolver.put_key(self.qsc.key_encryption_key)
        queue.key_resolver_function = key_resolver.resolve_key
        queue.key_encryption_key = None  # Ensure that the resolver is used

        # Act
        li = next(queue.receive_messages())

        # Assert
        self.assertEqual(li.content, u'encrypted_message_2')

    @record
    def test_peek_messages_encrypted_kek(self):
        """Peeked messages are decrypted with the configured KEK."""
        # Arrange
        self.qsc.key_encryption_key = KeyWrapper('key1')
        queue = self._create_queue()
        queue.enqueue_message(u'encrypted_message_3')

        # Act
        li = queue.peek_messages()

        # Assert
        self.assertEqual(li[0].content, u'encrypted_message_3')

    @record
    def test_peek_messages_encrypted_resolver(self):
        """Peeked messages are decrypted via the key resolver when no KEK is set."""
        # Arrange
        self.qsc.key_encryption_key = KeyWrapper('key1')
        queue = self._create_queue()
        queue.enqueue_message(u'encrypted_message_4')
        key_resolver = KeyResolver()
        key_resolver.put_key(self.qsc.key_encryption_key)
        queue.key_resolver_function = key_resolver.resolve_key
        queue.key_encryption_key = None  # Ensure that the resolver is used

        # Act
        li = queue.peek_messages()

        # Assert
        self.assertEqual(li[0].content, u'encrypted_message_4')

    def test_peek_messages_encrypted_kek_RSA(self):
        """RSA-wrapped content keys also round-trip (live only; keys are random)."""
        # We can only generate random RSA keys, so this must be run live or
        # the playback test will fail due to a change in kek values.
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        self.qsc.key_encryption_key = RSAKeyWrapper('key2')
        queue = self._create_queue()
        queue.enqueue_message(u'encrypted_message_3')

        # Act
        li = queue.peek_messages()

        # Assert
        self.assertEqual(li[0].content, u'encrypted_message_3')

    @record
    def test_update_encrypted_message(self):
        """An encrypted message can be updated and re-received with new content."""
        # TODO: Recording doesn't work
        if TestMode.need_recording_file(self.test_mode):
            return
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue.enqueue_message(u'Update Me')

        messages = queue.receive_messages()
        list_result1 = next(messages)
        list_result1.content = u'Updated'

        # Act
        message = queue.update_message(list_result1)
        list_result2 = next(messages)

        # Assert
        self.assertEqual(u'Updated', list_result2.content)

    @record
    def test_update_encrypted_binary_message(self):
        """Binary (base64-encoded) encrypted messages can be updated in place."""
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue._config.message_encode_policy = BinaryBase64EncodePolicy()
        queue._config.message_decode_policy = BinaryBase64DecodePolicy()

        binary_message = self.get_random_bytes(100)
        queue.enqueue_message(binary_message)
        messages = queue.receive_messages()
        list_result1 = next(messages)

        # Act
        binary_message = self.get_random_bytes(100)
        list_result1.content = binary_message
        queue.update_message(list_result1)

        list_result2 = next(messages)

        # Assert
        self.assertEqual(binary_message, list_result2.content)

    @record
    def test_update_encrypted_raw_text_message(self):
        """Raw (unencoded) text encrypted messages can be updated in place."""
        # TODO: Recording doesn't work
        if TestMode.need_recording_file(self.test_mode):
            return
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue._config.message_encode_policy = NoEncodePolicy()
        queue._config.message_decode_policy = NoDecodePolicy()

        raw_text = u'Update Me'
        queue.enqueue_message(raw_text)
        messages = queue.receive_messages()
        list_result1 = next(messages)

        # Act
        raw_text = u'Updated'
        list_result1.content = raw_text
        queue.update_message(list_result1)

        list_result2 = next(messages)

        # Assert
        self.assertEqual(raw_text, list_result2.content)

    @record
    def test_update_encrypted_json_message(self):
        """JSON payloads in encrypted messages survive an update round trip."""
        # TODO: Recording doesn't work
        if TestMode.need_recording_file(self.test_mode):
            return
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue._config.message_encode_policy = NoEncodePolicy()
        queue._config.message_decode_policy = NoDecodePolicy()

        message_dict = {'val1': 1, 'val2': '2'}
        json_text = dumps(message_dict)
        queue.enqueue_message(json_text)
        messages = queue.receive_messages()
        list_result1 = next(messages)

        # Act
        message_dict['val1'] = 0
        message_dict['val2'] = 'updated'
        json_text = dumps(message_dict)
        list_result1.content = json_text
        queue.update_message(list_result1)

        list_result2 = next(messages)

        # Assert
        self.assertEqual(message_dict, loads(list_result2.content))

    @record
    def test_invalid_value_kek_wrap(self):
        """Enqueueing fails with AttributeError when KEK methods are invalid."""
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue.key_encryption_key.get_kid = None

        with self.assertRaises(AttributeError) as e:
            queue.enqueue_message(u'message')

        self.assertEqual(
            str(e.exception),
            _ERROR_OBJECT_INVALID.format('key encryption key', 'get_kid'))

        # NOTE(review): this second case repeats get_kid = None; presumably it
        # was meant to invalidate get_key_wrap_algorithm — TODO confirm.
        queue.key_encryption_key = KeyWrapper('key1')
        queue.key_encryption_key.get_kid = None
        with self.assertRaises(AttributeError):
            queue.enqueue_message(u'message')

        queue.key_encryption_key = KeyWrapper('key1')
        queue.key_encryption_key.wrap_key = None
        with self.assertRaises(AttributeError):
            queue.enqueue_message(u'message')

    @record
    def test_missing_attribute_kek_wrap(self):
        """Enqueueing fails when the KEK object lacks any required method."""
        # Arrange
        queue = self._create_queue()

        valid_key = KeyWrapper('key1')

        # Act
        invalid_key_1 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_1.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
        invalid_key_1.get_kid = valid_key.get_kid
        # No attribute wrap_key
        queue.key_encryption_key = invalid_key_1
        with self.assertRaises(AttributeError):
            queue.enqueue_message(u'message')

        invalid_key_2 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_2.wrap_key = valid_key.wrap_key
        invalid_key_2.get_kid = valid_key.get_kid
        # No attribute get_key_wrap_algorithm
        queue.key_encryption_key = invalid_key_2
        with self.assertRaises(AttributeError):
            queue.enqueue_message(u'message')

        invalid_key_3 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_3.get_key_wrap_algorithm = valid_key.get_key_wrap_algorithm
        invalid_key_3.wrap_key = valid_key.wrap_key
        # No attribute get_kid
        queue.key_encryption_key = invalid_key_3
        with self.assertRaises(AttributeError):
            queue.enqueue_message(u'message')

    @record
    def test_invalid_value_kek_unwrap(self):
        """Decryption fails when the KEK's unwrap methods are invalidated."""
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue.enqueue_message(u'message')

        # Act
        queue.key_encryption_key.unwrap_key = None
        with self.assertRaises(HttpResponseError):
            queue.peek_messages()

        queue.key_encryption_key.get_kid = None
        with self.assertRaises(HttpResponseError):
            queue.peek_messages()

    @record
    def test_missing_attribute_kek_unrwap(self):
        """Decryption fails when the KEK object lacks a required unwrap method."""
        # (Note: 'unrwap' typo is preserved in the test name.)
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue.enqueue_message(u'message')

        # Act
        valid_key = KeyWrapper('key1')
        invalid_key_1 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_1.unwrap_key = valid_key.unwrap_key
        # No attribute get_kid
        queue.key_encryption_key = invalid_key_1
        with self.assertRaises(HttpResponseError) as e:
            queue.peek_messages()

        self.assertEqual(str(e.exception), "Decryption failed.")

        invalid_key_2 = lambda: None  # functions are objects, so this effectively creates an empty object
        invalid_key_2.get_kid = valid_key.get_kid
        # No attribute unwrap_key
        queue.key_encryption_key = invalid_key_2
        with self.assertRaises(HttpResponseError):
            queue.peek_messages()

    @record
    def test_validate_encryption(self):
        """Manually decrypt the raw payload to validate the wire format:
        unwrap the content key, AES-CBC decrypt, and PKCS7-unpad."""
        # Arrange
        queue = self._create_queue()
        kek = KeyWrapper('key1')
        queue.key_encryption_key = kek
        queue.enqueue_message(u'message')

        # Act
        queue.key_encryption_key = None  # Message will not be decrypted
        li = queue.peek_messages()
        message = li[0].content
        message = loads(message)

        encryption_data = message['EncryptionData']

        wrapped_content_key = encryption_data['WrappedContentKey']
        wrapped_content_key = _WrappedContentKey(
            wrapped_content_key['Algorithm'],
            b64decode(
                wrapped_content_key['EncryptedKey'].encode(encoding='utf-8')),
            wrapped_content_key['KeyId'])

        encryption_agent = encryption_data['EncryptionAgent']
        encryption_agent = _EncryptionAgent(
            encryption_agent['EncryptionAlgorithm'],
            encryption_agent['Protocol'])

        encryption_data = _EncryptionData(
            b64decode(encryption_data['ContentEncryptionIV'].encode(
                encoding='utf-8')), encryption_agent, wrapped_content_key,
            {'EncryptionLibrary': VERSION})

        message = message['EncryptedMessageContents']
        content_encryption_key = kek.unwrap_key(
            encryption_data.wrapped_content_key.encrypted_key,
            encryption_data.wrapped_content_key.algorithm)

        # Create decryption cipher
        backend = backends.default_backend()
        algorithm = AES(content_encryption_key)
        mode = CBC(encryption_data.content_encryption_IV)
        cipher = Cipher(algorithm, mode, backend)

        # decode and decrypt data
        decrypted_data = _decode_base64_to_bytes(message)
        decryptor = cipher.decryptor()
        decrypted_data = (decryptor.update(decrypted_data) +
                          decryptor.finalize())

        # unpad data
        unpadder = PKCS7(128).unpadder()
        decrypted_data = (unpadder.update(decrypted_data) +
                          unpadder.finalize())

        decrypted_data = decrypted_data.decode(encoding='utf-8')

        # Assert
        self.assertEqual(decrypted_data, u'message')

    @record
    def test_put_with_strict_mode(self):
        """With require_encryption=True, enqueueing without a KEK raises."""
        # Arrange
        queue = self._create_queue()
        kek = KeyWrapper('key1')
        queue.key_encryption_key = kek
        queue.require_encryption = True

        queue.enqueue_message(u'message')
        queue.key_encryption_key = None

        # Assert
        with self.assertRaises(ValueError) as e:
            queue.enqueue_message(u'message')

        self.assertEqual(str(e.exception),
                         "Encryption required but no key was provided.")

    @record
    def test_get_with_strict_mode(self):
        """With require_encryption=True, receiving a plaintext message raises."""
        # Arrange
        queue = self._create_queue()
        queue.enqueue_message(u'message')

        queue.require_encryption = True
        queue.key_encryption_key = KeyWrapper('key1')
        with self.assertRaises(ValueError) as e:
            next(queue.receive_messages())

        self.assertEqual(str(e.exception), 'Message was not encrypted.')

    @record
    def test_encryption_add_encrypted_64k_message(self):
        """Encryption overhead pushes a max-size (64 KiB) message over the limit."""
        # Arrange
        queue = self._create_queue()
        message = u'a' * 1024 * 64

        # Act - the plaintext message fits exactly.
        queue.enqueue_message(message)

        # Assert - the encrypted form exceeds the size limit and is rejected.
        queue.key_encryption_key = KeyWrapper('key1')
        with self.assertRaises(HttpResponseError):
            queue.enqueue_message(message)

    @record
    def test_encryption_nonmatching_kid(self):
        """Decryption fails when the stored key id no longer matches the KEK."""
        # Arrange
        queue = self._create_queue()
        queue.key_encryption_key = KeyWrapper('key1')
        queue.enqueue_message(u'message')

        # Act
        queue.key_encryption_key.kid = 'Invalid'

        # Assert
        with self.assertRaises(HttpResponseError) as e:
            next(queue.receive_messages())

        self.assertEqual(str(e.exception), "Decryption failed.")
class StorageQueueEncodingTest(QueueTestCase):
    """Tests for queue message encode/decode policies (XML text and base64)."""

    def setUp(self):
        super(StorageQueueEncodingTest, self).setUp()

        queue_url = self._get_queue_url()
        credentials = self._get_shared_key_credential()
        self.qsc = QueueServiceClient(account_url=queue_url, credential=credentials)
        # Queues touched by a test; deleted in tearDown when running live.
        self.test_queues = []

    def tearDown(self):
        if not self.is_playback():
            for queue in self.test_queues:
                try:
                    self.qsc.delete_queue(queue.queue_name)
                except Exception:
                    # Best-effort cleanup: the queue may never have been
                    # created.  (Was a bare ``except:``, which also swallowed
                    # KeyboardInterrupt/SystemExit.)
                    pass
        return super(StorageQueueEncodingTest, self).tearDown()

    # --Helpers-----------------------------------------------------------------
    def _get_queue_reference(self, prefix=TEST_QUEUE_PREFIX):
        """Return a QueueClient with a unique name, registered for cleanup."""
        queue_name = self.get_resource_name(prefix)
        queue = self.qsc.get_queue_client(queue_name)
        self.test_queues.append(queue)
        return queue

    def _create_queue(self, prefix=TEST_QUEUE_PREFIX):
        """Return a QueueClient whose underlying queue is guaranteed to exist."""
        queue = self._get_queue_reference(prefix)
        try:
            queue.create_queue()
        except ResourceExistsError:
            pass
        return queue

    def _validate_encoding(self, queue, message):
        """Round-trip *message* through *queue* and assert it is unchanged."""
        # Arrange: some tests hand in a client whose queue does not exist yet.
        try:
            queue.create_queue()
        except ResourceExistsError:
            pass

        # Action.
        queue.enqueue_message(message)

        # Asserts
        dequeued = next(queue.receive_messages())
        self.assertEqual(message, dequeued.content)

    # --------------------------------------------------------------------------

    @record
    def test_message_text_xml(self):
        """XML-reserved characters survive the default XML text encoding."""
        # Arrange.
        message = u'<message1>'
        queue = self.qsc.get_queue_client(self.get_resource_name(TEST_QUEUE_PREFIX))

        # Asserts
        self._validate_encoding(queue, message)

    @record
    def test_message_text_xml_whitespace(self):
        """Leading/trailing and embedded whitespace are preserved."""
        # Arrange.
        message = u'  mess\t age1\n'
        queue = self.qsc.get_queue_client(self.get_resource_name(TEST_QUEUE_PREFIX))

        # Asserts
        self._validate_encoding(queue, message)

    @record
    def test_message_text_xml_invalid_chars(self):
        """Characters invalid in XML are rejected by the service."""
        # Action.
        queue = self._get_queue_reference()
        message = u'\u0001'

        # Asserts
        with self.assertRaises(HttpResponseError):
            queue.enqueue_message(message)

    @record
    def test_message_text_base64(self):
        """TextBase64 policies round-trip text that plain XML cannot carry."""
        # Arrange.
        queue_url = self._get_queue_url()
        credentials = self._get_shared_key_credential()
        queue = QueueClient(
            queue_url=queue_url,
            queue=self.get_resource_name(TEST_QUEUE_PREFIX),
            credential=credentials,
            message_encode_policy=TextBase64EncodePolicy(),
            message_decode_policy=TextBase64DecodePolicy())

        message = u'\u0001'

        # Asserts
        self._validate_encoding(queue, message)

    @record
    def test_message_bytes_base64(self):
        """BinaryBase64 policies round-trip raw bytes."""
        # Arrange.
        queue_url = self._get_queue_url()
        credentials = self._get_shared_key_credential()
        queue = QueueClient(
            queue_url=queue_url,
            queue=self.get_resource_name(TEST_QUEUE_PREFIX),
            credential=credentials,
            message_encode_policy=BinaryBase64EncodePolicy(),
            message_decode_policy=BinaryBase64DecodePolicy())

        message = b'xyz'

        # Asserts
        self._validate_encoding(queue, message)

    @record
    def test_message_bytes_fails(self):
        """Bytes content is rejected when the encode policy expects text."""
        # Arrange
        queue = self._get_queue_reference()

        # Action.
        with self.assertRaises(TypeError) as e:
            message = b'xyz'
            queue.enqueue_message(message)

        # Asserts
        self.assertTrue(str(e.exception).startswith('Message content must be text'))

    @record
    def test_message_text_fails(self):
        """Text content is rejected when the encode policy expects bytes."""
        # Arrange
        queue_url = self._get_queue_url()
        credentials = self._get_shared_key_credential()
        queue = QueueClient(
            queue_url=queue_url,
            queue=self.get_resource_name(TEST_QUEUE_PREFIX),
            credential=credentials,
            message_encode_policy=BinaryBase64EncodePolicy(),
            message_decode_policy=BinaryBase64DecodePolicy())

        # Action.
        with self.assertRaises(TypeError) as e:
            message = u'xyz'
            queue.enqueue_message(message)

        # Asserts
        self.assertTrue(str(e.exception).startswith('Message content must be bytes'))

    @record
    def test_message_base64_decode_fails(self):
        """Decoding fails when the stored content is not valid base64."""
        # Arrange: store XML-encoded text, then read it back with a
        # base64 decode policy -- the mismatch must surface as DecodeError.
        queue_url = self._get_queue_url()
        credentials = self._get_shared_key_credential()
        queue = QueueClient(
            queue_url=queue_url,
            queue=self.get_resource_name(TEST_QUEUE_PREFIX),
            credential=credentials,
            message_encode_policy=TextXMLEncodePolicy(),
            message_decode_policy=BinaryBase64DecodePolicy())
        try:
            queue.create_queue()
        except ResourceExistsError:
            pass
        message = u'xyz'
        queue.enqueue_message(message)

        # Action.
        with self.assertRaises(DecodeError) as e:
            queue.peek_messages()

        # Asserts
        self.assertNotEqual(-1, str(e.exception).find('Message content is not valid base 64'))
# Esempio n. 20
# 0
class AzureFunctionAppBackend:
    """
    A wrap-up around Azure Function Apps backend.
    """
    def __init__(self, config, internal_storage):
        """Initialize the backend from the azure_fa section of the config.

        :param config: dict with the azure_fa settings (invocation_type,
            resource_group, storage account credentials, location,
            functions_version).
        :param internal_storage: accepted for backend interface symmetry;
            not used by this backend.
        """
        logger.debug("Creating Azure Functions client")
        self.name = 'azure_fa'
        self.type = 'faas'
        self.azure_config = config
        self.invocation_type = self.azure_config['invocation_type']
        self.resource_group = self.azure_config['resource_group']
        self.storage_account_name = self.azure_config['storage_account_name']
        self.storage_account_key = self.azure_config['storage_account_key']
        self.location = self.azure_config['location']
        self.functions_version = self.azure_config['functions_version']

        # Queue service used by the 'event' invocation type (in/out queues).
        self.queue_service_url = 'https://{}.queue.core.windows.net'.format(
            self.storage_account_name)
        self.queue_service = QueueServiceClient(
            account_url=self.queue_service_url,
            credential=self.storage_account_key)

        msg = COMPUTE_CLI_MSG.format('Azure Functions')
        logger.info("{} - Location: {}".format(msg, self.location))

    def _format_action_name(self, runtime_name, runtime_memory=None):
        """Turn a docker image name into a valid function app name.

        ``runtime_memory`` is accepted for call-site symmetry but ignored:
        consumption-plan function apps have no memory setting.
        """
        return runtime_name.replace('/', '--').replace(':', '--')

    def _format_queue_name(self, action_name, q_type):
        """Derive the in/out queue name for *action_name* (*q_type* suffix)."""
        return action_name.replace('--', '-') + '-' + q_type

    def _get_default_runtime_image_name(self):
        """Compose the default runtime name from account, python version,
        lithops revision and invocation type."""
        py_version = version_str(sys.version_info).replace('.', '')
        revision = 'latest' if 'dev' in __version__ else __version__.replace(
            '.', '')
        return '{}-{}-v{}-{}-{}'.format(self.storage_account_name,
                                        az_config.RUNTIME_NAME,
                                        py_version, revision,
                                        self.invocation_type)

    def create_runtime(self, docker_image_name, memory, timeout):
        """
        Creates a new runtime into Azure Function Apps
        from the provided Linux image for consumption plan.

        :return: runtime metadata (preinstalled modules) extracted by
            invoking the freshly published function.
        """
        default_runtime_img_name = self._get_default_runtime_image_name()
        if docker_image_name in ['default', default_runtime_img_name]:
            # We only build the default image. rest of images must already exist
            # in the docker registry.
            docker_image_name = default_runtime_img_name
            self._build_default_runtime(default_runtime_img_name)

        self._create_function(docker_image_name, memory, timeout)
        return self._generate_runtime_meta(docker_image_name, memory)

    def _build_default_runtime(self, default_runtime_img_name):
        """
        Builds the default runtime.

        NOTE(review): an unreachable docker-based build path used to follow
        this return; it referenced a ``build_runtime_docker`` method that
        does not exist in this class, so the dead code was removed.
        """
        return self.build_runtime(default_runtime_img_name)

    def build_runtime(self, runtime_name, requirements_file=None,
                      extra_args=None):
        """
        Assemble the function app project (requirements.txt, host.json,
        function.json, entry point and -- on unix -- vendored python
        modules) under az_config.BUILD_DIR.

        ``requirements_file`` and ``extra_args`` are accepted for interface
        compatibility and are currently unused.  ``extra_args`` previously
        used a mutable ``[]`` default (shared across calls); it now defaults
        to None.
        """
        extra_args = extra_args if extra_args is not None else []

        try:
            shutil.rmtree(az_config.BUILD_DIR)
        except Exception:
            # A missing build dir is fine; we recreate it below.
            pass

        action_name = self._format_action_name(runtime_name)

        build_dir = os.path.join(az_config.BUILD_DIR, action_name)
        os.makedirs(build_dir, exist_ok=True)

        logger.info('Building default runtime in {}'.format(build_dir))

        action_dir = os.path.join(build_dir, az_config.ACTION_DIR)
        os.makedirs(action_dir, exist_ok=True)

        # On non-unix systems the dependencies cannot be vendored locally,
        # so lithops itself is listed for remote installation.
        req_file = os.path.join(build_dir, 'requirements.txt')
        with open(req_file, 'w') as reqf:
            reqf.write(az_config.REQUIREMENTS_FILE)
            if not is_unix_system():
                if 'dev' in lithops.__version__:
                    reqf.write('git+https://github.com/lithops-cloud/lithops')
                else:
                    reqf.write('lithops=={}'.format(lithops.__version__))

        host_file = os.path.join(build_dir, 'host.json')
        with open(host_file, 'w') as hstf:
            hstf.write(az_config.HOST_FILE)

        # function.json: queue bindings for 'event', HTTP trigger for 'http'.
        fn_file = os.path.join(action_dir, 'function.json')
        if self.invocation_type == 'event':
            with open(fn_file, 'w') as fnf:
                in_q_name = self._format_queue_name(action_name,
                                                    az_config.IN_QUEUE)
                az_config.BINDINGS_QUEUE['bindings'][0][
                    'queueName'] = in_q_name
                out_q_name = self._format_queue_name(action_name,
                                                     az_config.OUT_QUEUE)
                az_config.BINDINGS_QUEUE['bindings'][1][
                    'queueName'] = out_q_name
                fnf.write(json.dumps(az_config.BINDINGS_QUEUE))

        elif self.invocation_type == 'http':
            with open(fn_file, 'w') as fnf:
                fnf.write(json.dumps(az_config.BINDINGS_HTTP))

        entry_point = os.path.join(os.path.dirname(__file__), 'entry_point.py')
        main_file = os.path.join(action_dir, '__init__.py')
        shutil.copy(entry_point, main_file)

        if is_unix_system():
            # Vendor the python dependencies next to the function code.
            mod_dir = os.path.join(build_dir, az_config.ACTION_MODULES_DIR)
            os.chdir(build_dir)
            cmd = '{} -m pip install -U -t {} -r requirements.txt'.format(
                sys.executable, mod_dir)
            if logger.getEffectiveLevel() != logging.DEBUG:
                cmd = cmd + " >{} 2>&1".format(os.devnull)
            os.system(cmd)
            create_handler_zip(az_config.FH_ZIP_LOCATION, entry_point,
                               '__init__.py')
            archive = zipfile.ZipFile(az_config.FH_ZIP_LOCATION)
            archive.extractall(path=mod_dir)
            os.remove(mod_dir + '/__init__.py')
            os.remove(az_config.FH_ZIP_LOCATION)

    def _create_function(self, docker_image_name, memory, timeout):
        """
        Create and publish an Azure Function via the ``az``/``func`` CLIs.
        """
        action_name = self._format_action_name(docker_image_name, memory)
        logger.info(
            'Creating new Lithops runtime for Azure Function: {}'.format(
                action_name))

        if self.invocation_type == 'event':
            # Queue names are computed outside the try blocks so the except
            # handlers cannot hit a NameError on the name variable.
            in_q_name = self._format_queue_name(action_name,
                                                az_config.IN_QUEUE)
            try:
                logger.debug('Creating queue {}'.format(in_q_name))
                self.queue_service.create_queue(in_q_name)
            except Exception:
                # Queue already exists: reuse it, but drop stale messages.
                in_queue = self.queue_service.get_queue_client(in_q_name)
                in_queue.clear_messages()
            out_q_name = self._format_queue_name(action_name,
                                                 az_config.OUT_QUEUE)
            try:
                logger.debug('Creating queue {}'.format(out_q_name))
                self.queue_service.create_queue(out_q_name)
            except Exception:
                out_queue = self.queue_service.get_queue_client(out_q_name)
                out_queue.clear_messages()

        python_version = version_str(sys.version_info)
        cmd = (
            'az functionapp create --name {} --storage-account {} '
            '--resource-group {} --os-type Linux  --runtime python '
            '--runtime-version {} --functions-version {} --consumption-plan-location {}'
            .format(action_name, self.storage_account_name,
                    self.resource_group, python_version,
                    self.functions_version, self.location))
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd = cmd + " >{} 2>&1".format(os.devnull)
        res = os.system(cmd)
        if res != 0:
            raise Exception(
                'There was an error creating the function in Azure. cmd: {}'.
                format(cmd))

        logger.debug('Publishing function: {}'.format(action_name))
        build_dir = os.path.join(az_config.BUILD_DIR, action_name)
        os.chdir(build_dir)
        res = 1
        # Publishing can fail while the new app is still provisioning:
        # retry until the CLI reports success.
        while res != 0:
            time.sleep(5)
            if is_unix_system():
                cmd = 'func azure functionapp publish {} --python --no-build'.format(
                    action_name)
            else:
                cmd = 'func azure functionapp publish {} --python'.format(
                    action_name)
            if logger.getEffectiveLevel() != logging.DEBUG:
                cmd = cmd + " >{} 2>&1".format(os.devnull)
            res = os.system(cmd)

        time.sleep(10)

    def delete_runtime(self, runtime_name, memory):
        """
        Deletes a runtime (the function app and its event queues).
        """
        action_name = self._format_action_name(runtime_name, memory)
        logger.debug('Deleting function app: {}'.format(action_name))
        cmd = ('az functionapp delete --name {} --resource-group {}'.format(
            action_name, self.resource_group))
        if logger.getEffectiveLevel() != logging.DEBUG:
            cmd = cmd + " >{} 2>&1".format(os.devnull)
        os.system(cmd)

        # Best-effort removal of the event queues (they do not exist for
        # 'http' invocation runtimes).
        for q_type in (az_config.IN_QUEUE, az_config.OUT_QUEUE):
            try:
                q_name = self._format_queue_name(action_name, q_type)
                self.queue_service.delete_queue(q_name)
            except Exception:
                pass

    def invoke(self,
               docker_image_name,
               memory=None,
               payload=None,
               return_result=False):
        """
        Invoke function.

        ``payload`` previously defaulted to a shared mutable ``{}``; it now
        defaults to None and an empty dict is created per call.

        :return: activation id, or the decoded result dict when
            ``return_result`` is True, or None on HTTP 429 (caller retries).
        """
        payload = {} if payload is None else payload
        action_name = self._format_action_name(docker_image_name, memory)
        if self.invocation_type == 'event':

            in_q_name = self._format_queue_name(action_name,
                                                az_config.IN_QUEUE)
            in_queue = self.queue_service.get_queue_client(in_q_name)
            msg = in_queue.send_message(dict_to_b64str(payload))
            activation_id = msg.id

            if return_result:
                out_q_name = self._format_queue_name(action_name,
                                                     az_config.OUT_QUEUE)
                out_queue = self.queue_service.get_queue_client(out_q_name)
                msg = []
                # Poll until the function writes its result to the out queue.
                while not msg:
                    time.sleep(1)
                    msg = out_queue.receive_message()
                out_queue.clear_messages()
                return b64str_to_dict(msg.content)

        elif self.invocation_type == 'http':
            endpoint = "https://{}.azurewebsites.net".format(action_name)
            parsed_url = urlparse(endpoint)
            # The app's TLS certificate may not be provisioned yet, so
            # verification is disabled for this connection.
            ctx = ssl._create_unverified_context()
            conn = http.client.HTTPSConnection(parsed_url.netloc, context=ctx)

            route = "/api/lithops_handler"
            if return_result:
                conn.request("GET",
                             route,
                             body=json.dumps(payload, default=str))
                resp = conn.getresponse()
                data = json.loads(resp.read().decode("utf-8"))
                conn.close()
                return data
            else:
                # logger.debug('Invoking calls {}'.format(', '.join(payload['call_ids'])))
                conn.request("POST",
                             route,
                             body=json.dumps(payload, default=str))
                resp = conn.getresponse()
                if resp.status == 429:
                    # Throttled: back off briefly and let the caller retry.
                    time.sleep(0.2)
                    conn.close()
                    return None
                activation_id = resp.read().decode("utf-8")
                conn.close()

        return activation_id

    def get_runtime_key(self, docker_image_name, runtime_memory):
        """
        Method that creates and returns the runtime key.
        Runtime keys are used to uniquely identify runtimes within the storage,
        in order to know which runtimes are installed and which not.
        """
        action_name = self._format_action_name(docker_image_name,
                                               runtime_memory)
        return os.path.join(self.name, action_name)

    def clean(self):
        """
        Deletes all Lithops Azure Function Apps runtimes.
        """
        logger.debug('Deleting all runtimes')

        for runtime_name, runtime_memory in self.list_runtimes():
            self.delete_runtime(runtime_name, runtime_memory)

    def _generate_runtime_meta(self, docker_image_name, memory):
        """
        Extract installed Python modules from Azure runtime by invoking the
        function with a 'get_preinstalls' payload.
        """
        logger.info(
            "Extracting Python modules from: {}".format(docker_image_name))
        payload = {
            'log_level': logger.getEffectiveLevel(),
            'get_preinstalls': True
        }

        try:
            runtime_meta = self.invoke(docker_image_name,
                                       memory=memory,
                                       payload=payload,
                                       return_result=True)
        except Exception:
            raise Exception("Unable to invoke 'extract-preinstalls' action")

        if not runtime_meta or 'preinstalls' not in runtime_meta:
            raise Exception(runtime_meta)

        logger.debug("Extracted metadata successfully")
        return runtime_meta

    def list_runtimes(self, docker_image_name='all'):
        """
        List all the Azure Function Apps deployed.
        return: Array of tuples (function_name, memory)
        """
        logger.debug('Listing all functions deployed...')

        functions = []
        # NOTE(review): the JMESPath query used to contain a stray backslash
        # ('defaultHostName\"') that corrupted the shell command quoting.
        response = os.popen(
            'az functionapp list --query "[].defaultHostName"').read()
        response = json.loads(response)

        for function in response:
            function = function.replace('.azurewebsites.net', '')
            if docker_image_name == function or docker_image_name == 'all':
                functions.append((function, ''))

        logger.debug('Listed {} functions'.format(len(functions)))
        return functions
# Esempio n. 21
# 0
from dotenv import load_dotenv, find_dotenv

# Load variables from the nearest .env file into the process environment so
# the getenv() calls below resolve local development configuration.
load_dotenv(find_dotenv())

# Pause between queue receive polls.
# NOTE(review): ``type=bool`` with ``default=1`` looks inconsistent for a
# sleep interval -- presumably this getenv() helper coerces values; confirm
# the intended type (int/float seconds) against the helper's definition.
sleep = getenv("AZURE_STORAGE_QUEUE_RECEIVE_SLEEP", type=bool, default=1)

# Try Managed Identity first (when running inside Azure), then fall back to
# the developer's Azure CLI login.
credential = ChainedTokenCredential(
    ManagedIdentityCredential(), AzureCliCredential()
)

queue_service_client = QueueServiceClient(
    account_url=getenv("AZURE_STORAGE_QUEUE_ENDPOINT"), credential=credential
)

# Main work queue (default name: "messages").
queue_client = queue_service_client.get_queue_client(
    queue=getenv("AZURE_STORAGE_QUEUE_NAME", default="messages")
)

# Queue used to synchronize state back to clients (default name: "sync").
client_sync_queue_client = queue_service_client.get_queue_client(
    queue=getenv("AZURE_STORAGE_CLIENT_SYNC_QUEUE_NAME", default="sync")
)

# Cognitive service clients sharing the same credential chain.
fr_client = FormRecognizerClient(
    endpoint=getenv("AZURE_FORM_RECOGNIZER_ENDPOINT"), credential=credential
)

ta_client = TextAnalyticsClient(
    endpoint=getenv("AZURE_TEXT_ANALYTICS_ENDPOINT"), credential=credential
)

secret_client = SecretClient(