def ingest_from_multiple_blobs(self, blobs, delete_sources_on_success, ingestion_properties):
    """Enqueue an ingest command for each of the given azure blobs.

    Parameters
    ----------
    blobs : list of BlobDescriptor
        The list of blobs to be ingested. Please provide the raw blob size
        to each of the descriptors.
    delete_sources_on_success : bool
        After a successful ingest, whether to delete the origin files.
    ingestion_properties : kusto_ingest_client.ingestion_properties.IngestionProperties
        The ingestion properties.
    """
    # The queue list and the authorization context do not depend on the blob
    # being processed; fetch them once instead of once per blob (the original
    # re-queried the resource manager on every iteration).
    queues = self._resource_manager.get_ingestion_queues()
    authorization_context = self._resource_manager.get_authorization_context()
    for blob in blobs:
        # A random queue is picked per blob, presumably to spread load
        # across the available ingestion queues.
        queue_details = random.choice(queues)
        storage_client = CloudStorageAccount(
            queue_details.storage_account_name, sas_token=queue_details.sas)
        queue_service = storage_client.create_queue_service()
        ingestion_blob_info = _IngestionBlobInfo(
            blob, ingestion_properties, delete_sources_on_success, authorization_context)
        ingestion_blob_info_json = ingestion_blob_info.to_json()
        # Azure queue messages carry text; send the JSON payload base64-encoded.
        encoded = base64.b64encode(
            ingestion_blob_info_json.encode("utf-8")).decode("utf-8")
        queue_service.put_message(queue_details.object_name, encoded)
def main():
    """Poll the configured Azure storage queue forever, printing and deleting each message."""
    print("Hello World!")
    try:
        storage_account = CloudStorageAccount(config.STORAGE_ACCOUNT_NAME,
                                              config.STORAGE_ACCOUNT_KEY)
        queue_service = storage_account.create_queue_service()
        # Idempotent: creating an existing queue is a no-op for demo purposes.
        queue_service.create_queue(config.STORAGE_QUEUE_NAME)
        while True:
            try:
                for message in queue_service.get_messages(config.STORAGE_QUEUE_NAME):
                    print('Message for dequeueing is: ', message.content)
                    # Then delete it.
                    # When queue is deleted all messages are deleted, here is
                    # done for demo purposes.
                    # Deleting requires the message id and pop receipt
                    # (returned by get_messages).
                    queue_service.delete_message(config.STORAGE_QUEUE_NAME,
                                                 message.id,
                                                 message.pop_receipt)
                    print('Successfully dequeued message')
            except Exception as e:
                # Keep polling even if a single get/delete round fails.
                print('Error occurred get_messages:', e)
                continue
    except Exception as e:
        print('Error occurred:', e)
def deleteQueue(self, queueName, storageAccountName, storageKey):
    """Delete the named queue from the given storage account, if it exists."""
    logging.info('Attempting deletion of queue: %s', queueName)
    queue_service = CloudStorageAccount(storageAccountName, storageKey).create_queue_service()
    # Nothing to do when the queue is absent; success is only logged after
    # an actual deletion, matching the original behavior.
    if not queue_service.exists(queueName):
        return
    queue_service.delete_queue(queueName)
    logging.info('Successfully deleted queue: %s', queueName)
def createQueue(self, queueName, resourceGroupName, storageAccountName, storageKey, subscriptionId):
    """Create a storage queue, first waiting for the storage account to be available.

    :param queueName: name of the queue to create.
    :param resourceGroupName: resource group containing the storage account.
    :param storageAccountName: target storage account name.
    :param storageKey: access key for the storage account.
    :param subscriptionId: Azure subscription id used for the availability check.
    """
    # BUG FIX: both log messages previously interpolated storageAccountName
    # where the queue name was intended; log the actual queue name, lazily.
    logging.info("Creating new Queue with Name: %s inside Storage Account: %s",
                 queueName, storageAccountName)
    # Retry up to 5 times waiting for the storage account to be ready.
    self.__waitForStorageWithRetry(resourceGroupName, storageAccountName,
                                   subscriptionId, 5)
    account = CloudStorageAccount(storageAccountName, storageKey)
    queue_service = account.create_queue_service()
    queue_service.create_queue(queueName)
    logging.info("Queue created successfully with Name: %s inside Storage Account: %s",
                 queueName, storageAccountName)
def send_message(message):
    """Put *message* on the configured Azure storage queue, creating the queue if needed."""
    cfg = getConfiguration()
    service = CloudStorageAccount(account_name=cfg['account_name'],
                                  account_key=cfg['account_key']).create_queue_service()
    # create_queue is a no-op when the queue already exists.
    service.create_queue(cfg['queue_name'])
    service.put_message(cfg['queue_name'], message)
def __init__(self, account: CloudStorageAccount):
    """Build the queue service and ensure the workflow and event queues exist.

    :param account: storage account used to create the queue service.
    """
    self._logger = logging.getLogger("liteflow.providers.azure")
    self._service = account.create_queue_service()
    # BUG FIX: the original annotation Dict[str] is invalid -- typing.Dict
    # requires both key and value parameters and is evaluated at runtime in
    # an annotated assignment, raising TypeError. Values are queue-name
    # strings; keys are the queue constants (presumably strings -- TODO confirm).
    self._queues: Dict[str, str] = {
        WORKFLOW_QUEUE: "workflow",
        EVENT_QUEUE: "event",
    }
    for queue in self._queues:
        queue_name = self._queues[queue]
        try:
            self._logger.log(logging.DEBUG, f"Creating queue {queue_name}")
            self._service.create_queue(queue_name)
        except Exception:
            # Best-effort creation (e.g. queue already exists) is preserved,
            # but a bare `except: pass` would also swallow KeyboardInterrupt
            # and SystemExit -- narrow to Exception and record the failure.
            self._logger.log(logging.DEBUG, f"Could not create queue {queue_name}")
def ingest_from_blob(self, blob_descriptor: BlobDescriptor, ingestion_properties: IngestionProperties):
    """Enqueue an ingest command from azure blobs.

    To learn more about ingestion methods go to:
    https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-overview#ingestion-methods

    :param azure.kusto.ingest.BlobDescriptor blob_descriptor: An object that contains a description of the blob to be ingested.
    :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
    """
    # Spread load by picking one of the available ingestion queues at random.
    queue_details = random.choice(self._resource_manager.get_ingestion_queues())
    account = CloudStorageAccount(queue_details.storage_account_name,
                                  sas_token=queue_details.sas)
    service = account.create_queue_service()
    auth_context = self._resource_manager.get_authorization_context()
    blob_info = _IngestionBlobInfo(blob_descriptor,
                                   ingestion_properties=ingestion_properties,
                                   auth_context=auth_context)
    # The JSON ingest command is sent as a base64-encoded queue message.
    payload = blob_info.to_json().encode("utf-8")
    service.put_message(queue_name=queue_details.object_name,
                        content=base64.b64encode(payload).decode("utf-8"))
def ingest_from_blob(self, blob, ingestion_properties):
    """Enqueuing an ingest command from azure blobs.

    :param azure.kusto.ingest.BlobDescriptor blob: Descriptor of the blob to
        be ingested. Please provide the raw blob size in the descriptor.
    :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
    """
    # One queue is chosen at random from the available ingestion queues,
    # presumably to spread load across them.
    queues = self._resource_manager.get_ingestion_queues()
    queue_details = random.choice(queues)
    storage_client = CloudStorageAccount(
        queue_details.storage_account_name, sas_token=queue_details.sas)
    queue_service = storage_client.create_queue_service()
    authorization_context = self._resource_manager.get_authorization_context(
    )
    ingestion_blob_info = _IngestionBlobInfo(
        blob, ingestion_properties=ingestion_properties,
        auth_context=authorization_context)
    ingestion_blob_info_json = ingestion_blob_info.to_json()
    # Queue messages carry text: the JSON ingest command is base64-encoded.
    encoded = base64.b64encode(
        ingestion_blob_info_json.encode("utf-8")).decode("utf-8")
    queue_service.put_message(queue_name=queue_details.object_name, content=encoded)
class StorageAccountTest(StorageTestCase):
    """Tests for CloudStorageAccount service factories and account-level SAS."""

    def setUp(self):
        super(StorageAccountTest, self).setUp()
        self.account_name = self.settings.STORAGE_ACCOUNT_NAME
        self.account_key = self.settings.STORAGE_ACCOUNT_KEY
        # Pre-canned (long expired) SAS token; only its propagation into the
        # created service is asserted -- it is never sent to a live service.
        self.sas_token = '?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D'
        self.account = CloudStorageAccount(self.account_name, self.account_key)

    # --Helpers-----------------------------------------------------------------
    def validate_service(self, service, type):
        # A valid service exists, is of the expected class, and carries the
        # account credentials it was created from.
        self.assertIsNotNone(service)
        self.assertIsInstance(service, type)
        self.assertEqual(service.account_name, self.account_name)
        self.assertEqual(service.account_key, self.account_key)

    # --Test cases --------------------------------------------------------
    def test_create_block_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_page_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_page_blob_service()

        # Assert
        self.validate_service(service, PageBlobService)

    def test_create_append_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_append_blob_service()

        # Assert
        self.validate_service(service, AppendBlobService)

    def test_create_queue_service(self):
        # Arrange

        # Act
        service = self.account.create_queue_service()

        # Assert
        self.validate_service(service, QueueService)

    def test_create_file_service(self):
        # Arrange

        # Act
        service = self.account.create_file_service()

        # Assert
        self.validate_service(service, FileService)

    def test_create_service_no_key(self):
        # Arrange

        # Act
        # Empty credentials must be rejected at service-creation time.
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()

        # Assert

    def test_create_account_sas(self):
        # Arrange

        # Act
        sas_account = CloudStorageAccount(self.account_name, sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        # SAS-only accounts produce services with a token but no key.
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)

    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key, self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        # When both key and SAS are supplied, the key takes precedence.
        self.validate_service(service, BlockBlobService)

    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        # The emulator uses the well-known development account.
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)

    @record
    def test_generate_account_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        # Generate a one-hour read-only account SAS for blob objects.
        token = self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = self.account.create_block_blob_service()
        data = b'shared access signature with read permission on blob'
        container_name = 'container1'
        blob_name = 'blob1.txt'

        try:
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, data)

            # Act
            url = service.make_blob_url(
                container_name,
                blob_name,
                sas_token=token,
            )
            # Fetch via raw HTTP to prove the SAS URL works without the key.
            response = requests.get(url)

            # Assert
            self.assertTrue(response.ok)
            self.assertEqual(data, response.content)
        finally:
            service.delete_container(container_name)
def checkQueueExists(self, queueName, storageAccountName, storageKey):
    """Return True when the named queue exists in the given storage account."""
    service = CloudStorageAccount(storageAccountName, storageKey).create_queue_service()
    return service.exists(queueName)
class StorageAccountTest(StorageTestCase):
    """Tests for CloudStorageAccount factories and account-level SAS; runnable
    against a live account or the local storage emulator."""

    def setUp(self):
        super(StorageAccountTest, self).setUp()
        # Pre-canned (long expired) SAS token; only its propagation into the
        # created service is asserted -- it is never sent to a live service.
        self.sas_token = '?sv=2015-04-05&st=2015-04-29T22%3A18%3A26Z&se=2015-04-30T02%3A23%3A26Z&sr=b&sp=rw&sip=168.1.5.60-168.1.5.70&spr=https&sig=Z%2FRHIX5Xcg0Mq2rqI3OlWTjEg2tYkboXr1P9ZUXDtkk%3D'
        if self.settings.IS_EMULATED:
            # Emulator: well-known development-account credentials over http.
            self.account_name = DEV_ACCOUNT_NAME
            self.account_key = DEV_ACCOUNT_KEY
            self.protocol = "http"
            self.account = CloudStorageAccount(DEV_ACCOUNT_NAME, DEV_ACCOUNT_KEY, is_emulated=True)
        else:
            self.account_name = self.settings.STORAGE_ACCOUNT_NAME
            self.account_key = self.settings.STORAGE_ACCOUNT_KEY
            self.account = CloudStorageAccount(self.account_name, self.account_key)
            self.protocol = self.settings.PROTOCOL

    # --Helpers-----------------------------------------------------------------
    def validate_service(self, service, type):
        # A valid service exists, is of the expected class, and carries the
        # account credentials it was created from.
        self.assertIsNotNone(service)
        self.assertIsInstance(service, type)
        self.assertEqual(service.account_name, self.account_name)
        self.assertEqual(service.account_key, self.account_key)

    # --Test cases --------------------------------------------------------
    def test_create_block_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_block_blob_service()

        # Assert
        self.validate_service(service, BlockBlobService)

    def test_create_page_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_page_blob_service()

        # Assert
        self.validate_service(service, PageBlobService)

    def test_create_append_blob_service(self):
        # Arrange

        # Act
        service = self.account.create_append_blob_service()

        # Assert
        self.validate_service(service, AppendBlobService)

    def test_create_queue_service(self):
        # Arrange

        # Act
        service = self.account.create_queue_service()

        # Assert
        self.validate_service(service, QueueService)

    @not_for_emulator
    def test_create_file_service(self):
        # Arrange

        # Act
        service = self.account.create_file_service()

        # Assert
        self.validate_service(service, FileService)

    def test_create_service_no_key(self):
        # Arrange

        # Act
        # Empty credentials must be rejected at service-creation time.
        bad_account = CloudStorageAccount('', '')
        with self.assertRaises(ValueError):
            service = bad_account.create_block_blob_service()

        # Assert

    def test_create_account_sas(self):
        # Arrange

        # Act
        sas_account = CloudStorageAccount(self.account_name, sas_token=self.sas_token)
        service = sas_account.create_block_blob_service()

        # Assert
        # SAS-only accounts produce services with a token but no key.
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, self.account_name)
        self.assertIsNone(service.account_key)
        self.assertEqual(service.sas_token, self.sas_token)

    def test_create_account_sas_and_key(self):
        # Arrange

        # Act
        account = CloudStorageAccount(self.account_name, self.account_key, self.sas_token)
        service = account.create_block_blob_service()

        # Assert
        # When both key and SAS are supplied, the key takes precedence.
        self.validate_service(service, BlockBlobService)

    def test_create_account_emulated(self):
        # Arrange

        # Act
        account = CloudStorageAccount(is_emulated=True)
        service = account.create_block_blob_service()

        # Assert
        # The emulator uses the well-known development account.
        self.assertIsNotNone(service)
        self.assertEqual(service.account_name, 'devstoreaccount1')
        self.assertIsNotNone(service.account_key)

    @record
    def test_generate_account_sas(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        # Generate a one-hour read-only account SAS for blob objects.
        token = self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = self.account.create_block_blob_service()
        data = b'shared access signature with read permission on blob'
        container_name = self.get_resource_name("container")
        blob_name = 'blob1.txt'

        try:
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, data)

            # Act
            url = service.make_blob_url(
                container_name,
                blob_name,
                sas_token=token,
            )
            # Fetch via raw HTTP to prove the SAS URL works without the key.
            response = requests.get(url)

            # Assert
            self.assertTrue(response.ok)
            self.assertEqual(data, response.content)
        finally:
            service.delete_container(container_name)

    @record
    def test_account_sas_with_question_mark_prefix(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        # A leading '?' on the token must be tolerated by the service client.
        token = '?' + self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT + ResourceTypes.CONTAINER,
            AccountPermissions.READ + AccountPermissions.WRITE + AccountPermissions.DELETE + AccountPermissions.CREATE,
            datetime.utcnow() + timedelta(hours=1),
        )

        service = BlockBlobService(self.account_name, sas_token=token, is_emulated=self.settings.IS_EMULATED)
        data = b'shared access signature with read/write permission on blob'
        container_name = self.get_resource_name("container")
        blob_name = 'blob1.txt'

        try:
            # Act
            service.create_container(container_name)
            service.create_blob_from_bytes(container_name, blob_name, data)
            blob = service.get_blob_to_bytes(container_name, blob_name)

            # Assert
            self.assertIsNotNone(blob)
            self.assertEqual(data, blob.content)
        finally:
            service.delete_container(container_name)

    @record
    def test_generate_account_sas_with_multiple_permissions(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Validate both + and | generate the same permissions
        permissions = AccountPermissions.READ + AccountPermissions.WRITE
        permissions_or = AccountPermissions.READ | AccountPermissions.WRITE
        self.assertEqual(str(permissions), str(permissions_or))

        # Arrange
        token = self.account.generate_shared_access_signature(
            Services.BLOB,
            ResourceTypes.OBJECT,
            permissions,
            datetime.utcnow() + timedelta(hours=1),
        )

        service_with_key = self.account.create_block_blob_service()
        service_with_sas = BlockBlobService(
            account_name=self.account_name, sas_token=token, is_emulated=self.settings.IS_EMULATED)
        data = b'shared access signature with read/write permission on blob'
        container_name = self.get_resource_name("container")
        blob_name = 'blob1.txt'

        try:
            # Act Write
            service_with_key.create_container(container_name)
            # wait a few seconds to allow the container to be created
            self.sleep(5)
            resp = service_with_sas.create_blob_from_text(
                container_name, blob_name, data)

            # Assert Write
            self.assertIsNotNone(resp.etag)
            self.assertIsNotNone(resp.last_modified)

            # Act Read
            blob = service_with_sas.get_blob_to_bytes(container_name, blob_name)

            # Assert Read
            self.assertIsNotNone(blob.content)
            self.assertEqual(data, blob.content)
        finally:
            service_with_key.delete_container(container_name)

    @record
    def test_generate_account_sas_with_multiple_services(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        # A single token covering both the blob and table services.
        token = self.account.generate_shared_access_signature(
            Services.BLOB + Services.TABLE,
            ResourceTypes.SERVICE + ResourceTypes.OBJECT,
            AccountPermissions.READ + AccountPermissions.WRITE,
            datetime.utcnow() + timedelta(hours=1),
        )
        # 'ss=bt' encodes "signed services: blob + table".
        self.assertTrue('ss=bt' in token)

        # Act Table
        # this needs to be hard coded as the table package is no longer maintained here
        url = '{}://{}/?restype=service&comp=properties&{}'.format(
            self.protocol,
            self.account_name + ".table.core.windows.net"
            if not self.settings.IS_EMULATED else "127.0.0.1:10002/" + DEV_ACCOUNT_NAME,
            token,
        )
        response = requests.get(url)

        # Assert Table
        self.assertTrue(response.ok)

        # Act Blob
        service_with_key = self.account.create_block_blob_service()
        service_with_sas = BlockBlobService(
            account_name=self.account_name, sas_token=token, is_emulated=self.settings.IS_EMULATED)
        data = b'shared access signature with read/write permission on blob'
        container_name = self.get_resource_name("container")
        blob_name = 'blob1.txt'

        try:
            # Act Write
            service_with_key.create_container(container_name)
            resp = service_with_sas.create_blob_from_text(
                container_name, blob_name, data)

            # Assert Write
            self.assertIsNotNone(resp.etag)
            self.assertIsNotNone(resp.last_modified)

            # Act Read
            blob = service_with_sas.get_blob_to_bytes(container_name, blob_name)

            # Assert Read
            self.assertIsNotNone(blob.content)
            self.assertEqual(data, blob.content)
        finally:
            service_with_key.delete_container(container_name)