class QueueBase(object):
    """Wraps basic Azure Storage Queue operations (put, get, delete)."""

    def __init__(self):
        super(QueueBase, self).__init__()
        self.queue_service = QueueService(
            account_name='boburstorage',
            account_key='wRgukLsyhLtnI7qEk8mSGnIBC+IsiTTXEDF1/xnmBGDudJLSeYdtyuVzuSN5/cplJz88AJPyoVyjCmL9N1ECXw=='
        )

    def save_message_to_queue(self, queue, data):
        self.queue_service.put_message(queue, data)
        print('url added into queue...\n')

    def get_data_from_url(self):
        print('reading data from blob using url in queue...\n')
        data = self.queue_service.get_messages('taskqueue')
        response = urllib2.urlopen(data[0].content)
        numbers = response.read()
        self.queue_service.delete_message('taskqueue', data[0].id, data[0].pop_receipt)
        return numbers.splitlines()

    def get_messages_from_queue(self, queue):
        return self.queue_service.get_messages(queue)

    def delete_message_from_queue(self, queue, message):
        self.queue_service.delete_message(queue, message.id, message.pop_receipt)
class SimulationListener(Thread):
    def __init__(self):
        Thread.__init__(self)
        self._quit = Event()
        self.daemon = True
        self.log = logging.getLogger(__name__)
        settings = Settings()
        self.create_queue = 'create-simulated-machine'
        self.destroy_queue = 'destroy-simulated-machine'
        self.queue_service = QueueService(
            account_name=settings.azure_queue_account,
            account_key=settings.azure_queue_key
        )
        self.queue_service.create_queue(self.create_queue)
        self.queue_service.create_queue(self.destroy_queue)

    def run(self):
        # dislike of unstoppable threads
        while not self._quit.is_set():
            try:
                messages = self.queue_service.get_messages(self.create_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Creating: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.post("http://localhost:8080/machines", machine_json)
                    self.queue_service.delete_message(self.create_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                self.log.error(e.message)

            try:
                messages = self.queue_service.get_messages(self.destroy_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Deleting: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.delete("http://localhost:8080/machines/" + machine["MachineId"])
                    self.queue_service.delete_message(self.destroy_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                self.log.error(e.message)

            sleep(1)

    def quit(self):
        self._quit.set()
def main():
    # Check the argument count before indexing into sys.argv, so the usage
    # message can actually be printed when arguments are missing.
    if len(sys.argv) < 4:
        print("usage: get_message.py storage_account_name queue_name destination_folder")
        sys.exit(1)
    storage_account_name = str(sys.argv[1])
    queue_name = str(sys.argv[2])
    destination_folder = str(sys.argv[3])

    base64_regex = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)?$")
    key_file = "{0}/{1}/acc_key.txt".format(destination_folder, storage_account_name)
    try:
        with open(key_file, "r") as f:
            account_key = f.read()
    except IOError:
        print("file not found or empty ", key_file)
        sys.exit(1)  # without a key we cannot build the service client

    queue_service = QueueService(storage_account_name, account_key)
    metadata = queue_service.get_queue_metadata(queue_name)
    approximate_message_count = metadata.approximate_message_count
    print('fetching approximately {0} messages'.format(approximate_message_count))

    messages = queue_service.get_messages(queue_name)
    count = len(messages)
    total = 0
    while count > 0:
        for message in messages:
            if base64_regex.match(message.content):
                msg = base64.b64decode(message.content)  # Received message is encoded in base 64
                data = json.loads(msg.decode('utf8'))
            else:
                data = json.loads(message.content)
            filename, folder = get_file_name(storage_account_name, queue_name, destination_folder)
            if not os.path.exists(folder):
                os.makedirs(folder)
            with open(r'{0}/{1}'.format(folder, filename), 'w') as f:
                json.dump(data, f, indent=2)
            queue_service.delete_message(queue_name, message.id, message.pop_receipt)
            count -= 1
            total += 1
        print('successfully processed {0} messages of ~{1} from {2} queue {3}'.format(
            total, approximate_message_count, storage_account_name, queue_name))
        messages = queue_service.get_messages(queue_name)
        count = len(messages)
class AzureProvider(BaseProvider):
    """Queue provider for Microsoft Azure."""

    def __init__(self, account_name, account_key):
        self.queue_service = QueueService(
            account_name=account_name,
            account_key=account_key
        )

    def put_message(self, queue_name: str, message: str):
        self.queue_service.put_message(
            queue_name=queue_name,
            message_text=message,
        )

    def get_message(self, queue_name: str):
        try:
            queue_message = self.queue_service.get_messages(
                queue_name=queue_name,
                numofmessages=1
            ).queue_messages[0]
        except IndexError:
            return {}
        else:
            # Expose all public attributes of the SDK message as a plain dict.
            return {
                attr_name: getattr(queue_message, attr_name)
                for attr_name in dir(queue_message)
                if not attr_name.startswith('_')
            }

    def delete_message(self, queue_name: str, message: dict):
        message_id = message.get('message_id')
        pop_receipt = message.get('pop_receipt')
        self.queue_service.delete_message(
            queue_name,
            message_id=message_id,
            popreceipt=pop_receipt,
        )
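A minimal usage sketch for the provider above. The credentials and queue name are placeholders, and the dictionary keys mirror whatever attributes the legacy SDK's message object exposes (message_id, message_text, pop_receipt):

# Hedged example: placeholder credentials and queue name, legacy-SDK field names assumed.
provider = AzureProvider(account_name='myaccount', account_key='<key>')
provider.put_message('jobs', 'hello')

msg = provider.get_message('jobs')        # returns {} when the queue is empty
if msg:
    print(msg.get('message_text'))        # message body, per the legacy SDK's field name
    provider.delete_message('jobs', msg)  # uses message_id and pop_receipt from the dict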
class AzureQueue(object):
    def __init__(self, queue_name):
        self.conn = QueueService(account_name=os.getenv('AZURE_ACCOUNT_NAME'),
                                 account_key=os.getenv('AZURE_ACCOUNT_KEY'))
        self.queue_name = queue_name
        self.conn.create_queue(queue_name)
        self.conn.encode_function = QueueMessageFormat.binary_base64encode
        self.conn.decode_function = QueueMessageFormat.binary_base64decode

    def enqueue(self, func, *args, **kwargs):
        task = SimpleTask(func, *args, **kwargs)
        serialized_task = pickle.dumps(task, protocol=pickle.HIGHEST_PROTOCOL)
        self.conn.put_message(self.queue_name, serialized_task)
        return task.id

    def dequeue(self):
        messages = self.conn.get_messages(self.queue_name)
        if len(messages) == 1:
            serialized_task = messages[0]
            task = pickle.loads(serialized_task.content)
            self.conn.delete_message(self.queue_name, serialized_task.id,
                                     serialized_task.pop_receipt)
            return task

    def get_length(self):
        metadata = self.conn.get_queue_metadata(self.queue_name)
        return metadata.approximate_message_count
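A round-trip sketch for the pickle-backed AzureQueue above, assuming SimpleTask (not shown in the snippet) stores the callable plus its arguments and exposes func/args/kwargs attributes, and that the AZURE_ACCOUNT_* environment variables hold valid credentials:

# Hedged example: SimpleTask attribute names are assumptions.
queue = AzureQueue('tasks')
task_id = queue.enqueue(sum, [1, 2, 3])  # pickled and placed on the queue
task = queue.dequeue()                   # SimpleTask instance, or None if the queue is empty
if task is not None:
    result = task.func(*task.args, **task.kwargs)  # assumed SimpleTask attributes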
def test_sas_process(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recording_file(self.test_mode):
        return

    # Arrange
    queue_name = self._create_queue()
    self.qs.put_message(queue_name, u'message1')
    token = self.qs.generate_queue_shared_access_signature(
        queue_name,
        QueuePermissions.PROCESS,
        datetime.utcnow() + timedelta(hours=1),
    )

    # Act
    service = QueueService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=token,
    )
    self._set_service_options(service, self.settings)
    result = service.get_messages(queue_name)

    # Assert
    self.assertIsNotNone(result)
    self.assertEqual(1, len(result))
    message = result[0]
    self.assertIsNotNone(message)
    self.assertNotEqual('', message.id)
    self.assertEqual(u'message1', message.content)
def sas_with_signed_identifiers(self):
    queue_name = self._create_queue()
    self.service.put_message(queue_name, u'message1')

    # Set access policy on queue
    access_policy = AccessPolicy(permission=QueuePermissions.PROCESS,
                                 expiry=datetime.utcnow() + timedelta(hours=1))
    identifiers = {'id': access_policy}
    acl = self.service.set_queue_acl(queue_name, identifiers)

    # Wait 30 seconds for acl to propagate
    time.sleep(30)

    # Indicates to use the access policy set on the queue
    token = self.service.generate_queue_shared_access_signature(
        queue_name,
        id='id'
    )

    # Create a service and use the SAS
    sas_service = QueueService(
        account_name=self.account.account_name,
        sas_token=token,
    )

    messages = sas_service.get_messages(queue_name)
    for message in messages:
        print(message.content)  # message1

    self.service.delete_queue(queue_name)
def test_sas_process(self):
    # SAS URL is calculated from storage key, so this test runs live only
    if TestMode.need_recordingfile(self.test_mode):
        return

    # Arrange
    self.qs.put_message(self.test_queues[0], 'message1')
    token = self.qs.generate_shared_access_signature(
        self.test_queues[0],
        self._get_shared_access_policy(QueueSharedAccessPermissions.PROCESS),
    )

    # Act
    service = QueueService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=token,
    )
    self._set_service_options(service, self.settings)
    result = service.get_messages(self.test_queues[0])

    # Assert
    self.assertIsNotNone(result)
    self.assertEqual(1, len(result))
    message = result[0]
    self.assertIsNotNone(message)
    self.assertNotEqual('', message.message_id)
    self.assertEqual('message1', message.message_text)
def getmessage():
    # Get messages from Azure queue.
    queue_service = QueueService(account_name=azurestoracct, account_key=azurequeuekey)
    messages = queue_service.get_messages(azurequeue, num_messages=5)
    return messages
def render_video(request):
    template = loader.get_template('app/render_video.html')
    vidstatus = 'No Video Found.'

    queue_service = QueueService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'],
                                 account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
    messages = queue_service.get_messages(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'],
                                          num_messages=1, visibility_timeout=1*60)

    for message in messages:
        vidstatus = 'Queued for Rendering: ' + message.content
        message_obj = json.loads(message.content)

        access_token = ams_authenticate()['access_token']
        asset = ams_post_request(access_token, "Assets", {
            'Name': message_obj['filename'],
            'AlternateId': message_obj['folder']})
        asset_container = urllib.parse.urlparse(asset['Uri']).path[1:]

        asset_file = ams_post_request(access_token, "Files", {
            'IsEncrypted': 'false',
            'IsPrimary': 'false',
            'MimeType': 'video/mp4',
            'ContentFileSize': message_obj['size'],
            'Name': message_obj['filename'],
            'ParentAssetId': asset['Id']})

        block_blob_service = BlockBlobService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'],
                                              account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
        from_url = block_blob_service.make_blob_url(os.environ['SVPD_STORAGE_ACCOUNT_UPLOADED'],
                                                    message_obj['folder'] + '/' + message_obj['filename'])
        block_blob_service.copy_blob(asset_container, message_obj['filename'], from_url)

        job = ams_verbose_post_request(access_token, "Jobs", {
            'Name': message_obj['filename'],
            'InputMediaAssets': [{
                '__metadata': {'uri': os.environ['AMS_API_ENDPOINT'] + 'Assets(\'' + asset['Id'] + '\')'}
            }],
            'Tasks': [{
                'Name': 'Adaptive Streaming Task',
                'Configuration': 'Adaptive Streaming',
                'MediaProcessorId': 'nb:mpid:UUID:ff4df607-d419-42f0-bc17-a481b1331e56',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - MES v1.1" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(0)</outputAsset></taskBody>'
            }, {
                'Name': 'Indexing Task',
                'Configuration': '<?xml version="1.0" encoding="utf-8"?><configuration version="2.0"><input><metadata key="title" value="blah" /></input><settings></settings><features><feature name="ASR"><settings><add key="Language" value="English" /><add key="GenerateAIB" value="False" /><add key="GenerateKeywords" value="True" /><add key="ForceFullCaption" value="False" /><add key="CaptionFormats" value="ttml;sami;webvtt" /></settings></feature></features></configuration>',
                'MediaProcessorId': 'nb:mpid:UUID:233e57fc-36bb-4f6f-8f18-3b662747a9f8',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - Indexed" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(1)</outputAsset></taskBody>'
            }]
        })

        queue_service.put_message(os.environ['SVPD_STORAGE_ACCOUNT_ENCODING'], json.dumps({
            'filename': message_obj['filename'],
            'folder': message_obj['folder'],
            'size': message_obj['size'],
            'job': job['d']}))
        queue_service.delete_message(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'],
                                     message.id, message.pop_receipt)

    return HttpResponse(template.render({'vidstatus': vidstatus}, request))
def consume(args):
    # Get the service resources
    queue_service = QueueService(
        account_name=os.environ.get('QUEUE_STORAGE_ACCOUNT'),
        account_key=os.environ.get('QUEUE_STORAGE_ACCESS_KEY'))
    queue_service.decode_function = QueueMessageFormat.binary_base64decode
    logger.debug('Listening for messages on {}'.format(args.azure_queue))

    while True:
        messages = queue_service.get_messages(
            args.azure_queue,
            num_messages=16,
            visibility_timeout=args.wait_time)
        for message in messages:
            try:
                process(args, message)
            except AzureException as e:
                logger.exception(e)
            if not args.dry_run:
                logger.debug('deleting message {}'.format(message.id))
                queue_service.delete_message(args.azure_queue, message.id, message.pop_receipt)
        time.sleep(args.wait_time)
def scrapping():
    queue_service = QueueService(account_name=account_name, account_key=account_key)
    while True:
        messages = queue_service.get_messages('monitoring')
        if messages:
            for message in messages:
                print(f"Receiving user {message.content} from monitoring service")
                queue_service.delete_message('monitoring', message.id, message.pop_receipt)
        time.sleep(30)
class AzureStorageQueue(Queue):
    """Interface for interacting with an Azure Storage Queue (through the
    Queue contract)"""

    def __init__(self, queue_name, config: AzureStorageConfig):
        """Initializes the storage queue.

        :param queue_name: The name of the queue to access. If a queue with
        this name doesn't already exist on the storage account, the queue
        will be created on the first operation.
        :param config: AzureStorageConfig with a valid account name and
        account key
        """
        self._queue_name = queue_name
        self._queue_service = QueueService(account_name=config.account_name,
                                           account_key=config.account_key)
        self._queue_service.encode_function = \
            QueueMessageFormat.text_base64encode
        self._queue_service.decode_function = \
            QueueMessageFormat.text_base64decode

    def push(self, message):
        """Pushes a new message onto the queue."""
        self._queue_service.create_queue(self._queue_name)
        self._queue_service.put_message(self._queue_name, message)

    def pop(self):
        """Pops the first message from the queue and returns it."""
        self._queue_service.create_queue(self._queue_name)

        # get_messages prevents another client from getting the message
        # before we've had a chance to delete it. The visibility_timeout
        # prevents the message from being seen by other clients
        # for X seconds.
        messages = self._queue_service.get_messages(self._queue_name,
                                                    visibility_timeout=30)
        for message in messages:
            result = message.content
            self._queue_service.delete_message(self._queue_name,
                                               message.id,
                                               message.pop_receipt)
            return result

    def peek(self):
        """Peeks the first message from the queue and returns it."""
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.peek_messages(self._queue_name)
        for message in messages:
            return message.content

    @staticmethod
    def create(queue_name: str):
        """Helper function for creating an Azure Storage Queue from the
        storage_config property defined inside of AzureConfig."""
        azure_config = AzureConfig()
        return AzureStorageQueue(queue_name, azure_config.storage_config)
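A short sketch of the Queue contract above in use; the AzureStorageConfig constructor arguments are assumptions made for illustration:

# Hedged example: AzureStorageConfig fields assumed, credentials are placeholders.
config = AzureStorageConfig(account_name='myaccount', account_key='<key>')
queue = AzureStorageQueue('work-items', config)
queue.push('first')
print(queue.peek())  # 'first' -- peeking leaves the message on the queue
print(queue.pop())   # 'first' -- read, deleted, and hidden from other clients while in flight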
class CheckinQueueClient(object):
    """Obtains next checkin object from queue"""
    queueName = 'checkinqueue'

    def __init__(self, account_name, account_key):
        self._queueService = QueueService(account_name, account_key)

    def startCheckinProcess(self, processData):
        # `self` was missing from the signature, and the class attribute must
        # be accessed through the instance.
        while True:
            message = self._queueService.get_messages(self.queueName,
                                                      num_messages=1,
                                                      visibility_timeout=30)
            processData(message)
def get_new_proxy():
    queue_service = QueueService(account_name=account_name, account_key=account_key)
    messages = queue_service.get_messages('proxies')
    if messages:
        for message in messages:
            new_proxy = message.content
            # Rotate: remove the proxy from the head and re-append it to the back.
            queue_service.delete_message('proxies', message.id, message.pop_receipt)
            queue_service.put_message('proxies', new_proxy)
            return new_proxy
class RegistrationQueueClient(object):
    """Obtains next registration object from queue"""
    queueName = 'registrationsqueue'

    def __init__(self, account_name, account_key):
        self._queueService = QueueService(account_name, account_key)
        print("RegistrationQueue Initialized")

    def LookupTicket(self, processData):
        # get_messages returns a list, even when num_messages=1.
        messages = self._queueService.get_messages(self.queueName, num_messages=1)
        for message in messages:
            if processData(message):
                self._queueService.delete_message(self.queueName,
                                                  message.message_id,
                                                  message.pop_receipt)
def counting():
    queue_service = QueueService(account_name=account_name, account_key=account_key)
    while True:
        messages = queue_service.get_messages('newuser')
        if messages:
            for message in messages:
                print(f"Receiving {message.content} from newuser channel...")
                add_new_user(message.content)
                print(f"Deleting {message.content} from queue of users...")
                queue_service.delete_message('newuser', message.id, message.pop_receipt)
                new_proxy = get_new_proxy()
                print(f"Receiving proxy {new_proxy} from queue of proxies...")
        time.sleep(30)
class AzureQueue(object):
    def __init__(self, account_name, account_key, queue_name):
        self.queue_name = queue_name
        self.queue_service = QueueService(account_name=account_name,
                                          account_key=account_key)
        self.queue_service.create_queue(self.queue_name)

    def put_message_into_queue(self, content) -> QueueMessage:
        """
        Publishes a message with `content`
        :param content: The queue message
        :returns: A QueueMessage that has the message as well as metadata
        :rtype: QueueMessage
        """
        return self.queue_service.put_message(self.queue_name, content)

    def get_messages(self) -> list:
        """
        Retrieves the messages that have been published into the queue
        :returns: List of queue messages
        :rtype: list
        """
        return self.queue_service.get_messages(self.queue_name)

    def delete_message_from_queue(self, message_id, pop_receipt):
        self.queue_service.delete_message(self.queue_name, message_id, pop_receipt)

    def get_message_count(self):
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        return queue_metadata.approximate_message_count

    def delete(self):
        return self.queue_service.delete_queue(self.queue_name)

    def empty(self):
        # `self.` was missing on the service call here.
        messages = self.queue_service.get_messages(self.queue_name,
                                                   num_messages=BATCH_NUMBER,
                                                   visibility_timeout=TIMEOUT_IN_SECONDS)
        for message in messages:
            self.queue_service.delete_message(self.queue_name, message.id, message.pop_receipt)
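A brief usage sketch for the AzureQueue wrapper above; credentials and queue name are placeholders:

# Hedged example: placeholder credentials.
queue = AzureQueue(account_name='myaccount', account_key='<key>', queue_name='events')
queue.put_message_into_queue('payload')
for msg in queue.get_messages():
    print(msg.content)
    queue.delete_message_from_queue(msg.id, msg.pop_receipt)
print(queue.get_message_count())  # approximate count reported by the service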
def queue():
    account_name = config.STORAGE_ACCOUNT_NAME
    account_key = config.STORAGE_ACCOUNT_KEY
    queue_service = QueueService(account_name=account_name, account_key=account_key)

    print("Creating task queue")
    task_queue_name = config.TASK_QUEUE_NAME
    queue_service.create_queue(task_queue_name)
    print("Task queue created")

    queue_service.put_message(task_queue_name, u'message1')
    messages = queue_service.get_messages(task_queue_name, num_messages=16)
    for message in messages:
        print(message.content)
        queue_service.delete_message(task_queue_name, message.id, message.pop_receipt)
class MoeHandler():
    def __init__(self, api, generator, queue_name, conn_str):
        self.api = api
        self.gen_util = Generate_util(generator)
        self.queue_name = queue_name
        self.queue_service = QueueService(connection_string=conn_str)
        self.queue_service.create_queue(queue_name)

    # Loops forever, reading messages and sending them to the generator
    # utility for parsing, then replies with the generated image.
    def read_messages(self):
        while 1:
            try:
                messages = self.queue_service.get_messages(self.queue_name)
                if not messages:
                    time.sleep(2)  # We'll wait a few seconds and check again.
                    continue
                for message in messages:
                    event = json.loads(message.content)
                    if event['msg_type'] == 'tweet':
                        self.handle_tweet(event)
                    elif event['msg_type'] == 'dm':
                        self.handle_dm(event)
                    self.queue_service.delete_message(
                        self.queue_name, message.id, message.pop_receipt)
            except Exception as e:
                print("Exception handling messages")
                print(e)

    def handle_dm(self, event):
        imgpth = self.gen_util.parse_msg(event['text'])
        media = self.api.media_upload(imgpth)
        self.api.send_direct_message(
            recipient_id=event['user_id'],
            text="Here's your generated anime girl!",
            attachment_type='media',
            attachment_media_id=media.media_id)
        return

    def handle_tweet(self, event):
        imgpth = self.gen_util.parse_msg(event['text'])
        self.api.update_with_media(imgpth,
                                   status="Here's your generated anime girl!",
                                   in_reply_to_status_id=event['respond_id'],
                                   auto_populate_reply_metadata=True)
        return
class TimeLineWorker:
    QUEUE_KEY = ''
    ACCOUNT_NAME = ''
    QUEUE_NAME = ''

    def __init__(self):
        self.queue_service = QueueService(account_name=TimeLineWorker.ACCOUNT_NAME,
                                          account_key=TimeLineWorker.QUEUE_KEY)

    def insert_message(self):
        obj = {
            "message": "test message",
            "other_key": 10
        }
        message = unicode(json.dumps(obj))
        self.queue_service.put_message(TimeLineWorker.QUEUE_NAME, message)

    def get_next_message(self):
        messages = self.queue_service.get_messages(TimeLineWorker.QUEUE_NAME)
        for message in messages:
            print message.content
            self.queue_service.delete_message(TimeLineWorker.QUEUE_NAME,
                                              message.id, message.pop_receipt)
def queue_sas(self):
    queue_name = self._create_queue()
    self.service.put_message(queue_name, u'message1')

    # Access only to the messages in the given queue
    # Process permissions to access messages
    # Expires in an hour
    token = self.service.generate_queue_shared_access_signature(
        queue_name,
        QueuePermissions.PROCESS,
        datetime.utcnow() + timedelta(hours=1),
    )

    # Create a service and use the SAS
    sas_service = QueueService(
        account_name=self.account.account_name,
        sas_token=token,
    )

    messages = sas_service.get_messages(queue_name)
    for message in messages:
        print(message.content)  # message1

    self.service.delete_queue(queue_name)
# NOTE: the opening of this snippet was truncated in the source; the first two
# entries of `service_keys` are reconstructed below, and the STOR_ACC_*
# environment variable names are assumptions made for illustration.
service_keys = {
    'stor_acc_name': os.environ['STOR_ACC_NAME'],
    'stor_acc_key': os.environ['STOR_ACC_KEY'],
    'redis_pass': os.environ['REDIS_PASS'],
    'redis_server': os.environ['REDIS_SERVER'],
    'sms_user': os.environ['SMS_USER'],
    'sms_pass': os.environ['SMS_PASS']
}

stor_acc_name = service_keys['stor_acc_name']
stor_acc_key = service_keys['stor_acc_key']
redis_pass = service_keys['redis_pass']
redis_server = service_keys['redis_server']
sms_user = service_keys['sms_user']
sms_pass = service_keys['sms_pass']

# storage
queue_service = QueueService(account_name=stor_acc_name, account_key=stor_acc_key)

# redis
r = redis.StrictRedis(host=redis_server, port=6380, db=0, password=redis_pass, ssl=True)

while True:
    messages = queue_service.get_messages('taskqueue', numofmessages=16, visibilitytimeout=5*60)
    for message in messages:
        d = json.loads(message.message_text)
        suffix = d['suffix']
        mobile = r.get(suffix)
        image = d['image']
        odp = str(image)
        payload = {'username': sms_user, 'password': sms_pass, 'from': 'Alert',
                   'to': mobile, 'message': odp}
        post = requests.post('https://api.smsapi.pl/sms.do', data=payload)
        queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt)
class AzureService(object):
    VISIBILITY_TIMEOUT = 5*60

    def __init__(self, connection_string, container_name, queue_get, queue_push, logger=None):
        self.ctnname = container_name
        self.getname = queue_get
        self.pushname = queue_push
        self.qs = QueueService(connection_string=connection_string,
                               protocol='https',
                               # endpoint_suffix='core.windows.net'
                               )
        self.bs = BlockBlobService(connection_string=connection_string)
        self.qs.create_queue(self.getname, timeout=1)
        self.qs.create_queue(self.pushname, timeout=1)
        self.bs.create_container(self.ctnname, timeout=1)
        if logger:
            logger.info('Init Azure success')

    def pushMessage(self, message, qname=None, logger=None):
        if qname is None:
            qname = self.pushname
        try:
            self.qs.put_message(qname, message)
        except Exception as e:
            if logger:
                logger.exception('ERROR PUSH MESSAGE ')
            else:
                print 'ERROR PUSH MESSAGE '
                print e

    def getMessage(self, qname=None, num=1, logger=None):
        if qname is None:
            qname = self.getname
        try:
            message = self.qs.get_messages(qname, num, visibility_timeout=self.VISIBILITY_TIMEOUT)
        except Exception as e:
            if logger:
                logger.exception('ERROR GET MESSAGE ')
            else:
                print 'ERROR GET MESSAGE '
                print e
            return []
        return message

    def getReceiptInfo(self, logger=None):
        message = self.getMessage(logger=logger)
        if len(message) > 0:
            rinfo = ReceiptSerialize.fromjson(message[0].content)
            return message[0], rinfo
        else:
            return None, None

    def count(self):
        metadata_get = self.qs.get_queue_metadata(self.getname)
        metadata_push = self.qs.get_queue_metadata(self.pushname)
        generator = self.bs.list_blobs(self.ctnname)
        bc = 0
        for blob in generator:
            bc += 1
        return {'get_count': metadata_get.approximate_message_count,
                'push_count': metadata_push.approximate_message_count,
                'blob_count': bc}

    def uploadFolder(self, folderpath, logger):
        for filename in os.listdir(folderpath):
            if len(filename) > 4:
                suffix = filename[-4:].upper()
            else:
                continue
            if '.JPG' == suffix or 'JPEG' == suffix:
                receipt_metadata = ReceiptSerialize()
                receipt_metadata.receiptBlobName = unicode(filename, 'utf-8')
                self.qs.put_message(self.getname, b64encode(receipt_metadata.toString()).decode('utf-8'))
                self.bs.create_blob_from_path(self.ctnname, receipt_metadata.receiptBlobName,
                                              os.path.join(folderpath, filename),
                                              max_connections=2, timeout=None)
                logger.info('upload %s', filename)

    def getImage(self, imgname, logger=None):
        localpath = os.path.join(args.download_dir, imgname)
        try:
            self.bs.get_blob_to_path(self.ctnname, imgname, localpath)
        except AzureMissingResourceHttpError as e:
            if logger:
                logger.error('Blob named ' + imgname + ' does not exist.', exc_info=True)
            else:
                print 'Blob named ' + imgname + ' does not exist.'
                print e
            return ''
        except Exception as e:
            if logger:
                logger.error('Exception while getting blob.', exc_info=True)
            else:
                print 'Exception while getting blob.'
                print e
            return None
        return localpath

    def deleteMessage(self, message, qname=None, logger=None):
        if qname is None:
            qname = self.getname
        try:
            self.qs.delete_message(qname, message.id, message.pop_receipt)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE MESSAGE ')
            else:
                print 'ERROR DELETE MESSAGE '
                print e

    def deleteImage(self, imgname, logger=None):
        try:
            self.bs.delete_blob(self.ctnname, imgname)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE IMAGE ')
            else:
                print 'ERROR DELETE IMAGE '
                print e

    def cleanUp(self):
        count = 0
        print('deleted: ')
        while True:
            messages = self.qs.get_messages(self.getname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.getname, message.id, message.pop_receipt)
            if len(messages) == 0:
                break
        print(str(count) + ' from queue-get')

        count = 0
        while True:
            messages = self.qs.get_messages(self.pushname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.pushname, message.id, message.pop_receipt)
            if len(messages) == 0:
                break
        print(str(count) + ' from queue-push')

        count = 0
        generator = self.bs.list_blobs(self.ctnname)
        for blob in generator:
            count += 1
            self.bs.delete_blob(self.ctnname, blob.name)
        print(str(count) + ' from container')
class StorageQueueContext():
    """Initializes the repository with the specified settings dict.
        Required settings in config dict are:
        - AZURE_STORAGE_NAME
        - AZURE_STORAGE_KEY
    """

    _models = []
    _service = None
    _storage_key = ''
    _storage_name = ''

    def __init__(self, **kwargs):
        self._storage_name = kwargs.get('AZURE_STORAGE_NAME', '')
        self._storage_key = kwargs.get('AZURE_STORAGE_KEY', '')

        """ service init """
        if self._storage_key != '' and self._storage_name != '':
            self._service = QueueService(account_name=self._storage_name,
                                         account_key=self._storage_key,
                                         protocol='https')

        """ registered models """
        self._models = []

        """ encrypt queue service """
        if kwargs.get('AZURE_REQUIRE_ENCRYPTION', False):
            # Create the KEK used for encryption.
            # KeyWrapper is the provided sample implementation, but the user may use
            # their own object as long as it implements the interface above.
            kek = KeyWrapper(kwargs.get('AZURE_KEY_IDENTIFIER', 'otrrentapi'),
                             kwargs.get('SECRET_KEY', 'super-duper-secret'))  # Key identifier

            # Create the key resolver used for decryption.
            # KeyResolver is the provided sample implementation, but the user may use
            # whatever implementation they choose so long as the function set on the
            # service object behaves appropriately.
            key_resolver = KeyResolver()
            key_resolver.put_key(kek)

            # Set the require encryption, KEK and key resolver on the service object.
            self._service.require_encryption = True
            self._service.key_encryption_key = kek
            self._service.key_resolver_function = key_resolver.resolve_key

    def __create__(self, queue) -> bool:
        if not self._service is None:
            try:
                self._service.create_queue(queue)
                return True
            except AzureException as e:
                log.error('failed to create {} with error {}'.format(queue, e))
                return False
        else:
            return True

    def register_model(self, storagemodel: object):
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if not modelname in self._models:
                self.__create__(storagemodel._queuename)
                self._models.append(modelname)
                log.info('model {} registered successfully. Models are {!s}'.format(modelname, self._models))

    def put(self, storagemodel: object) -> StorageQueueModel:
        """ insert queue message into storage """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if modelname in self._models:
                try:
                    message = self._service.put_message(storagemodel._queuename, storagemodel.getmessage())
                    storagemodel.mergemessage(message)
                except AzureException as e:
                    log.error('can not save queue message: queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def peek(self, storagemodel: object) -> StorageQueueModel:
        """ lookup the next message in queue """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if modelname in self._models:
                try:
                    """ peek first message in queue """
                    messages = self._service.peek_messages(storagemodel._queuename, num_messages=1)

                    """ parse retrieved message """
                    for message in messages:
                        storagemodel.mergemessage(message)

                    """ no message retrieved? """
                    if storagemodel.id is None:
                        storagemodel = None
                except AzureException as e:
                    log.error('can not peek queue message: queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def get(self, storagemodel: object, hide=0) -> StorageQueueModel:
        """ lookup the next message in queue """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if modelname in self._models:
                try:
                    """ get first message in queue """
                    if hide > 0:
                        messages = self._service.get_messages(storagemodel._queuename, num_messages=1, visibility_timeout=hide)
                    else:
                        messages = self._service.get_messages(storagemodel._queuename, num_messages=1)

                    """ parse retrieved message """
                    for message in messages:
                        storagemodel.mergemessage(message)

                    """ no message retrieved? """
                    if storagemodel.id is None:
                        storagemodel = None
                except AzureException as e:
                    log.error('can not get queue message: queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def update(self, storagemodel: object, hide=0) -> StorageQueueModel:
        """ update the message in queue """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if modelname in self._models:
                """ check if message is in queue """
                if (storagemodel.id != '') and (storagemodel.pop_receipt != '') and (not storagemodel.id is None) and (not storagemodel.pop_receipt is None):
                    try:
                        content = storagemodel.getmessage()
                        message = self._service.update_message(storagemodel._queuename, storagemodel.id, storagemodel.pop_receipt, visibility_timeout=hide, content=content)
                        storagemodel.content = content
                        storagemodel.pop_receipt = message.pop_receipt
                    except AzureException as e:
                        log.error('can not update queue message: queue {} with message.id {!s} because {!s}'.format(storagemodel._queuename, storagemodel.id, e))
                        storagemodel = None
                else:
                    log.info("can't update queue message {} due to missing id and pop_receipt".format(modelname))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def delete(self, storagemodel: object) -> bool:
        """ delete the message in queue """
        modelname = storagemodel.__class__.__name__
        deleted = False
        if isinstance(storagemodel, StorageQueueModel):
            if modelname in self._models:
                """ check if message is in queue """
                if (storagemodel.id != '') and (storagemodel.pop_receipt != '') and (not storagemodel.id is None) and (not storagemodel.pop_receipt is None):
                    try:
                        self._service.delete_message(storagemodel._queuename, storagemodel.id, storagemodel.pop_receipt)
                        deleted = True
                    except AzureException as e:
                        log.error('can not delete queue message: queue {} with message.id {!s} because {!s}'.format(storagemodel._queuename, storagemodel.id, e))
                else:
                    log.info("can't delete queue message {} due to missing id and pop_receipt".format(modelname))
            else:
                log.info('please register model {} first'.format(modelname))
        else:
            log.info('model {} is not a Queue Model'.format(modelname))

        return deleted
class Workload(object):
    def __init__(self, config):
        self.config = config
        self.log_queue_service = QueueService(account_name=self.config.storage_account_name,
                                              sas_token=self.config.workload_tracker_sas_token)
        self.workload_complete_event = None
        self.scheduler_start_event = None
        self.jobs_queued_done = None
        self.processor_events = []
        self.processor_fork_events = []
        self.job_consolidation_status_events = []
        self.job_processing_status_events = []

    def get_events(self):
        print "Getting Relevant Workload Events"

        while True:
            # Fetch up to 32 messages at a time (the service maximum). Note that
            # no visibility timeout is passed here, so the default applies.
            messages = self.log_queue_service.get_messages(self.config.workload_tracker_queue_name, 32)

            for msg in messages:
                parsed = json.loads(msg.content)
                event = WorkloadEvent(WorkloadEventType(parsed["event_type"]), msg.insertion_time, parsed["content"])

                # Workload Completed
                if event.event_type == WorkloadEventType.WORKLOAD_DONE:
                    self.workload_complete_event = event
                # Scheduler - Main Start
                if event.event_type == WorkloadEventType.SCHEDULER_START:
                    self.scheduler_start_event = event
                # Processor - Main Start
                if event.event_type == WorkloadEventType.PROCESSOR_START:
                    self.processor_events.append(event)
                # Processor - Fork Start
                if event.event_type == WorkloadEventType.PROCESSOR_FORK_START:
                    self.processor_fork_events.append(event)
                # Job Consolidation Status
                if event.event_type == WorkloadEventType.WORKLOAD_CONSOLIDATION_STATUS:
                    self.job_consolidation_status_events.append(event)
                # Job Processing Status
                if event.event_type == WorkloadEventType.WORKLOAD_PROCESSING_STATUS:
                    self.job_processing_status_events.append(event)
                # All Jobs Queued
                if event.event_type == WorkloadEventType.JOBS_QUEUE_DONE:
                    self.jobs_queued_done = event

                if event is not None:
                    print str(event.timestamp) + " " + event.contents

                # Delete the message
                self.log_queue_service.delete_message(self.config.workload_tracker_queue_name, msg.id, msg.pop_receipt)

            # Stop when the workload is completed
            if self.workload_complete_event is not None and self.scheduler_start_event is not None:
                break

            # Sleeping to avoid spamming if the queue is empty
            if not messages:
                time.sleep(10)

    def time_elapse(self, evt1, evt2):
        return divmod((evt2.timestamp - evt1.timestamp).total_seconds(), 60)

    def print_summary(self):
        print "\nSummary: "
        print "Scheduler Started: " + str(self.scheduler_start_event.timestamp)
        print "Workload Completed: " + str(self.workload_complete_event.timestamp)

        # Jobs Queued
        print "All Jobs Queued: " + str(self.jobs_queued_done.timestamp)
        elapse = self.time_elapse(self.scheduler_start_event, self.jobs_queued_done)
        print "Jobs Queued Elapsed Time: " + str(elapse[0]) + " mins, " + str(elapse[1]) + " secs"

        # Workload Completion
        elapse = self.time_elapse(self.scheduler_start_event, self.workload_complete_event)
        print "Workload Elapsed Time: " + str(elapse[0]) + " mins, " + str(elapse[1]) + " secs"

        # Processor Information
        self.processor_events.sort(key=lambda evt: evt.timestamp)
        print "Number of Processors: " + str(len(self.processor_events))
        print "\tNumber of Forked Processors Instances: " + str(len(self.processor_fork_events))
        print "\tFirst Processor Up: " + str(self.processor_events[0].timestamp)
        print "\tLast Processor Up: " + str(self.processor_events[-1].timestamp)

        # Job Processor Completion
        self.job_processing_status_events.sort(key=lambda evt: evt.timestamp)
        completed_processing_event = None
        for job_proc_evt in self.job_processing_status_events:
            status = job_proc_evt.contents.split()[-1].split('/')
            # Find when 100% of the jobs are reported to be completed processing
            if status[0] == status[1]:
                completed_processing_event = job_proc_evt
                break

        print "Final Processing Time: " + str(completed_processing_event.timestamp)
        elapse = self.time_elapse(self.scheduler_start_event, completed_processing_event)
        print "Processing Elapsed Time: " + str(elapse[0]) + " mins, " + str(elapse[1]) + " secs"
class Queue:
    def __init__(self, account_name, account_key, queue_name="logqueue"):
        """Initialize a queue. The type is set by the 'ACS_LOGGING_QUEUE_TYPE'
        environment variable. If it is set to 'AzureStorageQueue' then values
        must be provided for 'account_name' and 'account_key' which are values
        associated with the Azure Storage account. 'queue_name' is optional
        and defaults to 'logqueue'.
        """
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name
        # self.log.debug("Queue type: " + self.queue_type + " / " + self.queue_name)

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, 'w+')
        else:
            self.log.error("Unknown queue type: " + self.queue_type)

    def getName(self):
        return self.queue_name

    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary cleanup on the queue at the end of a run."""
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            self.log.error("Unknown queue type: " + self.queue_type)

    def enqueue(self, msg):
        if self.queue_type == "LocalFile":
            self.file_queue.write(msg + '\n')
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        else:
            self.log.error("We don't know how to handle queues of type " + self.queue_type)
        self.log.debug(msg)

    def dequeue(self):
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, 'r') as f:
                messages = f.readlines()[1]  # note: this returns only the second line
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        self.queue_service.delete_message(self.queue_name, message.message_id, message.pop_receipt)
        # with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #     processed.write(log)
        # os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """Get the approximate length of the queue"""
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata['x-ms-approximate-messages-count']
        return int(count)

    def peek_messages(self, num_messages):
        """
        Peek at the top messages in the queue. This method does not remove
        the messages from the queue.
        """
        return self.queue_service.peek_messages(self.queue_name, num_messages)
class AzureFunctionAppBackend:
    """
    A wrap-up around Azure Function Apps backend.
    """

    def __init__(self, config):
        self.log_level = os.getenv('CLOUDBUTTON_LOGLEVEL')
        self.name = 'azure_fa'
        self.config = config
        self.fa_client = FunctionAppClient(self.config)
        self.queue_service = QueueService(
            account_name=self.config['account_name'],
            account_key=self.config['account_key'])
        self.queue_service.encode_function = QueueMessageFormat.text_base64encode
        self.queue_service.decode_function = QueueMessageFormat.text_base64decode

        log_msg = 'Cloudbutton v{} init for Azure Function Apps'.format(__version__)
        logger.info(log_msg)
        if not self.log_level:
            print(log_msg)

    def create_runtime(self, docker_image_name, memory=None,
                       timeout=azure_fa_config.RUNTIME_TIMEOUT_DEFAULT):
        """
        Creates a new runtime into Azure Function Apps
        from the provided Linux image for consumption plan
        """
        log_msg = 'Creating new Cloudbutton runtime for Azure Function Apps...'
        logger.info(log_msg)
        if not self.log_level:
            print(log_msg)

        logger.info('Extracting preinstalls for Azure runtime')
        metadata = self._generate_runtime_meta()

        logger.info('Creating new Cloudbutton runtime')
        action_name = self._format_action_name(docker_image_name)
        self._create_runtime(action_name)

        return metadata

    def delete_runtime(self, docker_image_name, extract_preinstalls=False):
        """
        Deletes a runtime
        """
        if extract_preinstalls:
            action_name = docker_image_name
        else:
            action_name = self._format_action_name(docker_image_name)

        self.fa_client.delete_action(action_name)
        queue_name = self._format_queue_name(docker_image_name, type='trigger')
        self.queue_service.delete_queue(queue_name)

    def invoke(self, docker_image_name, memory=None, payload={}):
        """
        Invoke function
        """
        action_name = self._format_action_name(docker_image_name)
        queue_name = self._format_queue_name(action_name, type='trigger')

        try:
            msg = self.queue_service.put_message(queue_name, json.dumps(payload))
            activation_id = msg.id
        except Exception:
            logger.debug('Creating queue (invoke)')
            self.queue_service.create_queue(queue_name)
            return self.invoke(docker_image_name, memory=memory, payload=payload)

        return activation_id

    def get_runtime_key(self, docker_image_name, runtime_memory):
        """
        Method that creates and returns the runtime key. Runtime keys are
        used to uniquely identify runtimes within the storage, in order to
        know which runtimes are installed and which not.
        """
        action_name = self._format_action_name(docker_image_name)
        runtime_key = os.path.join(self.name, action_name)
        return runtime_key

    def _format_action_name(self, action_name):
        sha_1 = hashlib.sha1()
        block = action_name.encode('ascii', errors='ignore')
        sha_1.update(block)
        tag = sha_1.hexdigest()[:8]

        sha_1 = hashlib.sha1()
        block = self.config['account_name'].encode('ascii', errors='ignore')
        sha_1.update(block)
        tag = tag + sha_1.hexdigest()[:8]

        version = re.sub(r'[/_:.-]', '', __version__)
        action_name = action_name[:16] + '-' + version[:5] + '-' + tag
        return action_name

    def _format_queue_name(self, action_name, type):
        # Using different queue names because there is a delay between
        # deleting a queue and creating another one with the same name
        return action_name + '-' + type

    def _create_runtime(self, action_name, extract_preinstalls=False):
        """
        Creates a new runtime with the base modules and cloudbutton
        """
        def add_base_modules():
            cmd = 'pip3 install -t {} -r requirements.txt'.format(
                azure_fa_config.ACTION_MODULES_DIR)
            child = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)  # silent
            child.wait()
            logger.debug(child.stdout.read().decode())
            logger.debug(child.stderr.read().decode())

            if child.returncode != 0:
                cmd = 'pip install -t {} -r requirements.txt'.format(
                    azure_fa_config.ACTION_MODULES_DIR)
                child = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)  # silent
                child.wait()
                logger.debug(child.stdout.read().decode())
                logger.debug(child.stderr.read().decode())

                if child.returncode != 0:
                    logger.critical('Failed to install base modules for Azure Function')
                    exit(1)

        def add_cloudbutton_module():
            module_location = os.path.dirname(os.path.abspath(cloudbutton.__file__))
            shutil.copytree(
                module_location,
                os.path.join(azure_fa_config.ACTION_MODULES_DIR, 'cloudbutton'))

        def get_bindings_str(action_name, extract_preinstalls=False):
            if not extract_preinstalls:
                bindings = {
                    "scriptFile": "__init__.py",
                    "bindings": [{
                        "name": "msgIn",
                        "type": "queueTrigger",
                        "direction": "in",
                        "queueName": self._format_queue_name(action_name, 'trigger'),
                        "connection": "AzureWebJobsStorage"
                    }]
                }
            else:
                bindings = {
                    "scriptFile": "__init__.py",
                    "bindings": [{
                        "name": "msgIn",
                        "type": "queueTrigger",
                        "direction": "in",
                        "queueName": self._format_queue_name(action_name, type='trigger'),
                        "connection": "AzureWebJobsStorage"
                    }, {
                        "name": "msgOut",
                        "type": "queue",
                        "direction": "out",
                        "queueName": self._format_queue_name(action_name, type='result'),
                        "connection": "AzureWebJobsStorage"
                    }]
                }
            return json.dumps(bindings)

        initial_dir = os.getcwd()
        temp_folder = next(tempfile._get_candidate_names())
        os.mkdir(temp_folder)
        os.chdir(temp_folder)

        try:
            # Create project folder from template
            project_template = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), 'action')
            project_dir = os.path.join(initial_dir, temp_folder, action_name)
            shutil.copytree(project_template, project_dir)
            os.chdir(project_dir)
            action_dir = os.path.join(project_dir, action_name)
            os.rename('action', action_dir)

            # Add the base dependencies and current cloudbutton module
            logger.debug('Adding runtime base modules')
            os.makedirs(azure_fa_config.ACTION_MODULES_DIR, exist_ok=True)
            add_base_modules()
            add_cloudbutton_module()

            # Set entry point file
            if extract_preinstalls:
                entry_point_file = 'extract_preinstalls_action.py'
            else:
                entry_point_file = 'handler_action.py'
            os.rename(os.path.join(action_dir, entry_point_file),
                      os.path.join(action_dir, '__init__.py'))

            # Edit the function's bindings for it to be a queue triggered function
            with open(os.path.join(action_dir, 'function.json'), 'w') as bindings_file:
                bindings_file.write(get_bindings_str(action_name, extract_preinstalls))

            # Create trigger queue, create action
            logger.debug('Creating trigger queue')
            queue_name = self._format_queue_name(action_name, type='trigger')
            self.queue_service.create_queue(queue_name)
            self.fa_client.create_action(action_name)

        except Exception as e:
            raise Exception("Unable to create the new runtime", e)
        finally:
            os.chdir(initial_dir)
            shutil.rmtree(temp_folder, ignore_errors=True)  # Remove tmp project folder

    def _generate_runtime_meta(self):
        """
        Extract installed Python modules from Azure runtime
        """
        action_name = 'cloudbutton-extract-preinstalls-' + get_unique_id()
        self._create_runtime(action_name, extract_preinstalls=True)

        logger.debug("Invoking 'extract-preinstalls' action")
        try:
            runtime_meta = self._invoke_with_result(action_name)
        except Exception:
            raise Exception("Unable to invoke 'extract-preinstalls' action")
        try:
            self.delete_runtime(action_name, extract_preinstalls=True)
        except Exception:
            raise Exception("Unable to delete 'extract-preinstalls' action")

        if not runtime_meta or 'preinstalls' not in runtime_meta:
            raise Exception(runtime_meta)

        logger.debug("Extracted metadata successfully")
        return runtime_meta

    def _invoke_with_result(self, action_name):
        result_queue_name = self._format_queue_name(action_name, type='result')
        self.queue_service.create_queue(result_queue_name)

        trigger_queue_name = self._format_queue_name(action_name, type='trigger')
        self.queue_service.put_message(trigger_queue_name, '')

        msg = []
        while not msg:
            msg = self.queue_service.get_messages(result_queue_name, num_messages=1)
            time.sleep(0.5)

        result_str = msg[0].content
        self.queue_service.delete_queue(result_queue_name)

        return json.loads(result_str)
# Pedro Perez - 2015
#
# -----------------------------------------------------------------------------
import sys
import os
import subprocess

from azure.storage.queue import QueueService

account_name = "mcdockerqueue"
account_key = sys.argv[1]
queuename = "servers"

queue_service = QueueService(account_name, account_key)
queue_metadata = queue_service.get_queue_metadata(queuename)
# The metadata header value is a string; convert it so the comparison below works.
count = int(queue_metadata['x-ms-approximate-messages-count'])

print "There are %s messages in the queue" % count

if count > 0:
    messages = queue_service.get_messages(queuename)
    for message in messages:
        print(message.message_text)
        # Remove message from the queue
        queue_service.delete_message(queuename, message.message_id, message.pop_receipt)
else:
    print "There are no messages to process"
config.read('storage.conf')
accName = config.get('storage_account', 'accName')
accKey = config.get('storage_account', 'accKey')
queueName = config.get('storage_account', 'queueName')

# sudo pip install blessings
from azure.storage.queue import QueueService, QueueMessageFormat
from blessings import Terminal
import sys
import time

queue_service = QueueService(account_name=accName, account_key=accKey)
queue_service.decode_function = QueueMessageFormat.text_base64decode

t = Terminal()
print(t.green('Connected to Azure Storage Queue ' + queueName + '...'))

# Get approximate number of messages in queue
queue_metadata = queue_service.get_queue_metadata(queueName)
count = queue_metadata.approximate_message_count
print('Approximate number of messages in queue: ', count, '\n')

while True:
    messages = queue_service.get_messages(queueName)
    if messages:
        # Get the next message
        for message in messages:
            print(t.bold_yellow(message.content))
            queue_service.delete_message(queueName, message.id, message.pop_receipt)
            print(t.blue('-' * 40 + '\n'))
    time.sleep(4)
class Azure_Storage():
    def __init__(self, create_new=False):
        account_name = config.STORAGE_ACCOUNT_NAME
        account_key = config.STORAGE_ACCOUNT_KEY
        self.task_queue_name = config.TASK_QUEUE_NAME
        self.table_name = config.TABLE_NAME
        self.container_name = config.BLOB_CONTAINER_NAME
        self.ImagePartitionKey = config.IMAGE_PARTITION_KEY

        self.table_service = TableService(account_name=account_name, account_key=account_key)
        self.block_blob_service = BlockBlobService(account_name=account_name, account_key=account_key)
        self.queue_service = QueueService(account_name=account_name, account_key=account_key)

        if create_new:
            self.queue_service.create_queue(self.task_queue_name)
            self.block_blob_service.create_container(self.container_name)
            self.table_service.create_table(self.table_name)

    def put_image(self, image_uuid, image_bytes):
        ret = self.block_blob_service.create_blob_from_bytes(
            self.container_name, image_uuid, image_bytes)
        return ret

    def get_image(self, image_uuid):
        ret = self.block_blob_service.get_blob_to_bytes(
            self.container_name, image_uuid).content
        return ret

    def put_classification_result(self, image_uuid, results):
        task = Entity()
        task.PartitionKey = self.ImagePartitionKey
        task.RowKey = image_uuid
        task.results = str(results)
        ret = self.table_service.insert_or_replace_entity(self.table_name, task)
        return ret

    def get_classification_result(self, image_uuid):
        try:
            task = self.table_service.get_entity(self.table_name,
                                                 self.ImagePartitionKey,
                                                 image_uuid)
            return task.results
        except Exception as e:
            return None

    def put_task(self, taskmsg):
        ret = self.queue_service.put_message(self.task_queue_name, taskmsg)
        return ret

    # payload is in message.content
    def get_task(self, num_messages=16):
        messages = self.queue_service.get_messages(self.task_queue_name,
                                                   num_messages=num_messages,
                                                   visibility_timeout=1 * 60)
        return messages

    def delete_task(self, message):
        ret = self.queue_service.delete_message(self.task_queue_name,
                                                message.id, message.pop_receipt)
        return ret
class Queue:
    def __init__(self, account_name, account_key, queue_name="logqueue"):
        """Initialize a queue. The type is set by the 'ACS_LOGGING_QUEUE_TYPE'
        environment variable. If it is set to 'AzureStorageQueue' then values
        must be provided for 'account_name' and 'account_key' which are values
        associated with the Azure Storage account. 'queue_name' is optional
        and defaults to 'logqueue'.
        """
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name
        # self.log.debug("Queue type: " + self.queue_type + " / " + self.queue_name)

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, "w+")
        else:
            self.log.error("Unknown queue type: " + self.queue_type)

    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary cleanup on the queue at the end of a run."""
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            self.log.error("Unknown queue type: " + self.queue_type)

    def enqueue(self, msg, level="INFO"):
        msg = level + " - " + msg
        if self.queue_type == "LocalFile":
            self.file_queue.write(msg + "\n")
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        self.log.debug(msg)

    def dequeue(self):
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, "r") as f:
                messages = f.readlines()[1]  # note: this returns only the second line
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        self.queue_service.delete_message(self.queue_name, message.message_id, message.pop_receipt)
        # with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #     processed.write(log)
        # os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """Get the approximate length of the queue"""
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata["x-ms-approximate-messages-count"]
        return int(count)  # the header value is a string
class MainPawWorker:
    """
    Main class to use for running a worker. Call start_workers() to start.
    """
    def __init__(self, azure_storage_name, azure_storage_private_key,
                 azure_queue_name, azure_table_name, tasks_module, workers,
                 visibility_timeout=VISIBILITY_TIMEOUT):
        """
        :param azure_storage_name: Name of Azure storage account
        :param azure_storage_private_key: Private key of Azure storage account.
        :param azure_queue_name: Name of the Azure queue to use.
        :param azure_table_name: Name of the Azure table to use.
        :param tasks_module: Module containing decorated functions to load from.
        :param workers: Int of workers. Ex: 4
        :param visibility_timeout: Seconds in int to keep message in Azure queue
        """
        self.account_name = azure_storage_name
        self.account_key = azure_storage_private_key
        self.queue_name = azure_queue_name
        self.table_name = azure_table_name
        self.tasks_module = tasks_module
        self.workers = workers
        self.visibility_timeout = visibility_timeout

        if self.visibility_timeout > MAXIMUM_VISIBILITY_TIMEOUT:
            raise PawError('self.visibility_timeout bigger than allowed limit')

        self.queue_service = QueueService(account_name=self.account_name,
                                          account_key=self.account_key)
        self.table_service = TableService(account_name=self.account_name,
                                          account_key=self.account_key)
        self.local_queue = Queue(self.workers)
        # self.logger = logging.getLogger()
        self.logger = LOGGER
        self.logger.info(PAW_LOGO)

        self.worker_process = Worker(
            local_queue=self.local_queue,
            queue_service=self.queue_service,
            queue_name=self.queue_name,
            table_service=self.table_service,
            table_name=azure_table_name,
            tasks=self._load_tasks(),
        )
        self.pool = Pool(self.workers, self.worker_process.run, ())
        signal.signal(signal.SIGTERM, self.on_exit)

    def on_exit(self, signum, frame):
        self.pool.terminate()
        sys.exit()

    def _load_tasks(self):
        """
        Loads and returns decorated functions from a given module, as a dict.
        """
        tasks = dict([
            o for o in getmembers(self.tasks_module)
            if isfunction(o[1]) and hasattr(o[1], 'paw')
        ])

        for t, f in tasks.items():
            self.logger.info("REGISTERED '{}'".format(t))
            if f.description:
                self.logger.info("\tdescription: '{}'".format(f.description))
        if not tasks:
            self.logger.warning("No tasks found...")

        return tasks

    def start_workers(self, sleep_for=5):
        """
        Starts workers and picks messages from the Azure queue. On new
        message, when the local queue has room, the message is placed for
        a worker to pick up.
        :param sleep_for: Seconds to sleep for after a loop end.
        """
        self.queue_service.create_queue(self.queue_name)
        create_table_if_missing(self.table_service, self.table_name)

        try:
            self.logger.info(
                "Cleaning up dead jobs left in {}".format(STARTED))
            dead_jobs = self.table_service.query_entities(
                table_name=self.table_name,
                filter="status eq '{}'".format(STARTED))
            for job in dead_jobs.items:
                log_to_table(table_service=self.table_service,
                             table_name=self.table_name,
                             message=job,
                             status=LOST_WORKER,
                             result="Lost worker, or task aborted.")
        except AzureException as e:
            self.logger.error("Cleaning dead tasks failed: {}".format(e))

        while True:
            if self.local_queue.full():
                time.sleep(sleep_for)

            try:
                new_msg = self.queue_service.get_messages(
                    queue_name=self.queue_name,
                    num_messages=1,
                    visibility_timeout=self.visibility_timeout)
            except AzureException:
                self.logger.error("Error while getting message from Azure "
                                  "queue. Trying to create the queue")
                self.queue_service.create_queue(self.queue_name)
                time.sleep(sleep_for)
                continue

            if new_msg:
                msg = new_msg[0]
                try:
                    content = json.loads(msg.content)
                except json.JSONDecodeError:
                    self.logger.critical('Json error {}'.format(
                        traceback.format_exc()))
                    try:
                        self.queue_service.delete_message(
                            queue_name=self.queue_name,
                            message_id=msg.id,
                            pop_receipt=msg.pop_receipt)
                    except AzureException:
                        self.logger.critical(
                            'Deleting invalid message from queue failed: '
                            '{}'.format(traceback.format_exc()))
                    continue

                if msg.dequeue_count > MAXIMUM_DEQUEUE_COUNT:
                    log_to_table(
                        table_service=self.table_service,
                        table_name=self.table_name,
                        message=content,
                        status=FAILED,
                        result="PAW MESSAGE: Dequeue count exceeded.",
                    )
                    self.queue_service.delete_message(self.queue_name,
                                                      msg.id,
                                                      msg.pop_receipt)
                    continue

                content['msg'] = msg

                while True:
                    try:
                        self.local_queue.put_nowait(content)
                        break
                    except Full:
                        self.logger.info('LOCAL QUEUE FULL: waiting...')
                        time.sleep(sleep_for)

                self.logger.debug('ADDING: {}'.format(content['task_name']))

            time.sleep(sleep_for)
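A minimal sketch of wiring this worker up. It assumes a hypothetical my_tasks module whose functions carry the paw decorator attribute that _load_tasks() looks for; the account name, key, queue, and table names are placeholders:

import my_tasks  # hypothetical module of paw-decorated task functions

worker = MainPawWorker(
    azure_storage_name='mystorageacct',   # placeholder account
    azure_storage_private_key='<key>',
    azure_queue_name='paw-tasks',
    azure_table_name='pawlogs',
    tasks_module=my_tasks,
    workers=4,
)
worker.start_workers(sleep_for=5)  # blocks, polling the queue forever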
# Test getting messages from queue
from azure.storage.queue import QueueService
import json
import base64

queue_service = QueueService(account_name='', account_key='')

messages = queue_service.get_messages('gradientqueuein',
                                      visibility_timeout=4,
                                      num_messages=2)
for message in messages:
    print(message.content)
    # queue_service.delete_message('taskqueue', message.id, message.pop_receipt)
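The unused json and base64 imports above suggest the queue holds Base64-encoded JSON, as elsewhere in this collection; a sketch of decoding a message under that assumption:

for message in messages:
    # Assumes producers Base64-encode a JSON payload before enqueueing.
    payload = json.loads(base64.b64decode(message.content).decode('utf8'))
    print(payload)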
import os
import time

from azure.storage.queue import QueueService

# Grab environment variables.
AZURE_STORAGE_ACCT = os.environ['AZURE_STORAGE_ACCT']
AZURE_QUEUE = os.environ['AZURE_QUEUE']
AZURE_QUEUE_KEY = os.environ['AZURE_QUEUE_KEY']

# Build queue object.
queue_service = QueueService(account_name=AZURE_STORAGE_ACCT,
                             account_key=AZURE_QUEUE_KEY)

while True:
    # Get queue count.
    metadata = queue_service.get_queue_metadata(AZURE_QUEUE)
    queue_length = metadata.approximate_message_count
    print(queue_length)

    messages = queue_service.get_messages(AZURE_QUEUE, num_messages=32)
    for message in messages:
        queue_service.delete_message(AZURE_QUEUE, message.id,
                                     message.pop_receipt)
        print("Message deleted")

    # Brief pause between polls so the loop does not hammer the service
    # when the queue is empty.
    time.sleep(1)
class QueueStorageHandlerTest(_TestCase):

    def setUp(self):
        self.service = QueueService(account_name=ACCOUNT_NAME,
                                    account_key=ACCOUNT_KEY,
                                    is_emulated=_EMULATED)
        # ensure that there's no message on the queue before each test
        queues = set()
        for cfg in LOGGING['handlers'].values():
            if 'queue' in cfg:
                queues.add(cfg['queue'])
        for queue in self.service.list_queues():
            if queue.name in queues:
                self.service.clear_messages(queue.name)

    def test_logging(self):
        # get the logger for the test
        logger_name = 'queue'
        logger = logging.getLogger(logger_name)
        handler_name = _get_handler_name(logger_name)
        # perform logging
        log_text = 'logging test'
        logger.info(log_text)
        # confirm that the message has the correct log text
        queue = _get_handler_config_value(handler_name, 'queue')
        messages = iter(self.service.get_messages(queue))
        message = next(messages)
        text_expected = "INFO %s" % log_text
        if _get_handler_config_value(handler_name, 'base64_encoding'):
            text_expected = _base64_encode(text_expected)
        self.assertEqual(message.content, text_expected)
        # confirm that there's no more message in the queue
        with self.assertRaises(StopIteration):
            next(messages)

    def test_message_ttl(self):
        # get the logger for the test
        logger_name = 'message_ttl'
        logger = logging.getLogger(logger_name)
        handler_name = _get_handler_name(logger_name)
        # perform logging
        log_text = 'time-to-live test'
        logger.info(log_text)
        # confirm that the new message is visible till the ttl expires
        queue = _get_handler_config_value(handler_name, 'queue')
        messages = iter(self.service.get_messages(queue))
        message = next(messages)
        text_expected = 'INFO %s' % log_text
        if _get_handler_config_value(handler_name, 'base64_encoding'):
            text_expected = _base64_encode(text_expected)
        self.assertEqual(message.content, text_expected)
        # confirm that there's no more message in the queue
        with self.assertRaises(StopIteration):
            next(messages)
        # confirm that the new message is invisible after the ttl expires
        ttl = _get_handler_config_value(handler_name, 'message_ttl')
        time.sleep(int(ttl) + 5)
        messages = iter(self.service.get_messages(queue))
        with self.assertRaises(StopIteration):
            next(messages)

    def test_visibility_timeout(self):
        # get the logger for the test
        logger_name = 'visibility_timeout'
        logger = logging.getLogger(logger_name)
        handler_name = _get_handler_name(logger_name)
        # perform logging
        log_text = 'visibility test'
        logger.info(log_text)
        # confirm that the new message is invisible till the timeout expires
        queue = _get_handler_config_value(handler_name, 'queue')
        messages = iter(self.service.get_messages(queue))
        with self.assertRaises(StopIteration):
            next(messages)
        # confirm that the new message is visible after the timeout expires
        timeout = _get_handler_config_value(handler_name, 'visibility_timeout')
        time.sleep(int(timeout) + 5)
        messages = iter(self.service.get_messages(queue))
        message = next(messages)
        text_expected = 'INFO %s' % log_text
        if _get_handler_config_value(handler_name, 'base64_encoding'):
            text_expected = _base64_encode(text_expected)
        self.assertEqual(message.content, text_expected)
        # confirm that there's no more message in the queue
        with self.assertRaises(StopIteration):
            next(messages)

    def test_base64_encoding(self):
        # get the logger for the test
        logger_name = 'base64_encoding'
        logger = logging.getLogger(logger_name)
        handler_name = _get_handler_name(logger_name)
        # perform logging
        log_text = 'Base64 encoding test'
        logger.info(log_text)
        # confirm that the log message is encoded in Base64
        queue = _get_handler_config_value(handler_name, 'queue')
        messages = iter(self.service.get_messages(queue))
        message = next(messages)
        text_expected = "INFO %s" % log_text
        if _get_handler_config_value(handler_name, 'base64_encoding'):
            text_expected = _base64_encode(text_expected)
        self.assertEqual(message.content, text_expected)
        # confirm that there's no more message in the queue
        with self.assertRaises(StopIteration):
            next(messages)
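For reference, a sketch of the kind of LOGGING dict these tests appear to assume, with one handler per test logger. The handler class path and the concrete values are illustrative guesses, not the project's actual configuration; the keys ('queue', 'message_ttl', 'visibility_timeout', 'base64_encoding') are the ones the tests read back through _get_handler_config_value():

LOGGING = {
    'version': 1,
    'handlers': {
        # hypothetical handler entries; class path assumed
        'queue': {
            'class': 'azure_storage_logging.handlers.QueueStorageHandler',
            'account_name': ACCOUNT_NAME,
            'account_key': ACCOUNT_KEY,
            'queue': 'logs',
        },
        'message_ttl': {
            'class': 'azure_storage_logging.handlers.QueueStorageHandler',
            'account_name': ACCOUNT_NAME,
            'account_key': ACCOUNT_KEY,
            'queue': 'logs-ttl',
            'message_ttl': 10,
        },
        'visibility_timeout': {
            'class': 'azure_storage_logging.handlers.QueueStorageHandler',
            'account_name': ACCOUNT_NAME,
            'account_key': ACCOUNT_KEY,
            'queue': 'logs-visibility',
            'visibility_timeout': 10,
        },
        'base64_encoding': {
            'class': 'azure_storage_logging.handlers.QueueStorageHandler',
            'account_name': ACCOUNT_NAME,
            'account_key': ACCOUNT_KEY,
            'queue': 'logs-base64',
            'base64_encoding': True,
        },
    },
    'loggers': {
        'queue': {'handlers': ['queue'], 'level': 'INFO'},
        'message_ttl': {'handlers': ['message_ttl'], 'level': 'INFO'},
        'visibility_timeout': {'handlers': ['visibility_timeout'],
                               'level': 'INFO'},
        'base64_encoding': {'handlers': ['base64_encoding'], 'level': 'INFO'},
    },
}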
if "delay" in os.environ:
    delay = os.environ['delay']
else:
    delay = 0
if "docker" in os.environ:
    docker = os.environ['docker']
if "chronos" in os.environ:
    chronos = os.environ['chronos']

while True:
    # set up azure queue
    queue_service = QueueService(account_name=azurestoracct,
                                 account_key=azurequeuekey)

    # get messages from azure queue
    messages = queue_service.get_messages(azurequeue, num_messages=5)

    # delete from queue, create container, start container
    for message in messages:
        # delete message from azure queue
        queue_service.delete_message(azurequeue, message.id,
                                     message.pop_receipt)

        if "docker" in os.environ:
            # sample json
            # {"Image": "neilpeterson/stock-report","Cmd": ["--symbols=msft;lnkd", "[email protected]"],"Env": ["gmuser = [email protected]", "gmpass = TempForDemo2016"]}
            s = message.content.split(':')
            data = json.loads('{"Image": "' + docker_image +
                              '","Cmd": ["--symbols=' + s[0] +
                              '", "--email=' + s[1] +
                              '","--delay=' + str(delay) + '"]}')
            print(data)