def render_video(request):
    """Django view: pull at most one message from the 'ready to encode' queue,
    submit the referenced video to Azure Media Services (adaptive-streaming
    encode + speech indexing), then hand the job off to the 'encoding' queue.

    Renders app/render_video.html with a one-line status string.
    """
    template = loader.get_template('app/render_video.html')
    vidstatus = 'No Video Found.'
    queue_service = QueueService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'],
                                 account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
    # Take one message and hide it for 60s while this request processes it.
    messages = queue_service.get_messages(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'],
                                          num_messages=1, visibility_timeout=1*60)
    for message in messages:
        vidstatus = 'Queued for Rendering: ' + message.content
        # Message content is JSON with at least 'filename', 'folder', 'size'.
        message_obj = json.loads(message.content)
        access_token = ams_authenticate()['access_token']
        # Create the AMS input asset for this upload.
        asset = ams_post_request(access_token, "Assets", {
            'Name': message_obj['filename'],
            'AlternateId': message_obj['folder']})
        # The asset URI's path (minus the leading '/') is its backing blob container.
        asset_container = urllib.parse.urlparse(asset['Uri']).path[1:]
        # Register the video file against the asset.
        asset_file = ams_post_request(access_token, "Files", {
            'IsEncrypted': 'false',
            'IsPrimary': 'false',
            'MimeType': 'video/mp4',
            'ContentFileSize': message_obj['size'],
            'Name': message_obj['filename'],
            'ParentAssetId': asset['Id']})
        # Copy the uploaded blob into the asset's container (server-side copy).
        block_blob_service = BlockBlobService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'],
                                              account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
        from_url = block_blob_service.make_blob_url(os.environ['SVPD_STORAGE_ACCOUNT_UPLOADED'],
                                                    message_obj['folder'] + '/' + message_obj['filename'])
        block_blob_service.copy_blob(asset_container, message_obj['filename'], from_url)
        # Submit a two-task job: Media Encoder Standard (adaptive streaming)
        # followed by the Azure Media Indexer (captions/keywords).
        job = ams_verbose_post_request(access_token, "Jobs", {
            'Name': message_obj['filename'],
            'InputMediaAssets': [{
                '__metadata': {
                    'uri': os.environ['AMS_API_ENDPOINT'] + 'Assets(\'' + asset['Id'] + '\')'
                }
            }],
            'Tasks': [{
                'Name': 'Adaptive Streaming Task',
                'Configuration': 'Adaptive Streaming',
                'MediaProcessorId': 'nb:mpid:UUID:ff4df607-d419-42f0-bc17-a481b1331e56',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - MES v1.1" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(0)</outputAsset></taskBody>'
            },{
                'Name': 'Indexing Task',
                'Configuration': '<?xml version="1.0" encoding="utf-8"?><configuration version="2.0"><input><metadata key="title" value="blah" /></input><settings></settings><features><feature name="ASR"><settings><add key="Language" value="English" /><add key="GenerateAIB" value="False" /><add key="GenerateKeywords" value="True" /><add key="ForceFullCaption" value="False" /><add key="CaptionFormats" value="ttml;sami;webvtt" /></settings></feature></features></configuration>',
                'MediaProcessorId': 'nb:mpid:UUID:233e57fc-36bb-4f6f-8f18-3b662747a9f8',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - Indexed" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(1)</outputAsset></taskBody>'
            }]
        })
        # Record the submitted job on the 'encoding' queue for later polling.
        queue_service.put_message(os.environ['SVPD_STORAGE_ACCOUNT_ENCODING'], json.dumps({
            'filename': message_obj['filename'],
            'folder': message_obj['folder'],
            'size': message_obj['size'],
            'job': job['d']}))
        # Work has been handed off, so permanently remove the source message.
        queue_service.delete_message(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'],
                                     message.id, message.pop_receipt)
    return HttpResponse(template.render({
        'vidstatus': vidstatus,
    }, request))
def test_sas_process(self):
    """A PROCESS-permission SAS token must allow get_messages on the queue."""
    # SAS tokens are derived from the real storage key, so only run live.
    if TestMode.need_recordingfile(self.test_mode):
        return

    # Arrange: a queue with one message plus a PROCESS-only token valid 1h.
    queue_name = self._create_queue()
    self.qs.put_message(queue_name, u'message1')
    sas_token = self.qs.generate_queue_shared_access_signature(
        queue_name,
        QueuePermissions.PROCESS,
        datetime.utcnow() + timedelta(hours=1),
    )

    # Act: read the message through a service authenticated only by the SAS.
    sas_service = QueueService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=sas_token,
    )
    self._set_service_options(sas_service, self.settings)
    received = sas_service.get_messages(queue_name)

    # Assert: exactly the queued message came back.
    self.assertIsNotNone(received)
    self.assertEqual(1, len(received))
    msg = received[0]
    self.assertIsNotNone(msg)
    self.assertNotEqual('', msg.id)
    self.assertEqual(u'message1', msg.content)
def consume(args):
    """Poll an Azure storage queue forever, handing each message to process().

    Expects QUEUE_STORAGE_ACCOUNT / QUEUE_STORAGE_ACCESS_KEY in the
    environment and `args` to carry azure_queue, wait_time and dry_run.
    """
    # Get the service resources
    queue_service = QueueService(
        account_name=os.environ.get('QUEUE_STORAGE_ACCOUNT'),
        account_key=os.environ.get('QUEUE_STORAGE_ACCESS_KEY'))
    # Messages are base64-framed binary payloads.
    queue_service.decode_function = QueueMessageFormat.binary_base64decode
    logger.debug('Listening for messages on {}'.format(args.azure_queue))
    while True:
        # Hide fetched messages for wait_time seconds while they are handled.
        messages = queue_service.get_messages(
            args.azure_queue, num_messages=16, visibility_timeout=args.wait_time)
        for message in messages:
            try:
                process(args, message)
            except AzureException as e:
                logger.exception(e)
            # NOTE(review): the message is deleted even when process() raised
            # above — confirm failed messages are intentionally not retried.
            if not args.dry_run:
                logger.debug('deleting message {}'.format(message.id))
                queue_service.delete_message(args.azure_queue, message.id,
                                             message.pop_receipt)
        # Throttle the polling loop between batches.
        time.sleep(args.wait_time)
def test_sas_process(self):
    """A PROCESS-permission SAS must permit reading messages from the queue."""
    # SAS URL is calculated from storage key, so this test runs live only.
    if TestMode.need_recordingfile(self.test_mode):
        return

    # Arrange: enqueue one message and mint a PROCESS-only token.
    self.qs.put_message(self.test_queues[0], 'message1')
    sas_token = self.qs.generate_shared_access_signature(
        self.test_queues[0],
        self._get_shared_access_policy(QueueSharedAccessPermissions.PROCESS),
    )

    # Act: fetch through a service authenticated only by the SAS.
    sas_service = QueueService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=sas_token,
    )
    self._set_service_options(sas_service, self.settings)
    fetched = sas_service.get_messages(self.test_queues[0])

    # Assert: exactly the queued message came back.
    self.assertIsNotNone(fetched)
    self.assertEqual(1, len(fetched))
    msg = fetched[0]
    self.assertIsNotNone(msg)
    self.assertNotEqual('', msg.message_id)
    self.assertEqual('message1', msg.message_text)
def test_sas_update(self):
    """An UPDATE-permission SAS must permit rewriting an in-flight message."""
    # SAS URL is calculated from storage key, so this test runs live only.
    if TestMode.need_recordingfile(self.test_mode):
        return

    # Arrange: enqueue, mint an UPDATE-only token, then dequeue the message.
    self.qs.put_message(self.test_queues[0], 'message1')
    sas_token = self.qs.generate_shared_access_signature(
        self.test_queues[0],
        self._get_shared_access_policy(QueueSharedAccessPermissions.UPDATE),
    )
    fetched = self.qs.get_messages(self.test_queues[0])

    # Act: update the dequeued message through a SAS-authenticated service.
    sas_service = QueueService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=sas_token,
    )
    self._set_service_options(sas_service, self.settings)
    sas_service.update_message(
        self.test_queues[0],
        fetched[0].message_id,
        'updatedmessage1',
        fetched[0].pop_receipt,
        visibilitytimeout=0,
    )

    # Assert: re-reading with full account credentials sees the new text.
    fetched = self.qs.get_messages(self.test_queues[0])
    self.assertEqual('updatedmessage1', fetched[0].message_text)
def getMessages(self):
    """Connect to the configured Azure queue and pump incoming messages.

    Creates the queue if needed, then, while self.loop() is true, fetches
    messages, converts each to events via processIncomingMessage() and
    submits them to the "outbox"; optionally deletes handled messages.

    Raises a wrapped Exception (first line of the cause only) when the
    initial connection or queue creation fails.
    """
    try:
        self.queue_service = QueueService(
            account_name=self.kwargs.account_name,
            account_key=self.kwargs.account_key,
            endpoint_suffix=self.kwargs.endpoint_suffix,
        )
        self.queue_service.create_queue(self.kwargs.queue_name)
    except Exception as err:
        message = "Failed to connect to Azure Queue Service https://%s.queue.%s/%s Reason: " % (
            self.kwargs.account_name, self.kwargs.endpoint_suffix, self.kwargs.queue_name)
        # Keep only the first line of the SDK error to avoid log spam.
        raise Exception(message + str(err).partition("\n")[0])
    else:
        self.logging.info(
            "Connected to Azure Queue Service https://%s.queue.%s/%s" % (
                self.kwargs.account_name, self.kwargs.endpoint_suffix, self.kwargs.queue_name))
    # Main consume loop; self.loop() is the module's run/stop switch.
    while self.loop():
        for message in self.queue_service.get_messages(
                self.kwargs.queue_name,
                visibility_timeout=self.kwargs.visibility_timeout):
            # One queue message may expand into several outbox events.
            for event in self.processIncomingMessage(message):
                self.submit(event, "outbox")
            # Optionally acknowledge the message once it has been submitted.
            if self.kwargs.auto_message_delete:
                self.queue_service.delete_message(self.kwargs.queue_name,
                                                  message.id, message.pop_receipt)
def getmessage():
    """Fetch up to five pending messages from the configured Azure queue."""
    service = QueueService(account_name=azurestoracct,
                           account_key=azurequeuekey)
    return service.get_messages(azurequeue, num_messages=5)
def get_queue_client_by_uri(queue_uri):
    """Resolve *queue_uri* into a (QueueService, queue_name) pair.

    The queue is created if it does not already exist.
    """
    account = StorageUtilities.get_storage_from_uri(queue_uri)
    service = QueueService(account_name=account.storage_name,
                           account_key=account.key)
    service.create_queue(account.container_name)
    return service, account.container_name
def __init__(self, queue_name):
    """Bind this client to *queue_name*, creating the queue if missing.

    Credentials come from AZURE_ACCOUNT_NAME / AZURE_ACCOUNT_KEY.
    """
    account = os.getenv('AZURE_ACCOUNT_NAME')
    key = os.getenv('AZURE_ACCOUNT_KEY')
    self.conn = QueueService(account_name=account, account_key=key)
    self.queue_name = queue_name
    self.conn.create_queue(queue_name)
    # Payloads are raw bytes; base64 framing keeps them queue-safe both ways.
    self.conn.encode_function = QueueMessageFormat.binary_base64encode
    self.conn.decode_function = QueueMessageFormat.binary_base64decode
def processMessage(data: DocumentData, startTime: datetime):
    """Populate *data* with runtime/host metadata, queue depth and timings."""
    # NOTE(review): the startTime argument is immediately overwritten here,
    # so the caller's value is never used — confirm this is intended.
    startTime = datetime.utcnow()
    result = doMath()
    # NOTE(review): this discards the doMath() result computed above;
    # data.result therefore always records 0 — confirm this is intended.
    result = 0
    # Host/port/worker/request are read positionally from the command line.
    args = sys.argv
    data.start = f'{startTime:%Y-%m-%d %H:%M:%S.%f%z}'
    data.host = args[2]
    data.port = args[4]
    data.worker = args[6]
    data.request = args[8]
    # (sic) attribute name 'plataform' kept as-is for downstream consumers.
    data.plataform = sys.platform
    data.node = platform.node()
    data.result = result
    data.version = "14"  # Identifier used to filter logs
    # Record the approximate queue depth at processing time.
    storageKey = os.environ['StorageKey']
    queue_service = QueueService(account_name='funcpyqueue2storage',
                                 account_key=storageKey)
    metadata = queue_service.get_queue_metadata('fibonaccicalculatorqueue')
    data.queuedMessages = metadata.approximate_message_count
    stopTime = datetime.utcnow()
    # Derive duration and queue-to-start latency (both in seconds).
    messageInsertionTime = datetime.strptime(data.msgInsertionTime,
                                             '%Y-%m-%d %H:%M:%S')
    data.stop = f'{stopTime:%Y-%m-%d %H:%M:%S.%f%z}'
    data.durationSeconds = (stopTime-startTime).total_seconds()
    data.delayInStartProcessingFromMessage = (startTime - messageInsertionTime).total_seconds()
def createAzureQueues(self, account_name, account_key):
    """Connect to Azure queue storage and ensure the log queue exists.

    The queue holds unprocessed log messages; entries are dequeued,
    processed, and deleted on success.
    """
    self.queue_service = QueueService(account_name=account_name,
                                      account_key=account_key)
    self.queue_service.create_queue(self.queue_name)
def __init__(self):
    """Initialize the queue base with a connection to the storage account."""
    super(QueueBase, self).__init__()
    # SECURITY(review): the storage account key is hard-coded below. It
    # should be loaded from configuration/environment instead, and this
    # key should be rotated since it has been committed to source control.
    self.queue_service = QueueService(
        account_name='bobur',
        account_key=
        '6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw=='
    )
def enviar_aquivos_audio_blob(main_app, dir="audio_files/"):
    """Upload each .wav in *dir* to blob storage and enqueue a JSON message.

    For every file: derive the meeting code from the filename, upload the
    blob under '<meeting_code>/<path>', delete the local file, and publish
    a base64-encoded JSON payload to the audio queue. Progress is reported
    via print() and main_app.mensagem.

    Failures on one file are logged and do not stop the remaining files.
    """
    for file in glob.glob(dir + "*.wav"):
        try:
            print("Processando arquivo " + file + "...")
            # Filename convention: '<dir>/<meeting_code>_<rest>.wav'.
            meeting_code = file.split("_")[1].split("/")[1]
            blob = meeting_code + "/" + file
            print("Meeting code " + str(meeting_code))
            blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                            account_key=ACCOUNT_KEY)
            blob_service.create_blob_from_path(CONTAINER_NAME, blob, file)
            # Remove the local copy once it is safely in blob storage.
            if os.path.exists(file):
                os.remove(file)
            queue_service = QueueService(account_name=ACCOUNT_NAME,
                                         account_key=ACCOUNT_KEY)
            queue_service.encode_function = QueueMessageFormat.text_base64encode
            payload = {
                "meeting-code": meeting_code,
                "blob": blob,
                "file-name": util.get_file_with_extension(file)
            }
            payload = json.dumps(payload, ensure_ascii=False)
            queue_service.put_message(QUEUE_NAME_AUDIO, payload)
            print("Arquivo " + file + " processado com sucesso.")
            main_app.mensagem["text"] = "File " + file + " synced successfully"
        except Exception:
            # BUG FIX: the original bare `except:` called traceback.format_exc(),
            # which only *returns* the text — the value was discarded, so every
            # failure was silently swallowed. Print the traceback instead, and
            # narrow the clause so SystemExit/KeyboardInterrupt still propagate.
            traceback.print_exc()
def __init__(self):
    """Initialize the queue base with a connection to the storage account."""
    super(QueueBase, self).__init__()
    # SECURITY(review): the storage account key is hard-coded below. It
    # should be loaded from configuration/environment instead, and this
    # key should be rotated since it has been committed to source control.
    self.queue_service = QueueService(
        account_name='boburstorage',
        account_key=
        'wRgukLsyhLtnI7qEk8mSGnIBC+IsiTTXEDF1/xnmBGDudJLSeYdtyuVzuSN5/cplJz88AJPyoVyjCmL9N1ECXw=='
    )
def __init__(self):
    """Wire up signal handling, Twitter auth, and the Azure work queues.

    Reads FPS, YEAR_DURATION, AZURE_QUEUE_NAME, the TWITTER_* credentials
    and the AZURE_STORAGE_ACCOUNT_* credentials from the environment.
    """
    # Allow a clean shutdown on Ctrl-C.
    signal.signal(signal.SIGINT, self.signal_handler)
    self.fps = int(os.environ.get('FPS', '40'))
    self.yd = float(os.environ.get('YEAR_DURATION', '0.5'))
    self.queue_name = os.environ['AZURE_QUEUE_NAME']
    # Failed items go to a sibling queue named '<queue>fail'.
    self.fail_queue_name = '%sfail' % self.queue_name
    self.logger = logging.getLogger('main')
    # Authenticate against Twitter and verify the login by fetching the user.
    twitter_auth = tweepy.OAuthHandler(
        os.environ['TWITTER_CONSUMER_KEY'],
        os.environ['TWITTER_CONSUMER_SECRET'])
    twitter_auth.set_access_token(os.environ['TWITTER_APP_TOKEN_KEY'],
                                  os.environ['TWITTER_APP_TOKEN_SECRET'])
    self.twitter_api = tweepy.API(twitter_auth)
    twitter_user = self.twitter_api.me()
    self.print_stdout('Logged in as @%s' % twitter_user.screen_name)
    self.vid_tweet = VideoTweet(self.twitter_api)
    # Ensure both the main and the failure queues exist before consuming.
    self.queue_service = QueueService(
        account_name=os.environ['AZURE_STORAGE_ACCOUNT_NAME'],
        account_key=os.environ['AZURE_STORAGE_ACCOUNT_KEY'])
    self.queue_service.create_queue(self.queue_name)
    self.queue_service.create_queue(self.fail_queue_name)
    self.print_stdout('Created queue %s' % self.queue_name)
def __init__(self, queue_name, config: AzureStorageConfig):
    """Wrap the named Azure queue, transporting messages as base64 text."""
    self._queue_name = queue_name
    service = QueueService(account_name=config.account_name,
                           account_key=config.account_key)
    # Base64-frame text payloads so arbitrary characters survive the queue.
    service.encode_function = QueueMessageFormat.text_base64encode
    service.decode_function = QueueMessageFormat.text_base64decode
    self._queue_service = service
def __init__(self, **kwargs):
    """Configure the queue service from keyword settings.

    Recognized keys: AZURE_STORAGE_NAME, AZURE_STORAGE_KEY (service is only
    created when both are non-empty), AZURE_REQUIRE_ENCRYPTION plus
    AZURE_KEY_IDENTIFIER / SECRET_KEY for client-side encryption.
    """
    self._storage_name = kwargs.get('AZURE_STORAGE_NAME', '')
    self._storage_key = kwargs.get('AZURE_STORAGE_KEY', '')
    # Service init: only attempt a connection when both credentials are set.
    # NOTE(review): when credentials are missing, self._service is never
    # assigned; enabling encryption below would then raise AttributeError.
    if self._storage_key != '' and self._storage_name != '':
        self._service = QueueService(account_name=self._storage_name,
                                     account_key=self._storage_key,
                                     protocol='https')
    # Registered models.
    self._models = []
    # Optional client-side encryption of queue messages.
    if kwargs.get('AZURE_REQUIRE_ENCRYPTION', False):
        # Create the KEK used for encryption. KeyWrapper is the provided
        # sample implementation; any object satisfying the interface works.
        kek = KeyWrapper(kwargs.get('AZURE_KEY_IDENTIFIER', 'otrrentapi'),
                         kwargs.get('SECRET_KEY', 'super-duper-secret'))  # Key identifier
        # Create the key resolver used for decryption.
        key_resolver = KeyResolver()
        key_resolver.put_key(kek)
        # Set require-encryption, the KEK and the key resolver on the service.
        self._service.require_encryption = True
        self._service.key_encryption_key = kek
        # BUG FIX: attribute was misspelled 'key_resolver_funcion'; the SDK
        # reads 'key_resolver_function', so decryption never found a resolver.
        self._service.key_resolver_function = key_resolver.resolve_key
def main(myblob: func.InputStream):
    """Azure blob-trigger: parse an uploaded weather CSV into queue messages.

    Downloads the triggering blob from the 'weather-data' container, then
    for each data row with enough columns enqueues a JSON datapoint
    (date, city, country, temperature) onto the 'weather-data' queue.
    """
    #//TODO: Ensure queue name is 'weather-data'
    queue_name = 'weather-data'
    #//TODO: Add Storage Account Name and Key from https://portal.azure.com/#@[user_email]/resource/subscriptions/[subscription_id]/resourceGroups/[resource_group_name]/providers/Microsoft.Storage/storageAccounts/[storage_account_name]/keys
    # NOTE(review): the credentials below are unfilled '//TODO' placeholders;
    # this function cannot run until they are replaced with real values.
    block_blob_service = BlockBlobService(account_name='//TODO: ', account_key=' //TODO')
    queue_service = QueueService(account_name='//TODO', account_key='//TODO')
    queue_service.encode_function = QueueMessageFormat.text_base64encode
    # Blob name arrives as 'container/filename'; keep only the filename,
    # which is also used as the local download path.
    file_name = myblob.name.split("/")[1]
    #// Ensure that files are added to a blob container named 'weather-data'
    block_blob_service.get_blob_to_path('weather-data', file_name, file_name)
    with open(file_name, "r+") as file:
        reader = csv.reader(file)
        for idx, data in enumerate(reader):
            # Skip the header row.
            if idx != 0:
                if(len(data)> 13):
                    # Column 6 is 'city, country'; column 13 holds the
                    # temperature — the last two characters are stripped,
                    # presumably a unit suffix (TODO confirm format).
                    city, country = data[6].split(",")
                    datapoint =json.dumps({"date": data[1],"city": city, "country": country, "temperature": data[13][:-2]})
                    queue_service.put_message(queue_name, datapoint)
                else:
                    # Short row: log its length instead of enqueueing.
                    logging.info(len(data))
    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes\n"
                 )
def create_queue_from_storage_account(storage_account, name, session):
    """Create queue *name* in *storage_account* using an AAD token credential."""
    credential = StorageUtilities.get_storage_token(session)
    service = QueueService(account_name=storage_account.name,
                           token_credential=credential)
    return service.create_queue(name)
def __init__(
        self,
        account_name=None,
        account_key=None,
        protocol='https',
        queue='logs',
        message_ttl=None,
        visibility_timeout=None,
        base64_encoding=False,
        is_emulated=False,
):
    """Initialize the handler: connect to the queue and record send options."""
    logging.Handler.__init__(self)
    # Metadata substituted into the queue-name template below.
    self.meta = {'hostname': gethostname(), 'process': os.getpid()}
    self.service = QueueService(account_name=account_name,
                                account_key=account_key,
                                is_emulated=is_emulated,
                                protocol=protocol)
    self.queue = _formatName(queue, self.meta)
    # The queue is created lazily on first emit, not here.
    self.queue_created = False
    self.message_ttl = message_ttl
    self.visibility_timeout = visibility_timeout
    self.base64_encoding = base64_encoding
def test_sas_update(self):
    """An UPDATE-permission SAS token must allow update_message on the queue."""
    # SAS tokens are derived from the real storage key, so only run live.
    if TestMode.need_recordingfile(self.test_mode):
        return

    # Arrange: one queued message, an UPDATE-only token, then dequeue it.
    queue_name = self._create_queue()
    self.qs.put_message(queue_name, u'message1')
    sas_token = self.qs.generate_queue_shared_access_signature(
        queue_name,
        QueuePermissions.UPDATE,
        datetime.utcnow() + timedelta(hours=1),
    )
    fetched = self.qs.get_messages(queue_name)

    # Act: rewrite the message through a service authenticated only by the SAS.
    sas_service = QueueService(
        account_name=self.settings.STORAGE_ACCOUNT_NAME,
        sas_token=sas_token,
    )
    self._set_service_options(sas_service, self.settings)
    sas_service.update_message(
        queue_name,
        fetched[0].id,
        fetched[0].pop_receipt,
        visibility_timeout=0,
        content=u'updatedmessage1',
    )

    # Assert: re-reading with the account key sees the new content.
    fetched = self.qs.get_messages(queue_name)
    self.assertEqual(u'updatedmessage1', fetched[0].content)
def sas_with_signed_identifiers(self):
    """Demonstrate a SAS whose permissions come from a stored access policy."""
    queue_name = self._create_queue()
    self.service.put_message(queue_name, u'message1')

    # Attach a PROCESS policy (valid for one hour) to the queue under id 'id'.
    policy = AccessPolicy(permission=QueuePermissions.PROCESS,
                          expiry=datetime.utcnow() + timedelta(hours=1))
    self.service.set_queue_acl(queue_name, {'id': policy})

    # Give the new ACL time to propagate before using it.
    time.sleep(30)

    # The token names only the policy id; its rights come from the stored policy.
    sas = self.service.generate_queue_shared_access_signature(
        queue_name,
        id='id'
    )

    # Consume through a service authenticated solely by the SAS.
    reader = QueueService(
        account_name=self.account.account_name,
        sas_token=sas,
    )
    for msg in reader.get_messages(queue_name):
        print(msg.content)  # message1

    self.service.delete_queue(queue_name)
def queue_service(self):
    """Return the cached QueueService, constructing it on first access."""
    service = self._queue_service
    if service is None:
        # Connection info doubles as credentials: hostname is the account
        # name, password is the account key.
        service = QueueService(account_name=self.conninfo.hostname,
                               account_key=self.conninfo.password)
        self._queue_service = service
    return service
def __init__(self, account, key, queue_name):
    """Connect to the storage account and target *queue_name* for publishing."""
    service = QueueService(account_name=account, account_key=key)
    # Base64-encode outgoing text so any payload is queue-safe.
    service.encode_function = QueueMessageFormat.text_base64encode
    self.queue_service = service
    self.queue = queue_name
    self.TIME_LIMIT = True
def get_queue():
    """Return the request-context QueueService, creating the 'votes' queue once."""
    if hasattr(g, 'queue'):
        return g.queue
    # First use in this context: connect, ensure the queue, enable base64.
    g.queue = QueueService(account_name=storage_account,
                           account_key=storage_access_key)
    g.queue.create_queue('votes')
    g.queue.encode_function = QueueMessageFormat.text_base64encode
    return g.queue
def get_queue_client_by_uri(queue_uri, session):
    """Resolve *queue_uri* to a token-authenticated (QueueService, name) pair.

    The queue is created if it does not already exist.
    """
    account = StorageUtilities.get_storage_from_uri(queue_uri, session)
    service = QueueService(account_name=account.storage_name,
                           token_credential=account.token)
    service.create_queue(account.container_name)
    return service, account.container_name
def CreateQueue(azureQueueAccountName, azureQueueKey, azureQueueAnalysisResults):
    """Return a QueueService, ensuring the analysis-results queue exists."""
    service = QueueService(account_name=azureQueueAccountName,
                           account_key=azureQueueKey)
    # Only create the queue when it is not already present.
    if not service.exists(azureQueueAnalysisResults):
        service.create_queue(azureQueueAnalysisResults)
    return service
def check():
    """Enqueue a fresh monitoring marker message every ten seconds, forever."""
    service = QueueService(account_name=account_name, account_key=account_key)
    service.create_queue('monitoring')
    while True:
        print("Adding new user to scrap...")
        # The payload carries a unix timestamp to make each marker unique.
        service.put_message('monitoring', f'monitoring_{int(time.time())}')
        time.sleep(10)
def scrapping():
    """Poll the monitoring queue forever, acknowledging each message received."""
    service = QueueService(account_name=account_name, account_key=account_key)
    while True:
        batch = service.get_messages('monitoring')
        if batch:
            for msg in batch:
                print(f"Receiving user {msg.content} from monitoring service")
                # Acknowledge so the message is not redelivered.
                service.delete_message('monitoring', msg.id, msg.pop_receipt)
        time.sleep(30)
def get_queue_client_by_uri(queue_uri):
    """Resolve *queue_uri* into a (QueueService, queue_name) pair.

    The queue is created if it does not already exist.
    """
    qname, acct, acct_key = StorageUtilities.get_storage_from_uri(queue_uri)
    service = QueueService(account_name=acct, account_key=acct_key)
    service.create_queue(qname)
    return service, qname