def render_video(request):
    """Django view: take one message off the ready-to-encode queue and
    submit the referenced upload to Azure Media Services (AMS).

    For each dequeued message (at most one, ``num_messages=1``) it:
      1. creates an AMS Asset and an AssetFile describing the upload,
      2. copies the uploaded blob into the asset's storage container,
      3. submits an AMS Job with two tasks (adaptive-streaming encode
         and speech indexing),
      4. records the job on the encoding queue and deletes the original
         message.

    Returns the rendered ``app/render_video.html`` with a status string.
    """
    template = loader.get_template('app/render_video.html')
    vidstatus = 'No Video Found.'

    # Hide the message for 60s while we work on it; if anything below
    # fails, the message becomes visible again for a retry.
    queue_service = QueueService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'], account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
    messages = queue_service.get_messages(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'], num_messages=1, visibility_timeout=1*60)
    
    for message in messages:
        vidstatus = 'Queued for Rendering: ' + message.content
        # Message body is JSON with at least: filename, folder, size.
        message_obj = json.loads(message.content)

        access_token = ams_authenticate()['access_token']
        
        # Create the AMS asset; AlternateId keeps a link back to the
        # upload folder.
        asset = ams_post_request(access_token, "Assets", {
            'Name': message_obj['filename'], 
            'AlternateId': message_obj['folder']})
        
        # The asset Uri path (minus the leading '/') is the blob
        # container AMS created for this asset.
        asset_container = urllib.parse.urlparse(asset['Uri']).path[1:]

        asset_file = ams_post_request(access_token, "Files", {
            'IsEncrypted': 'false',
            'IsPrimary': 'false',
            'MimeType': 'video/mp4',
            'ContentFileSize': message_obj['size'],
            'Name': message_obj['filename'],
            'ParentAssetId': asset['Id']})

        # Server-side copy of the uploaded blob into the asset container.
        block_blob_service = BlockBlobService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'], account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
        from_url = block_blob_service.make_blob_url(os.environ['SVPD_STORAGE_ACCOUNT_UPLOADED'], message_obj['folder'] + '/' + message_obj['filename'])
        block_blob_service.copy_blob(asset_container, message_obj['filename'], from_url)

        # Two-task job: Media Encoder Standard (adaptive streaming) and
        # the Azure Media Indexer (captions/keywords). The MediaProcessorId
        # GUIDs identify those processors.
        job = ams_verbose_post_request(access_token, "Jobs", {
            'Name': message_obj['filename'], 
            'InputMediaAssets': [{
                '__metadata': { 'uri': os.environ['AMS_API_ENDPOINT'] + 'Assets(\'' + asset['Id'] + '\')' }
            }],
            'Tasks': [{
                'Name': 'Adaptive Streaming Task',
                'Configuration': 'Adaptive Streaming',
                'MediaProcessorId': 'nb:mpid:UUID:ff4df607-d419-42f0-bc17-a481b1331e56',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - MES v1.1" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(0)</outputAsset></taskBody>'
            },{
                'Name': 'Indexing Task',
                'Configuration': '<?xml version="1.0" encoding="utf-8"?><configuration version="2.0"><input><metadata key="title" value="blah" /></input><settings></settings><features><feature name="ASR"><settings><add key="Language" value="English" /><add key="GenerateAIB" value="False" /><add key="GenerateKeywords" value="True" /><add key="ForceFullCaption" value="False" /><add key="CaptionFormats" value="ttml;sami;webvtt" /></settings></feature></features></configuration>',
                'MediaProcessorId': 'nb:mpid:UUID:233e57fc-36bb-4f6f-8f18-3b662747a9f8',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - Indexed" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(1)</outputAsset></taskBody>'
            }]
            })

        # Hand the job details to the encoding-progress queue.
        queue_service.put_message(os.environ['SVPD_STORAGE_ACCOUNT_ENCODING'], json.dumps({ 
            'filename': message_obj['filename'],
            'folder': message_obj['folder'],
            'size': message_obj['size'],
            'job': job['d']}))

        # Only now acknowledge (delete) the source message.
        queue_service.delete_message(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'], message.id, message.pop_receipt)   

    return HttpResponse(template.render({
        'vidstatus': vidstatus,
    }, request))
def consume(args):
    """Poll an Azure storage queue forever, handing each message to
    process(); deletes handled messages unless running with --dry-run."""
    service = QueueService(
        account_name=os.environ.get('QUEUE_STORAGE_ACCOUNT'),
        account_key=os.environ.get('QUEUE_STORAGE_ACCESS_KEY'))
    service.decode_function = QueueMessageFormat.binary_base64decode

    logger.debug('Listening for messages on {}'.format(args.azure_queue))
    while True:
        batch = service.get_messages(
            args.azure_queue,
            num_messages=16,
            visibility_timeout=args.wait_time)
        for msg in batch:
            try:
                process(args, msg)
            except AzureException as e:
                logger.exception(e)
            # The message is deleted even when process() raised an
            # AzureException — at-most-once delivery.
            if not args.dry_run:
                logger.debug('deleting message {}'.format(msg.id))
                service.delete_message(args.azure_queue, msg.id,
                                       msg.pop_receipt)
        time.sleep(args.wait_time)
Beispiel #3
0
class AzureStorageQueue(Common.Contracts.Queue):
    """Queue contract backed by an Azure Storage queue.

    Message bodies are transparently base64 text encoded/decoded.
    """

    def __init__(self, queue_name, config: AzureStorageConfig):
        self._queue_name = queue_name
        self._queue_service = QueueService(account_name=config.account_name,
                                           account_key=config.account_key)

        self._queue_service.encode_function = QueueMessageFormat.text_base64encode
        self._queue_service.decode_function = QueueMessageFormat.text_base64decode

    def push(self, message):
        """Enqueue a message, creating the queue on first use."""
        self._queue_service.create_queue(self._queue_name)
        self._queue_service.put_message(self._queue_name, message)

    def pop(self):
        """Remove and return the first message, or None when empty.

        BUG FIX: the original called peek_messages(), whose results carry
        no pop_receipt, so the subsequent delete_message() call could not
        succeed. get_messages() with a visibility timeout both hides the
        message from other consumers and returns a valid pop_receipt.
        """
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.get_messages(self._queue_name,
                                                    visibility_timeout=30)
        for message in messages:
            result = message.content
            self._queue_service.delete_message(self._queue_name, message.id,
                                               message.pop_receipt)
            return result

    def peek(self):
        """Return the first message without removing it, or None."""
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.peek_messages(self._queue_name)
        for message in messages:
            return message.content
class AzureProvider(BaseProvider):
    """Queue Provider for the Microsoft Azure."""

    def __init__(self, account_name, account_key):
        self.queue_service = QueueService(
            account_name=account_name, account_key=account_key
        )

    def put_message(self, queue_name: str, message: str):
        """Publish *message* onto *queue_name*."""
        self.queue_service.put_message(
            queue_name=queue_name,
            message_text=message,
        )

    def get_message(self, queue_name: str):
        """Dequeue one message and expose its public attributes as a dict.

        Returns an empty dict when the queue has no visible messages.
        """
        fetched = self.queue_service.get_messages(
            queue_name=queue_name, numofmessages=1
        )
        try:
            queue_message = fetched.queue_messages[0]
        except IndexError:
            return {}
        attrs = {}
        for attr_name in dir(queue_message):
            if not attr_name.startswith('_'):
                attrs[attr_name] = getattr(queue_message, attr_name)
        return attrs

    def delete_message(self, queue_name: str, message: dict):
        """Delete a message previously returned by get_message()."""
        self.queue_service.delete_message(
            queue_name,
            message_id=message.get('message_id'),
            popreceipt=message.get('pop_receipt'),
        )
Beispiel #5
0
class QueueBase(object):
    """Helper around the 'boburstorage' account's storage queues.

    SECURITY(review): the account key is hard-coded below; it should be
    rotated and loaded from configuration or the environment instead.
    """
    def __init__(self):
        super(QueueBase, self).__init__()
        # Credentials embedded in source — see class docstring.
        self.queue_service = QueueService(
            account_name='boburstorage',
            account_key=
            'wRgukLsyhLtnI7qEk8mSGnIBC+IsiTTXEDF1/xnmBGDudJLSeYdtyuVzuSN5/cplJz88AJPyoVyjCmL9N1ECXw=='
        )

    def save_message_to_queue(self, queue, data):
        # Enqueue `data` (expected to be a blob URL) on `queue`.
        self.queue_service.put_message(queue, data)
        print('url added into queue...\n')

    def get_data_from_url(self):
        # Pop one message from the hard-coded 'taskqueue', treat its
        # content as a URL, download it and return the response lines.
        # NOTE(review): urllib2 is Python 2 only — this snippet predates
        # Python 3; under py3 it would need urllib.request.
        print('reading data from blob using url in queue...\n')
        data = self.queue_service.get_messages('taskqueue')

        response = urllib2.urlopen(data[0].content)
        numbers = response.read()

        self.queue_service.delete_message('taskqueue', data[0].id,
                                          data[0].pop_receipt)
        return numbers.splitlines()

    def get_messages_from_queue(self, queue):
        # Return the raw QueueMessage batch for `queue`.
        return self.queue_service.get_messages(queue)

    def delete_message_from_queue(self, queue, message):
        # Acknowledge (remove) a previously fetched message.
        self.queue_service.delete_message(queue, message.id,
                                          message.pop_receipt)
Beispiel #6
0
class QueueBase(object):
    """Storage-queue helper bound to the 'bobur' account.

    SECURITY(review): account key is committed in source; move it to
    configuration and rotate the key.
    """
    def __init__(self):
        super(QueueBase, self).__init__()
        # Hard-coded credentials — flagged in the class docstring.
        self.queue_service = QueueService(
            account_name='bobur',
            account_key=
            '6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw=='
        )

    def save_message_to_queue(self, queue, data):
        # Publish `data` (a blob URL) to the given queue.
        self.queue_service.put_message(queue, data)
        print('url added into queue...\n')

    def get_data_from_url(self):
        # Take one message from 'taskqueue', fetch the URL it contains,
        # and return the downloaded body split into lines.
        # NOTE(review): urllib2 implies Python 2 — port to
        # urllib.request for Python 3.
        print('reading data from blob using url in queue...\n')
        data = self.queue_service.get_messages('taskqueue')

        response = urllib2.urlopen(data[0].content)
        numbers = response.read()

        self.queue_service.delete_message('taskqueue', data[0].id,
                                          data[0].pop_receipt)
        return numbers.splitlines()

    def get_messages_from_queue(self, queue):
        # Raw message batch passthrough.
        return self.queue_service.get_messages(queue)

    def delete_message_from_queue(self, queue, message):
        # Delete a message fetched earlier (needs its pop_receipt).
        self.queue_service.delete_message(queue, message.id,
                                          message.pop_receipt)
Beispiel #7
0
class AzureQueue(object):
    """A minimal pickle-based task queue on top of an Azure Storage queue.

    SECURITY: tasks are serialized with pickle; see dequeue().
    """

    def __init__(self, queue_name):
        self.conn = QueueService(account_name=os.getenv('AZURE_ACCOUNT_NAME'),
                                 account_key=os.getenv('AZURE_ACCOUNT_KEY'))
        self.queue_name = queue_name
        self.conn.create_queue(queue_name)
        self.conn.encode_function = QueueMessageFormat.binary_base64encode
        self.conn.decode_function = QueueMessageFormat.binary_base64decode

    def enqueue(self, func, *args, **kwargs):
        """Serialize a SimpleTask and enqueue it; returns the task id."""
        task = SimpleTask(func, *args, **kwargs)
        serialized_task = pickle.dumps(task, protocol=pickle.HIGHEST_PROTOCOL)
        self.conn.put_message(self.queue_name, serialized_task)
        return task.id

    def dequeue(self):
        """Fetch, deserialize and delete one task; None when queue is empty.

        FIX: the original tested `len(messages) == 1`, which silently
        returns None if a future caller raises num_messages; a truthiness
        check on the batch is both idiomatic and robust.
        SECURITY: pickle.loads() executes arbitrary code from the message
        body — only safe if the queue is written exclusively by trusted
        producers. Flagged, not replaced, to preserve the wire format.
        """
        messages = self.conn.get_messages(self.queue_name)
        if messages:
            serialized_task = messages[0]
            task = pickle.loads(serialized_task.content)
            self.conn.delete_message(self.queue_name, serialized_task.id,
                                     serialized_task.pop_receipt)
            return task

    def get_length(self):
        """Approximate number of messages currently on the queue."""
        metadata = self.conn.get_queue_metadata(self.queue_name)
        return metadata.approximate_message_count
Beispiel #8
0
def scrapping():
    """Drain the 'monitoring' queue forever, announcing each user seen."""
    service = QueueService(account_name=account_name, account_key=account_key)
    while True:
        # Iterating an empty batch is a no-op, so no explicit guard needed.
        for msg in service.get_messages('monitoring'):
            print(f"Receiving user {msg.content} from monitoring service")
            service.delete_message('monitoring', msg.id, msg.pop_receipt)
        time.sleep(30)
Beispiel #9
0
class AzureStorageQueue(Queue):
    """Interface for interacting with an Azure Storage Queue (through the Queue
    contract)"""

    def __init__(self, queue_name, config: AzureStorageConfig):
        """Bind to *queue_name* on the storage account in *config*.

        The queue itself is created lazily on the first operation.
        Message bodies are base64 text encoded/decoded on the wire.
        """
        self._queue_name = queue_name
        service = QueueService(account_name=config.account_name,
                               account_key=config.account_key)
        service.encode_function = QueueMessageFormat.text_base64encode
        service.decode_function = QueueMessageFormat.text_base64decode
        self._queue_service = service

    def push(self, message):
        """Pushes a new message onto the queue."""
        self._queue_service.create_queue(self._queue_name)
        self._queue_service.put_message(self._queue_name, message)

    def pop(self):
        """Pops the first message from the queue and returns it."""
        self._queue_service.create_queue(self._queue_name)

        # get_messages (unlike peek_messages) hides the returned message
        # from other consumers for the visibility timeout, giving us time
        # to delete it safely.
        batch = self._queue_service.get_messages(self._queue_name,
                                                 visibility_timeout=30)
        for message in batch:
            self._queue_service.delete_message(self._queue_name, message.id,
                                               message.pop_receipt)
            return message.content

    def peek(self):
        """Peeks the first message from the queue and returns it."""
        self._queue_service.create_queue(self._queue_name)
        for message in self._queue_service.peek_messages(self._queue_name):
            return message.content

    @staticmethod
    def create(queue_name: str):
        """Build an AzureStorageQueue from AzureConfig's storage_config."""
        return AzureStorageQueue(queue_name, AzureConfig().storage_config)
Beispiel #10
0
def get_new_proxy():
    """Rotate the 'proxies' queue: take a proxy off the front, re-enqueue
    it at the back, and return it.

    Returns None when the queue is empty.
    BUG FIX: the original left new_proxy unbound on an empty queue and
    raised UnboundLocalError at the return statement.
    """
    queue_service = QueueService(account_name=account_name,
                                 account_key=account_key)
    new_proxy = None
    messages = queue_service.get_messages('proxies')
    if messages:
        for message in messages:
            new_proxy = message.content
            queue_service.delete_message('proxies', message.id,
                                         message.pop_receipt)
        # Put the (last) dequeued proxy back at the end of the queue.
        queue_service.put_message('proxies', new_proxy)
    return new_proxy
class RegistrationQueueClient(object):
    """Obtains next registration object from queue"""
    queueName = 'registrationsqueue'

    def __init__(self, account_name, account_key):
        self._queueService = QueueService(account_name, account_key)
        print("RegistrationQueue Initialized")

    def LookupTicket(self, processData):
        """Fetch one message; if processData() accepts it, delete it.

        BUG FIX: get_messages() returns a *list* of messages, but the
        original passed that list straight to processData and then read
        .message_id off the list itself, raising AttributeError. We now
        hand processData the individual message (at most one, since
        num_messages=1).
        NOTE(review): the .message_id attribute belongs to the pre-1.0
        azure.storage SDK; newer SDKs use .id — confirm the SDK in use.
        """
        messages = self._queueService.get_messages(self.queueName, num_messages=1)
        for message in messages:
            if processData(message):
                self._queueService.delete_message(self.queueName, message.message_id, message.pop_receipt)
Beispiel #12
0
class SimulationListener(Thread):
    """Daemon thread that services the simulated-machine queues.

    Polls two Azure storage queues (create / destroy) and forwards each
    base64-encoded JSON machine description to the local machine service
    over HTTP.
    """

    def __init__(self):
        Thread.__init__(self)
        self._quit = Event()
        self.daemon = True
        self.log = logging.getLogger(__name__)

        settings = Settings()
        self.create_queue = 'create-simulated-machine'
        self.destroy_queue = 'destroy-simulated-machine'

        self.queue_service = QueueService(
            account_name=settings.azure_queue_account,
            account_key=settings.azure_queue_key
        )
        self.queue_service.create_queue(self.create_queue)
        self.queue_service.create_queue(self.destroy_queue)

    def run(self):
        # dislike of unstoppable threads
        while not self._quit.is_set():
            try:
                messages = self.queue_service.get_messages(self.create_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Creating: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.post("http://localhost:8080/machines", machine_json)
                    self.queue_service.delete_message(self.create_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                # BUG FIX: Exception.message was removed in Python 3, so
                # `self.log.error(e.message)` raised AttributeError inside
                # the handler; log the exception (with traceback) instead.
                self.log.exception(e)

            try:
                messages = self.queue_service.get_messages(self.destroy_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Deleting: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.delete("http://localhost:8080/machines/" + machine["MachineId"])
                    self.queue_service.delete_message(self.destroy_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                self.log.exception(e)

            sleep(1)

    def quit(self):
        """Ask run() to exit after the current polling pass."""
        self._quit.set()
Beispiel #13
0
def counting():
    """Consume the 'newuser' queue forever, registering each user and
    rotating a proxy on every pass."""
    service = QueueService(account_name=account_name,
                           account_key=account_key)
    while True:
        # An empty batch simply skips the loop body.
        for message in service.get_messages('newuser'):
            print(f"Receiving {message.content} from newuser channel...")
            add_new_user(message.content)
            print(f"Deleteing {message.content} from queue of users...")
            service.delete_message('newuser', message.id,
                                   message.pop_receipt)
        new_proxy = get_new_proxy()
        print(f"Receiving proxy {new_proxy} from queue of proxies...")
        time.sleep(30)
Beispiel #14
0
class RegistrationQueueClient(object):
    """Obtains next registration object from queue"""
    queueName = 'registrationsqueue'

    def __init__(self, account_name, account_key):
        self._queueService = QueueService(account_name, account_key)
        print("RegistrationQueue Initialized")

    def LookupTicket(self, processData):
        """Pull at most one message; delete it when processData accepts it.

        BUG FIX: the original treated the list returned by get_messages()
        as a single message and read .message_id from the list, which
        raised AttributeError. Iterate the batch and act per message.
        NOTE(review): .message_id is the legacy azure.storage attribute
        (newer SDKs call it .id) — verify against the installed SDK.
        """
        messages = self._queueService.get_messages(self.queueName,
                                                   num_messages=1)
        for message in messages:
            if processData(message):
                self._queueService.delete_message(self.queueName,
                                                  message.message_id,
                                                  message.pop_receipt)
Beispiel #15
0
def main():
    """Drain an Azure storage queue to JSON files on disk.

    usage: get_message.py storage_account_name queue_name destination_folder

    The account key is read from
    ``<destination_folder>/<storage_account_name>/acc_key.txt``.
    Each message (optionally base64-encoded JSON) is written to its own
    file under the destination folder and then deleted from the queue.
    """
    # BUG FIX: the original indexed sys.argv before checking it, so a
    # missing argument raised IndexError instead of printing usage.
    if len(sys.argv) < 4:
        print("usage: get_message.py storage_account_name queue_name destination_folder")
        sys.exit(1)

    storage_account_name = str(sys.argv[1])
    queue_name = str(sys.argv[2])
    destination_folder = str(sys.argv[3])

    # Strict base64 shape check — messages may arrive base64-encoded.
    base64_regex = re.compile("^([A-Za-z0-9+/]{4})*([A-Za-z0-9+/]{3}=|[A-Za-z0-9+/]{2}==)?$")

    key_file = "{0}/{1}/acc_key.txt".format(destination_folder, storage_account_name)
    try:
        with open(key_file, "r") as f:
            # strip(): a trailing newline in the key file breaks auth.
            account_key = f.read().strip()
    except IOError:
        # BUG FIX: the original only printed here and crashed later with
        # NameError because account_key was never bound; exit instead.
        print("file not found or empty ", key_file)
        sys.exit(1)

    queue_service = QueueService(storage_account_name, account_key)

    metadata = queue_service.get_queue_metadata(queue_name)
    approximate_message_count = metadata.approximate_message_count
    print('fetching approximately {0} messages'.format(approximate_message_count))

    messages = queue_service.get_messages(queue_name)
    total = 0
    while messages:
        for message in messages:
            if base64_regex.match(message.content):
                msg = base64.b64decode(message.content)     # Received message is encoded in base 64
                data = json.loads(msg.decode('utf8'))
            else:
                data = json.loads(message.content)

            filename, folder = get_file_name(storage_account_name, queue_name, destination_folder)

            if not os.path.exists(folder):
                os.makedirs(folder)
            with open(r'{0}/{1}'.format(folder, filename), 'w') as f:
                json.dump(data, f, indent=2)
            # Delete only after the file has been written successfully.
            queue_service.delete_message(queue_name, message.id, message.pop_receipt)
            total += 1
            print('successfully processed {0} messages of ~{1} from {2} queue {3}'.format(total, approximate_message_count, storage_account_name, queue_name))

        messages = queue_service.get_messages(queue_name)
Beispiel #16
0
class AzureQueue(object):
  """Thin wrapper around a single Azure Storage queue (created on init)."""

  def __init__(self, account_name, account_key, queue_name):
    self.queue_name = queue_name
    self.queue_service = QueueService(account_name=account_name, account_key=account_key)
    self.queue_service.create_queue(self.queue_name)

  def put_message_into_queue(self, content) -> QueueMessage:
    """
    Publishes a message with `content`
    
    :param content: The queue message 

    :returns: A QueueMessage that has the message as well as metadata 
    :rtype: QueueMessage 
    """
    return self.queue_service.put_message(self.queue_name, content)

  def get_messages(self) -> list:
    """
    Retrieves the currently visible messages published to the queue.

    :returns: List of Queue messages
    :rtype: list 
    """
    return self.queue_service.get_messages(self.queue_name)

  def delete_message_from_queue(self, message_id, pop_receipt):
    """Delete a single message identified by id + pop receipt."""
    self.queue_service.delete_message(self.queue_name, message_id, pop_receipt)

  def get_message_count(self):
    """Approximate number of messages currently on the queue."""
    queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
    return queue_metadata.approximate_message_count

  def delete(self):
    """Delete the underlying queue itself."""
    return self.queue_service.delete_queue(self.queue_name)

  def empty(self):
    """Remove one batch of messages from the queue.

    BUG FIX: the original referenced the bare name `queue_service`
    (a NameError at runtime) instead of `self.queue_service`.
    NOTE(review): BATCH_NUMBER and TIMEOUT_IN_SECONDS must be module
    constants defined outside this chunk — confirm.
    """
    messages = self.queue_service.get_messages(self.queue_name,
                                               num_messages=BATCH_NUMBER,
                                               visibility_timeout=TIMEOUT_IN_SECONDS)
    for message in messages:
      self.queue_service.delete_message(self.queue_name, message.id, message.pop_receipt)
Beispiel #17
0
def queue():
    """Smoke-test the task queue: create it, publish one message, then
    print and delete everything received."""
    service = QueueService(account_name=config.STORAGE_ACCOUNT_NAME,
                           account_key=config.STORAGE_ACCOUNT_KEY)

    print("Creating task queue")
    task_queue_name = config.TASK_QUEUE_NAME
    service.create_queue(task_queue_name)
    print("Task queue created")

    service.put_message(task_queue_name, u'message1')

    for message in service.get_messages(task_queue_name, num_messages=16):
        print(message.content)
        service.delete_message(task_queue_name, message.id,
                               message.pop_receipt)
Beispiel #18
0
class MoeHandler():
    """Bridges an Azure queue of generation requests to Twitter replies."""

    def __init__(self, api, generator, queue_name, conn_str):
        self.api = api
        self.gen_util = Generate_util(generator)
        self.queue_name = queue_name
        self.queue_service = QueueService(connection_string=conn_str)
        self.queue_service.create_queue(queue_name)

    def read_messages(self):
        """Poll the queue forever, dispatching each event by msg_type."""
        while True:
            try:
                batch = self.queue_service.get_messages(self.queue_name)
                if not batch:
                    time.sleep(2)  # nothing queued; back off briefly
                    continue
                for message in batch:
                    event = json.loads(message.content)
                    kind = event['msg_type']
                    if kind == 'tweet':
                        self.handle_tweet(event)
                    elif kind == 'dm':
                        self.handle_dm(event)
                    self.queue_service.delete_message(
                        self.queue_name, message.id, message.pop_receipt)
            except Exception as e:
                print("Exception handling messages")
                print(e)

    def handle_dm(self, event):
        """Generate an image for a DM request and send it back."""
        imgpth = self.gen_util.parse_msg(event['text'])
        media = self.api.media_upload(imgpth)
        self.api.send_direct_message(
            recipient_id=event['user_id'], text="Here's your generated anime girl!", attachment_type='media', attachment_media_id=media.media_id)

    def handle_tweet(self, event):
        """Generate an image and post it as a reply to the request tweet."""
        imgpth = self.gen_util.parse_msg(event['text'])
        self.api.update_with_media(imgpth, status="Here's your generated anime girl!",
                                   in_reply_to_status_id=event['respond_id'], auto_populate_reply_metadata=True)
class TimeLineWorker:
    """Round-trips JSON test messages through a storage queue.

    QUEUE_KEY / ACCOUNT_NAME / QUEUE_NAME are expected to be filled in
    before use.
    """

    QUEUE_KEY = ''
    ACCOUNT_NAME = ''
    QUEUE_NAME = ''

    def __init__(self):
        self.queue_service = QueueService(account_name=TimeLineWorker.ACCOUNT_NAME, 
                                          account_key=TimeLineWorker.QUEUE_KEY)

    def insert_message(self):
        """Enqueue a fixed test payload as JSON."""
        obj = {
            "message": "test message",
            "other_key": 10
        }
        # BUG FIX: the original wrapped this in unicode(), which does not
        # exist on Python 3; json.dumps already returns str there.
        message = json.dumps(obj)
        self.queue_service.put_message(TimeLineWorker.QUEUE_NAME, message)

    def get_next_message(self):
        """Print and delete every currently visible message."""
        messages = self.queue_service.get_messages(TimeLineWorker.QUEUE_NAME)
        for message in messages:
            # BUG FIX: was the Python 2 print statement (a SyntaxError on
            # Python 3, which the rest of this file targets via f-strings).
            print(message.content)
            self.queue_service.delete_message(TimeLineWorker.QUEUE_NAME, message.id, message.pop_receipt)
# Optional Chronos endpoint passed through the environment.
if "chronos" in os.environ:
    chronos = os.environ['chronos']

# Poll loop: pull work items off an Azure storage queue and, in Docker
# mode, launch a stock-report container for each one.
# NOTE(review): azurestoracct, azurequeuekey, azurequeue, docker_image,
# delay and docker are presumably defined earlier in the file — confirm.
while True:
    # set up azure queue
    queue_service = QueueService(account_name=azurestoracct, account_key=azurequeuekey)

    # get messages from azure queue
    messages = queue_service.get_messages(azurequeue, num_messages=5)

    # delete from queue, create container, start container
    for message in messages:
        
        # delete message from azure queue
        # NOTE(review): the message is deleted *before* the container is
        # created, so a failure below loses the work item.
        queue_service.delete_message(azurequeue, message.id, message.pop_receipt)
                
        if "docker" in os.environ:

            # sample json
            # {"Image": "neilpeterson/stock-report","Cmd": ["--symbols=msft;lnkd", "[email protected]"],"Env": ["gmuser = [email protected]", "gmpass = TempForDemo2016"]}
            # Message content is expected to be "<symbols>:<email>".
            s = message.content.split(':')
            data = json.loads('{"Image": "' + docker_image + '","Cmd": ["--symbols=' + s[0] +'", "--email=' + s[1] + '","--delay=' + str(delay) + '"]}')
            print(data)
        
            # create and start docker container via the Docker remote API
            headers = {'Content-Type': 'application/json'}
            r = requests.post(docker + "create", data=json.dumps(data), headers=headers)
            b = json.loads(r.text)
            x = requests.post(docker + b['Id'] + "/start")
Beispiel #21
0
    'redis_server': os.environ['REDIS_SERVER'],
    'sms_user': os.environ['SMS_USER'],
    'sms_pass': os.environ['SMS_PASS']
}

# Unpack service credentials (service_keys is built above from os.environ).
stor_acc_name = service_keys['stor_acc_name']
stor_acc_key = service_keys['stor_acc_key']
redis_pass = service_keys['redis_pass']
redis_server = service_keys['redis_server']
sms_user = service_keys['sms_user']
sms_pass = service_keys['sms_pass']


# storage
queue_service = QueueService(account_name=stor_acc_name, account_key=stor_acc_key)

# redis
# Azure Redis Cache: SSL endpoint on port 6380.
r = redis.StrictRedis(host=redis_server, port=6380, db=0, password=redis_pass, ssl=True)

# Poll 'taskqueue' forever; each message carries a user suffix and an
# image reference, and we SMS the image reference to the mobile number
# stored in Redis under that suffix.
# NOTE(review): message.message_text / numofmessages / visibilitytimeout
# belong to the pre-1.0 azure.storage API — confirm the SDK version.
while True:
    messages = queue_service.get_messages('taskqueue', numofmessages=16, visibilitytimeout=5*60)
    for message in messages:
        d = json.loads(message.message_text)
        suffix = d['suffix']
        mobile = r.get(suffix)  # recipient number keyed by suffix
        image = d['image']
        odp = str(image)
        payload = {'username': sms_user, 'password': sms_pass, 'from': 'Alert', 'to': mobile, 'message': odp}
        post = requests.post('https://api.smsapi.pl/sms.do', data=payload)
        # Acknowledge the message once the SMS POST has been attempted.
        queue_service.delete_message('taskqueue', message.message_id, message.pop_receipt)
class AzureQueueStream:
    """Frame source that switches its image stream based on commands read
    from an Azure storage queue ('rpi-queue') and reports detections back
    on 'bot-queue'.
    """

    def __init__(self, start_topic, bingKey, computerVisionApiKey, storageKey):
        self.stop_event = threading.Event()
        # Counter limits how many results are sent back per topic.
        self.counter = 0
        self.bingKey = bingKey
        self.computerVisionApiKey = computerVisionApiKey

        # Queue access is optional; without a storage key the stream just
        # serves the initial topic.
        self.queue_service = None
        if storageKey:
            self.queue_service = QueueService(account_name='rpiimagedetectj34n5m', account_key=storageKey)
            self.queue_service.create_queue('rpi-queue')
        downloader = ImageDownloader(self.computerVisionApiKey)
        self.source_thread = FrameStream(BingImageSource(start_topic, self.bingKey, downloader)).start()

    def start(self):
        """Begin polling the command queue on a daemon thread."""
        t = threading.Thread(target=self.get_topic)
        t.daemon = True
        t.start()
        return self
    
    def get_topic(self):
        """Poll loop: consume queued topic-change commands and restart the
        frame source accordingly."""
        while True:
            if self.stop_event.is_set():
                return

            # drain queue, retrieve last message
            lastMsg = None

            if self.queue_service:
                messages = self.queue_service.get_messages('rpi-queue')
                for message in messages:
                    lastMsg = message
                    self.queue_service.delete_message('rpi-queue', message.id, message.pop_receipt)        

            if lastMsg and lastMsg.content:
                # Message body is base64-encoded JSON; "Text" holds either
                # a search topic or a direct image URL.
                jsonMsg = json.loads(base64.b64decode(lastMsg.content).decode('ascii'))
                searchTopic = jsonMsg.get("Text", "dog")
                self.source_thread.stop()
                downloader = ImageDownloader(self.computerVisionApiKey, jsonMsg)
                if searchTopic.startswith("http"):
                    self.source_thread = FrameStream(RemoteImageSource(searchTopic, downloader)).start()
                else:
                    print("Bing Image Search topic:", searchTopic)
                    self.source_thread = FrameStream(BingImageSource(searchTopic, self.bingKey, downloader), queueSize = 16).start()
                # Reset per-topic reply budget (see send_to_azure).
                self.counter = 0
            time.sleep(2)
 
    def next_frame(self):
        """Delegate to the currently active frame source."""
        return self.source_thread.next_frame()

    def stop(self):
        """Stop the frame source and the polling loop."""
        self.source_thread.stop()
        self.stop_event.set()

    def send_to_azure(self, frameInfo, predictions):
        """Publish at most 10 prediction results per topic to 'bot-queue',
        echoing the original message template with url/text/label filled."""
        msg = frameInfo.get("attrs", {}).get("template")
        if self.queue_service and self.counter < 10 and msg and msg.get("RelatesTo", {}):
            self.counter += 1
            msg["Url"] = frameInfo.get("attrs", {}).get("url", "")
            msg["Text"] = None
            if predictions:
                msg["Text"] = predictions[0][0]
            msg["Label"] = frameInfo.get("attrs", {}).get("visionApiLabel", "")
            self.queue_service.put_message('bot-queue', base64.b64encode(json.dumps(msg).encode('ascii')).decode('ascii'))
Beispiel #23
0
class Azure_Storage():
    """Facade over the three Azure storage services used by this pipeline.

    Bundles Table storage (classification results), Blob storage (images) and
    Queue storage (task messages) behind one object configured from ``config``.
    """

    def __init__(self, create_new=False):
        """Connect to the storage account.

        :param create_new: when True, also create the task queue, blob
            container and table (idempotent on the Azure side).
        """
        account_name = config.STORAGE_ACCOUNT_NAME
        account_key = config.STORAGE_ACCOUNT_KEY

        self.task_queue_name = config.TASK_QUEUE_NAME
        self.table_name = config.TABLE_NAME
        self.container_name = config.BLOB_CONTAINER_NAME
        self.ImagePartitionKey = config.IMAGE_PARTITION_KEY

        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)
        self.block_blob_service = BlockBlobService(account_name=account_name,
                                                   account_key=account_key)
        self.queue_service = QueueService(account_name=account_name,
                                          account_key=account_key)

        if create_new:
            # BUG FIX: these calls previously used bare names
            # (queue_service, task_queue_name, ...) that do not exist in this
            # scope and raised NameError; they must go through self.
            self.queue_service.create_queue(self.task_queue_name)
            self.block_blob_service.create_container(self.container_name)
            self.table_service.create_table(self.table_name)

    def put_image(self, image_uuid, image_bytes):
        """Store raw image bytes as a blob named by the image uuid."""
        ret = self.block_blob_service.create_blob_from_bytes(
            self.container_name, image_uuid, image_bytes)
        return ret

    def get_image(self, image_uuid):
        """Return the raw bytes of a previously stored image blob."""
        ret = self.block_blob_service.get_blob_to_bytes(
            self.container_name, image_uuid).content
        return ret

    def put_classification_result(self, image_uuid, results):
        """Insert or replace the classification-result row for an image.

        ``results`` is stored via str(); callers get it back as a string.
        """
        task = Entity()
        task.PartitionKey = self.ImagePartitionKey
        task.RowKey = image_uuid
        task.results = str(results)
        ret = self.table_service.insert_or_replace_entity(
            self.table_name, task)
        return ret

    def get_classification_result(self, image_uuid):
        """Return the stored result string for an image, or None if absent.

        Any lookup/service error is treated as "no result yet".
        """
        try:
            task = self.table_service.get_entity(self.table_name,
                                                 self.ImagePartitionKey,
                                                 image_uuid)
            return task.results
        except Exception:
            return None

    def put_task(self, taskmsg):
        """Enqueue a work item on the task queue."""
        ret = self.queue_service.put_message(self.task_queue_name, taskmsg)
        return ret

    # payload is in message.content
    def get_task(self, num_messages=16):
        """Dequeue up to num_messages tasks, hidden from other readers for 60s."""
        messages = self.queue_service.get_messages(self.task_queue_name,
                                                   num_messages=num_messages,
                                                   visibility_timeout=1 * 60)
        return messages

    def delete_task(self, message):
        """Permanently remove a previously dequeued task message."""
        ret = self.queue_service.delete_message(self.task_queue_name,
                                                message.id,
                                                message.pop_receipt)
        return ret
def rendered_video(request):
    """Check the 'encoding' queue for a finished Azure Media Services job and publish it.

    Dequeues at most one message (hidden for 60s while we work). If the
    referenced AMS job has finished (State == 3): copies the indexer output
    (captions) into the adaptive-streaming asset's container, ensures a
    long-lived 'StreamingAccessPolicy' and an origin locator exist, records
    the manifest/vtt URIs in DocumentDB, deletes the job plus temporary
    assets, and removes the queue message. Renders a status page either way.
    """
    ism_uri = ''
    vtt_uri = ''
    template = loader.get_template('app/rendered_video.html')
    vidstatus = 'No Running Job Found.'

    # Get the next message from the queue
    queue_service = QueueService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'], account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
    messages = queue_service.get_messages(os.environ['SVPD_STORAGE_ACCOUNT_ENCODING'], num_messages=1, visibility_timeout=1*60)

    for message in messages:
        vidstatus = 'Rendering: ' + message.content
        message_obj = json.loads(message.content)

        access_token = ams_authenticate()['access_token']

        # Get the details about the job
        job = ams_get_request(access_token, message_obj['job']['__metadata']['uri'])

        # is it done? (AMS v2 job State 3 == Finished)
        if job['State'] == 3:
            vidstatus = 'Done Rendering: ' + message.content

            # get a reference to our storage container
            block_blob_service = BlockBlobService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'], account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])

            # get a list of all the input and output assets associated to our job
            input_assets = ams_get_request(access_token, message_obj['job']['InputMediaAssets']['__deferred']['uri'])
            output_assets = ams_get_request(access_token, message_obj['job']['OutputMediaAssets']['__deferred']['uri'])

            # look through the output assets to figure out which one holds the
            # indexer output and which one the adaptive streaming files
            index_asset = ''
            stream_asset = ''
            for output_asset in output_assets['value']:
                if output_asset['Name'].endswith('- Indexed'):
                    index_asset = output_asset
                elif output_asset['Name'].endswith('- MES v1.1'):
                    stream_asset = output_asset

            # Get the storage container names for each
            dest_container = urllib.parse.urlparse(stream_asset['Uri']).path[1:]
            src_container = urllib.parse.urlparse(index_asset['Uri']).path[1:]

            # loop over the indexer output files copying them to the adaptive streaming container
            src_blobs = block_blob_service.list_blobs(src_container)
            for src_blob in src_blobs:
                # BUG FIX: the source URL was previously built from
                # `output_asset`, a stale variable left over from the loop
                # above (whichever asset happened to be listed last). The
                # blobs being copied are listed from the indexer asset's
                # container, so the URL must come from index_asset.
                block_blob_service.copy_blob(dest_container, src_blob.name, index_asset['Uri'] + '/' + src_blob.name)

            # create the access policy if it doesn't exist
            access_policies = ams_get_request(access_token, os.environ['AMS_API_ENDPOINT'] + 'AccessPolicies')
            access_policy_id = ''
            for access_policy in access_policies['value']:
                if access_policy['Name'] == 'StreamingAccessPolicy':
                    access_policy_id = access_policy['Id']

            if access_policy_id == '':
                # 52594560 minutes ~= 100 years; Permissions '9' is presumably
                # Read (1) + List (8) -- confirm against the AMS v2 docs.
                access_policy = ams_verbose_post_request(access_token, 'AccessPolicies', {
                  'Name': 'StreamingAccessPolicy',
                  'DurationInMinutes': '52594560',
                  'Permissions': '9'
                })
                access_policy_id = access_policy['d']['Id']

            # create the locator (Type 2 = OnDemandOrigin streaming locator)
            locator = ams_verbose_post_request(access_token, 'Locators', {
                  'AccessPolicyId': access_policy_id,
                  'AssetId': stream_asset['Id'],
                  'Type': 2
                })

            # get the URLs to the streaming endpoint and the vtt file
            locator_asset_files = ams_get_request(access_token, os.environ['AMS_API_ENDPOINT'] + 'Assets(\'' + locator['d']['AssetId']  + '\')/Files')
            for locator_asset_file in locator_asset_files['value']:
                if locator_asset_file['Name'].endswith('.ism'):
                    ism_uri = locator['d']['Path'] + locator_asset_file['Name'] + '/manifest'
                    vtt_uri = locator['d']['Path'] + message_obj['filename'] + '.vtt'

            # delete the job
            ams_delete_request(access_token, message_obj['job']['__metadata']['uri'])

            # delete the unused assets (indexer output was copied; input is spent)
            ams_delete_request(access_token, os.environ['AMS_API_ENDPOINT'] + 'Assets(\'' + index_asset['Id'] + '\')')
            ams_delete_request(access_token, os.environ['AMS_API_ENDPOINT'] + 'Assets(\'' + input_assets['value'][0]['Id'] + '\')')

            # add the video to the database
            client = document_client.DocumentClient(os.environ['DOCUMENT_ENDPOINT'], {'masterKey': os.environ['DOCUMENT_KEY']})
            db = docdb_CreateDatabaseIfNotExists(client, 'svpd')
            collection = docdb_CreateCollectionIfNotExists(client, db, 'videos')

            doc = client.CreateDocument(collection['_self'],
            { 
                'id': message_obj['folder'].replace('/', '.'),
                'filename': message_obj['filename'],
                'vtt_uri': vtt_uri,
                'ism_uri': ism_uri
            })

            # remove the message from the queue
            queue_service.delete_message(os.environ['SVPD_STORAGE_ACCOUNT_ENCODING'], message.id, message.pop_receipt)

    return HttpResponse(template.render({
        'vidstatus': vidstatus,
        'vtt_uri': vtt_uri,
        'ism_uri': ism_uri
    }, request))
Beispiel #25
0
class MainPawWorker:
    """
    Main class to use for running a worker. call start_workers() to start.
    """
    def __init__(self,
                 azure_storage_name,
                 azure_storage_private_key,
                 azure_queue_name,
                 azure_table_name,
                 tasks_module,
                 workers,
                 visibility_timeout=VISIBILITY_TIMEOUT):
        """
        :param azure_storage_name: Name of Azure storage account
        :param azure_storage_private_key: Private key of Azure storage account.
        :param azure_queue_name: Name of the Azure queue to use.
        :param azure_table_name: Name of the Azure table to use.
        :param tasks_module: Module containing decorated functions to load from.
        :param workers: Int of workers. Ex: 4
        :param visibility_timeout: Seconds in int to keep message in Azure queue
        :raises PawError: if visibility_timeout exceeds MAXIMUM_VISIBILITY_TIMEOUT.
        """
        self.account_name = azure_storage_name
        self.account_key = azure_storage_private_key
        self.queue_name = azure_queue_name
        self.table_name = azure_table_name
        self.tasks_module = tasks_module
        self.workers = workers
        self.visibility_timeout = visibility_timeout

        if self.visibility_timeout > MAXIMUM_VISIBILITY_TIMEOUT:
            raise PawError('self.visibility_timeout bigger than allowed limit')

        self.queue_service = QueueService(account_name=self.account_name,
                                          account_key=self.account_key)
        self.table_service = TableService(account_name=self.account_name,
                                          account_key=self.account_key)
        # Bounded hand-off queue between the dispatch loop and the pool:
        # one slot per worker.
        self.local_queue = Queue(self.workers)
        # self.logger = logging.getLogger()
        self.logger = LOGGER

        self.logger.info(PAW_LOGO)

        # Single Worker callable shared as the entry point of every pool process.
        self.worker_process = Worker(
            local_queue=self.local_queue,
            queue_service=self.queue_service,
            queue_name=self.queue_name,
            table_service=self.table_service,
            table_name=azure_table_name,
            tasks=self._load_tasks(),
        )
        self.pool = Pool(self.workers, self.worker_process.run, ())
        signal.signal(signal.SIGTERM, self.on_exit)

    def on_exit(self, signum, frame):
        """SIGTERM handler: terminate the worker pool and exit the process."""
        self.pool.terminate()
        sys.exit()

    def _load_tasks(self):
        """
        Loads and returns decorated functions from a given modules, as a dict
        """
        # Keep only module members that are functions carrying the 'paw'
        # attribute set by the task decorator.
        tasks = dict([
            o for o in getmembers(self.tasks_module)
            if isfunction(o[1]) and hasattr(o[1], 'paw')
        ])

        for t, f in tasks.items():
            self.logger.info("REGISTERED '{}'".format(t))
            if f.description:
                self.logger.info("\tdescription: '{}'".format(f.description))
        if not tasks:
            self.logger.warning("No tasks found...")

        return tasks

    def start_workers(self, sleep_for=5):
        """
        Starts workers and picks message from the Azure queue. On new
        message, when the local queue has room, the message is placed for a
        worker to pick-up
        :param sleep_for: Seconds to sleep for after a loop end.
        """
        self.queue_service.create_queue(self.queue_name)
        create_table_if_missing(self.table_service, self.table_name)

        # Startup cleanup: any task still flagged STARTED belonged to a worker
        # from a previous run that died before recording a final status.
        try:
            self.logger.info(
                "Cleaning up dead jobs left in {}".format(STARTED))
            dead_jobs = self.table_service.query_entities(
                table_name=self.table_name,
                filter="status eq '{}'".format(STARTED))
            for job in dead_jobs.items:
                log_to_table(table_service=self.table_service,
                             table_name=self.table_name,
                             message=job,
                             status=LOST_WORKER,
                             result="Lost worker, or task aborted.")

        except AzureException as e:
            self.logger.error("Cleaning dead tasks failed: {}".format(e))

        # Main dispatch loop: pull one Azure message at a time and hand it to
        # the local queue for a pooled worker to execute.
        while True:
            if self.local_queue.full():
                time.sleep(sleep_for)

            try:
                new_msg = self.queue_service.get_messages(
                    queue_name=self.queue_name,
                    num_messages=1,
                    visibility_timeout=self.visibility_timeout)
            except AzureException:
                # The queue may be missing (e.g. deleted externally); try to
                # recreate it and retry on the next pass.
                self.logger.error("Error while getting message "
                                  "from Azure queue. Trying to create "
                                  "the queue")
                self.queue_service.create_queue(self.queue_name)
                time.sleep(sleep_for)
                continue

            if new_msg:
                msg = new_msg[0]
                try:
                    content = json.loads(msg.content)
                except json.JSONDecodeError:
                    # Unparseable payload: delete it so it cannot poison the
                    # queue forever.
                    self.logger.critical('Json error {}'.format(
                        traceback.format_exc()))
                    try:
                        self.queue_service.delete_message(
                            queue_name=self.queue_name,
                            message_id=msg.id,
                            pop_receipt=msg.pop_receipt)
                    except AzureException:
                        self.logger.critical(
                            'Deleting invalid message from queue failed: '
                            '{}'.format(traceback.format_exc()))
                    continue

                # Poison-message guard: give up on messages that keep
                # reappearing after repeated visibility timeouts.
                if msg.dequeue_count > MAXIMUM_DEQUEUE_COUNT:
                    log_to_table(
                        table_service=self.table_service,
                        table_name=self.table_name,
                        message=content,
                        status=FAILED,
                        result="PAW MESSAGE: Dequeue count exceeded.",
                    )
                    self.queue_service.delete_message(self.queue_name, msg.id,
                                                      msg.pop_receipt)
                    continue

                # Attach the raw queue message — presumably so the worker can
                # delete/ack it on completion (see Worker) — TODO confirm.
                content['msg'] = msg
                while True:
                    try:
                        self.local_queue.put_nowait(content)
                        break
                    except Full:
                        self.logger.info('LOCAL QUEUE FULL: waiting...')
                        time.sleep(sleep_for)

                self.logger.debug('ADDING: {}'.format(content['task_name']))

            time.sleep(sleep_for)
def deletemessage(messageid, popreceipt):
    """Delete one message, identified by its id and pop receipt, from the
    module-configured Azure storage queue."""
    service = QueueService(account_name=azurestoracct,
                           account_key=azurequeuekey)
    service.delete_message(azurequeue, messageid, popreceipt)
from azure.storage.queue import QueueService

queue_service = QueueService(account_name='xxx', account_key='xxx')

# Fetch at most one message, keeping it invisible to other consumers
# for five minutes while we handle it.
messages = queue_service.get_messages('queue02',
                                      num_messages=1,
                                      visibility_timeout=5 * 60)

# Print and permanently remove whatever was received.
for msg in messages:
    print(msg.content)
    queue_service.delete_message('queue02', msg.id, msg.pop_receipt)
Beispiel #28
0
class StorageQueueContext():
    """Repository over Azure storage queues for registered StorageQueueModel types.

    Required settings in the config kwargs are:
        - AZURE_STORAGE_NAME
        - AZURE_STORAGE_KEY
    Optional: AZURE_REQUIRE_ENCRYPTION, AZURE_KEY_IDENTIFIER, SECRET_KEY.
    """

    # class-level defaults; __init__ rebinds them per instance
    _models = []
    _service = None
    _storage_key = ''
    _storage_name = ''

    def __init__(self, **kwargs):
        """Create the QueueService and optionally enable client-side encryption."""
        self._storage_name = kwargs.get('AZURE_STORAGE_NAME', '')
        self._storage_key = kwargs.get('AZURE_STORAGE_KEY', '')

        # service init: only connect when both credentials were supplied
        if self._storage_key != '' and self._storage_name != '':
            self._service = QueueService(account_name = self._storage_name, account_key = self._storage_key, protocol='https')

        # registered models
        self._models = []

        # encrypt queue service
        if kwargs.get('AZURE_REQUIRE_ENCRYPTION', False):

            # Create the KEK used for encryption.
            # KeyWrapper is the provided sample implementation, but the user may use their own object as long as it implements the interface above.
            kek = KeyWrapper(kwargs.get('AZURE_KEY_IDENTIFIER', 'otrrentapi'), kwargs.get('SECRET_KEY', 'super-duper-secret')) # Key identifier

            # Create the key resolver used for decryption.
            # KeyResolver is the provided sample implementation, but the user may use whatever implementation they choose so long as the function set on the service object behaves appropriately.
            key_resolver = KeyResolver()
            key_resolver.put_key(kek)

            # Set the require Encryption, KEK and key resolver on the service object.
            self._service.require_encryption = True
            self._service.key_encryption_key = kek
            # BUG FIX: the attribute was misspelled 'key_resolver_funcion',
            # so the SDK never received the resolver and retrieved messages
            # could not be decrypted.
            self._service.key_resolver_function = key_resolver.resolve_key

    def __create__(self, queue) -> bool:
        """Create the backing queue; True on success or when no service is configured."""
        if (not self._service is None):
            try:
                self._service.create_queue(queue)
                return True
            except AzureException as e:
                log.error('failed to create {} with error {}'.format(queue, e))
                return False
        else:
            return True

    def register_model(self, storagemodel:object):
        """Register a StorageQueueModel: create its backing queue once and
        remember the model name for later validation."""
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (not modelname in self._models):
                self.__create__(storagemodel._queuename)
                self._models.append(modelname)
                log.info('model {} registered successfully. Models are {!s}'.format(modelname, self._models))

    def put(self, storagemodel:object) -> StorageQueueModel:
        """ insert queue message into storage; returns the model or None on failure """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                try:
                    # send the rendered message and merge id/pop_receipt back in
                    message = self._service.put_message(storagemodel._queuename, storagemodel.getmessage())
                    storagemodel.mergemessage(message)

                except AzureException as e:
                    log.error('can not save queue message:  queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def peek(self, storagemodel:object) -> StorageQueueModel:
        """ lookup the next message in queue without dequeuing it """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                try:
                    messages = self._service.peek_messages(storagemodel._queuename, num_messages=1)

                    # parse retrieved message
                    for message in messages:
                        storagemodel.mergemessage(message)

                    # no message retrieved?
                    if storagemodel.id is None:
                        storagemodel = None

                except AzureException as e:
                    log.error('can not peek queue message:  queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def get(self, storagemodel:object, hide = 0) -> StorageQueueModel:
        """ dequeue the next message; hide > 0 sets its visibility timeout in seconds """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                try:
                    if hide > 0:
                        messages = self._service.get_messages(storagemodel._queuename, num_messages=1, visibility_timeout = hide)
                    else:
                        messages = self._service.get_messages(storagemodel._queuename, num_messages=1)

                    # parse retrieved message
                    for message in messages:
                        storagemodel.mergemessage(message)

                    # no message retrieved?
                    if storagemodel.id is None:
                        storagemodel = None

                except AzureException as e:
                    log.error('can not get queue message:  queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def update(self, storagemodel:object, hide = 0) -> StorageQueueModel:
        """ update the content/visibility of a previously dequeued message """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                # the model must carry id + pop_receipt from a previous get()
                if (storagemodel.id != '') and (storagemodel.pop_receipt != '') and (not storagemodel.id is None) and (not storagemodel.pop_receipt is None):
                    try:
                        content = storagemodel.getmessage()
                        message = self._service.update_message(storagemodel._queuename, storagemodel.id, storagemodel.pop_receipt, visibility_timeout = hide, content=content)
                        storagemodel.content = content
                        storagemodel.pop_receipt = message.pop_receipt

                    except AzureException as e:
                        log.error('can not update queue message:  queue {} with message.id {!s} because {!s}'.format(storagemodel._queuename, storagemodel.id, e))
                        storagemodel = None
                else:
                    log.info('cant update queuemessage {} due to missing id and pop_receipt'.format(modelname))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def delete(self, storagemodel:object) -> bool:
        """ delete a previously dequeued message; returns True on success """
        modelname = storagemodel.__class__.__name__
        deleted = False
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                # the model must carry id + pop_receipt from a previous get()
                if (storagemodel.id != '') and (storagemodel.pop_receipt != '') and (not storagemodel.id is None) and (not storagemodel.pop_receipt is None):
                    try:
                        self._service.delete_message(storagemodel._queuename, storagemodel.id, storagemodel.pop_receipt)
                        deleted = True

                    except AzureException as e:
                        log.error('can not delete queue message:  queue {} with message.id {!s} because {!s}'.format(storagemodel._queuename, storagemodel.id, e))
                else:
                    log.info('cant update queuemessage {} due to missing id and pop_receipt'.format(modelname))
            else:
                log.info('please register model {} first'.format(modelname))
        else:
            log.info('model {} is not a Queue Model'.format(modelname))

        return deleted
class Queue:
    """Log-message queue backed by either an Azure storage queue or a local file,
    selected by config.ACS_LOGGING_QUEUE_TYPE."""

    def __init__(self, account_name, account_key, queue_name="logqueue"):
        """Initialize a queue. The type is set by the
        'ACS_LOGGING_QUEUE_TYPE' environment variable. If it is set to
        'AzureStorageQueue' then values must be provided for
        'account_name' and 'account_key' which are values associated
        with the Azure Storage account. 'queue_name' is optional and
        defaults to 'logqueue'.

        """
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, 'w+')
        else:
            # BUG FIX: was the bare name `queue_type` (NameError at runtime).
            self.log.error("Unknown queue type: " + self.queue_type)

    def getName(self):
        """Return the configured queue name."""
        return self.queue_name

    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary cleanup on the queue
           at the end of a run.
        """
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            # BUG FIX: was the bare name `queue_type` (NameError at runtime).
            self.log.error("Unknown queue type: " + self.queue_type)

    def enqueue(self, msg):
        """Append msg to the queue (or local file) and log it at debug level."""
        if self.queue_type == "LocalFile":
            # BUG FIX: was the bare name `file_queue` (NameError at runtime).
            self.file_queue.write(msg + '\n')
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        else:
            self.log.error("We don't know how to handle queues of type " + self.queue_type)
        self.log.debug(msg)

    def dequeue(self):
        """Fetch pending messages from the queue."""
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, 'r') as f:
                # NOTE(review): readlines()[1] yields only the *second* line;
                # if the intent was "all lines after the first" this should be
                # [1:] -- confirm before changing behavior.
                messages = f.readlines()[1]
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        """Remove a processed message from the Azure queue."""
        # NOTE(review): other code in this file addresses messages via .id;
        # .message_id may belong to a different SDK version -- confirm.
        self.queue_service.delete_message(self.queue_name, message.message_id, message.pop_receipt)
        #  with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #    processed.write(log)
        #  os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        """Delete the named Azure queue (second arg False presumably
        fail_not_exist -- do not raise if the queue is missing)."""
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """
        Get the approximate length of the queue
        """
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata['x-ms-approximate-messages-count']
        return int(count)

    def peek_messages(self, num_messages):
        """
        Peek at the top messages in the queue. This method does not remove the
        messages from the queue.
        """
        return self.queue_service.peek_messages(self.queue_name, num_messages)
Beispiel #30
0
class TweetSender(object):
    """Polls an Azure storage queue for tweet jobs and posts them to Twitter.

    Each job is a base64-encoded JSON payload with a 'tweet' dict (kwargs for
    the status update) and, optionally, 'percentiles' data used to render an
    mp4 attachment. Jobs that raise are copied to a companion '<queue>fail'
    queue; every job is deleted from the main queue after one attempt.
    """

    # Poll-loop flag checked by run(). BUG FIX: this was previously named
    # `run`, but the `def run()` below immediately shadowed that class
    # attribute, so the flag only worked by accident (a bound method is
    # truthy). Renamed to remove the collision.
    _running = True

    def __init__(self):
        """Authenticate with Twitter, connect to Azure queue storage and
        ensure both the main and fail queues exist."""
        signal.signal(signal.SIGINT, self.signal_handler)
        self.fps = int(os.environ.get('FPS', '40'))
        self.yd = float(os.environ.get('YEAR_DURATION', '0.5'))
        self.queue_name = os.environ['AZURE_QUEUE_NAME']
        self.fail_queue_name = '%sfail' % self.queue_name

        self.logger = logging.getLogger('main')

        twitter_auth = tweepy.OAuthHandler(
            os.environ['TWITTER_CONSUMER_KEY'],
            os.environ['TWITTER_CONSUMER_SECRET'])
        twitter_auth.set_access_token(os.environ['TWITTER_APP_TOKEN_KEY'],
                                      os.environ['TWITTER_APP_TOKEN_SECRET'])
        self.twitter_api = tweepy.API(twitter_auth)
        twitter_user = self.twitter_api.me()
        self.print_stdout('Logged in as @%s' % twitter_user.screen_name)
        self.vid_tweet = VideoTweet(self.twitter_api)

        self.queue_service = QueueService(
            account_name=os.environ['AZURE_STORAGE_ACCOUNT_NAME'],
            account_key=os.environ['AZURE_STORAGE_ACCOUNT_KEY'])

        self.queue_service.create_queue(self.queue_name)
        self.queue_service.create_queue(self.fail_queue_name)
        self.print_stdout('Created queue %s' % self.queue_name)

    def print_stdout(self, message):
        """Log the message and echo it to stdout (flushed for container logs)."""
        self.logger.info(message)
        print(message)
        sys.stdout.flush()

    def print_exception(self, message):
        """Log the message with the active traceback and echo both to stderr."""
        self.logger.exception(message)
        traceback.print_exc()
        print(message, file=sys.stderr)
        sys.stderr.flush()

    def status_url(self, status):
        """Return the public URL of a posted tweepy status."""
        return 'https://twitter.com/%s/status/%s/' % (status.user.screen_name,
                                                      status.id)

    def signal_handler(self, signal, frame):
        """SIGINT handler: let the poll loop finish its current pass, then exit."""
        self.print_stdout('Exiting...')
        self._running = False

    def generate_mp4_file(self, data):
        """Render an mp4 for the job, or return None when it has no video data."""
        if 'percentiles' in data:
            return write_mp4(data, self.yd, self.fps)
        return None

    def process_messages(self):
        """Process up to one queued job: tweet it, or divert it to the fail queue.

        The message is deleted from the main queue either way, so a job is
        attempted exactly once.
        """
        for message in self.queue_service.get_messages(self.queue_name,
                                                       num_messages=1):
            # BUG FIX: initialize before the try block; previously a failure
            # inside the base64/JSON decode made the except handler reference
            # an unbound (or stale, from a prior iteration) `content`.
            content = None
            try:
                content = base64.b64decode(message.content).decode('utf-8')
                data = json.loads(content)
                filename = self.generate_mp4_file(data)
                if filename is not None:
                    status = self.vid_tweet.tweet(filename, **data['tweet'])
                    self.print_stdout(
                        'Sent status with media: %s - %s' %
                        (data['tweet']['status'], self.status_url(status)))
                else:
                    status = self.twitter_api.update_status(**data['tweet'])
                    self.print_stdout(
                        'Sent status: %s - %s' %
                        (data['tweet']['status'], self.status_url(status)))
            except Exception:
                self.print_exception('Cannot send status: %s' % content)
                self.queue_service.put_message(self.fail_queue_name,
                                               message.content)
            self.queue_service.delete_message(self.queue_name, message.id,
                                              message.pop_receipt)

    def run(self):
        """Poll-loop entry point: process jobs once a second until SIGINT."""
        self.print_stdout('Start poll loop')
        while self._running:
            self.process_messages()
            time.sleep(1)
class Queue:
    """Log-message queue with two interchangeable backends.

    The backend is chosen by the 'ACS_LOGGING_QUEUE_TYPE' config value:
    'AzureStorageQueue' (requires 'account_name' and 'account_key' for
    the Azure Storage account) or 'LocalFile' (a local spool file).
    'queue_name' is optional and defaults to 'logqueue'.
    """

    def __init__(self, account_name, account_key, queue_name="logqueue"):
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, "w+")
        else:
            # Fixed NameError: previously referenced bare `queue_type`.
            self.log.error("Unknown queue type: " + self.queue_type)

    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary cleanup on the queue at the end of a run."""
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            # Fixed NameError: previously referenced bare `queue_type`.
            self.log.error("Unknown queue type: " + self.queue_type)

    def enqueue(self, msg, level="INFO"):
        """Append *msg* (prefixed with *level*) to the queue and debug-log it."""
        msg = level + " - " + msg
        if self.queue_type == "LocalFile":
            # Fixed NameError: previously called bare `file_queue.write`.
            self.file_queue.write(msg + "\n")
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        self.log.debug(msg)

    def dequeue(self):
        """Return pending messages as a list: lines of the spool file for
        'LocalFile', Azure queue messages for 'AzureStorageQueue'."""
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, "r") as f:
                # Fixed: was `f.readlines()[1]`, which returned only the
                # second line as a string (and raised IndexError on files
                # with fewer than two lines) instead of the full list.
                messages = f.readlines()
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        """Remove a processed *message* from the Azure queue."""
        self.queue_service.delete_message(self.queue_name, message.message_id, message.pop_receipt)
        #  with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #    processed.write(log)
        #  os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        """Delete *queue_name* without failing when it does not exist."""
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """
        Get the approximate length of the queue
        """
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata["x-ms-approximate-messages-count"]
        return count
# Beispiel #32
# 0
import os
from azure.storage.queue import QueueService
import time

# Grab environment variables.
AZURE_STORAGE_ACCT = os.environ['AZURE_STORAGE_ACCT']
AZURE_QUEUE = os.environ['AZURE_QUEUE']
AZURE_QUEUE_KEY = os.environ['AZURE_QUEUE_KEY']

# Build queue object
queue_service = QueueService(account_name=AZURE_STORAGE_ACCT,
                             account_key=AZURE_QUEUE_KEY)

# Poll forever: print the approximate queue depth, then delete up to 32
# messages per pass. Messages are removed without any processing — this
# script only empties the queue.
while True:

    # Get queue count
    metadata = queue_service.get_queue_metadata(AZURE_QUEUE)
    queue_length = metadata.approximate_message_count
    print(queue_length)

    messages = queue_service.get_messages(AZURE_QUEUE, num_messages=32)
    for message in messages:
        queue_service.delete_message(AZURE_QUEUE, message.id,
                                     message.pop_receipt)
        print("Message deleted")
class Workload(object):
    """Collects workload-tracker events from an Azure Storage queue and
    prints timing statistics for a scheduler/processor batch run.

    NOTE(review): Python 2 code (print statements). Depends on
    WorkloadEvent / WorkloadEventType defined elsewhere in the project.
    """

    def __init__(self, config):
        self.config = config
        self.log_queue_service = QueueService(account_name=self.config.storage_account_name,
                sas_token=self.config.workload_tracker_sas_token)
        # One-off milestone events, filled in by get_events().
        self.workload_complete_event = None
        self.scheduler_start_event = None
        self.jobs_queued_done = None
        # Recurring events, accumulated in arrival order.
        self.processor_events = []
        self.processor_fork_events = []
        self.job_consolidation_status_events = []
        self.job_processing_status_events = []

    def get_events(self):
        """Poll the tracker queue, bucketing events by type, until both
        the scheduler-start and workload-done events have been seen."""
        print "Getting Relevant Workload Events"
        while True:
            # NOTE(review): the original comment claimed a 2-day
            # visibility timeout, but the positional `32` below binds to
            # num_messages (batch size); visibility_timeout is left at
            # the service default — confirm which was intended.
            messages = self.log_queue_service.get_messages(self.config.workload_tracker_queue_name, 32)
            for msg in messages:
                parsed = json.loads(msg.content)
                event = WorkloadEvent(WorkloadEventType(parsed["event_type"]), msg.insertion_time, parsed["content"])

                # Workload Completed
                if event.event_type == WorkloadEventType.WORKLOAD_DONE:
                    self.workload_complete_event = event

                # Scheduler - Main Start
                if event.event_type == WorkloadEventType.SCHEDULER_START:
                    self.scheduler_start_event = event

                # Processor - Main Start
                if event.event_type == WorkloadEventType.PROCESSOR_START:
                    self.processor_events.append(event)

                # Processor - Fork Start
                if event.event_type == WorkloadEventType.PROCESSOR_FORK_START:
                    self.processor_fork_events.append(event)

                # Job Consolidation Status
                if event.event_type == WorkloadEventType.WORKLOAD_CONSOLIDATION_STATUS:
                    self.job_consolidation_status_events.append(event)

                # Job Processing Status
                if event.event_type == WorkloadEventType.WORKLOAD_PROCESSING_STATUS:
                    self.job_processing_status_events.append(event)

                # All Jobs Queued
                if event.event_type == WorkloadEventType.JOBS_QUEUE_DONE:
                    self.jobs_queued_done = event

                # NOTE(review): `event` is always non-None at this point;
                # the guard is redundant but kept as-is.
                if event is not None:
                    print str(event.timestamp) + " " + event.contents

                # Delete the message so it is not redelivered.
                self.log_queue_service.delete_message(self.config.workload_tracker_queue_name, msg.id, msg.pop_receipt)

            # Stop when the workload is completed
            if self.workload_complete_event is not None and self.scheduler_start_event is not None:
                break

            # Sleeping to avoid spamming if the queue is empty
            if not messages:
                time.sleep(10)

    def time_elapse(self, evt1, evt2):
        # Returns (minutes, seconds) between the two event timestamps.
        return divmod((evt2.timestamp - evt1.timestamp).total_seconds(), 60)

    def print_summary(self):
        """Print elapsed-time statistics; call only after get_events().

        NOTE(review): assumes the milestone events were all observed;
        any that are still None will raise AttributeError here.
        """
        print "\nSummary: "
        print "Scheduler Started: " + str(self.scheduler_start_event.timestamp)
        print "Workload Completed: " + str(self.workload_complete_event.timestamp)

        # Jobs Queued
        print "All Jobs Queued: " + str(self.jobs_queued_done.timestamp)
        elapse = self.time_elapse(self.scheduler_start_event, self.jobs_queued_done)
        print "Jobs Queued Elapsed Time: " + str(elapse[0]) + " mins, " + str(elapse[1]) + " secs"

        # Workload Completion
        elapse = self.time_elapse(self.scheduler_start_event, self.workload_complete_event)
        print "Workload Elapsed Time: " + str(elapse[0]) + " mins, " + str(elapse[1]) + " secs"

        # Processor Information
        self.processor_events.sort(key=lambda evt: evt.timestamp)
        print "Number of Processors: " + str(len(self.processor_events))
        print "\tNumber of Forked Processors Instances: " + str(len(self.processor_fork_events))
        print "\tFirst Processor Up: " + str(self.processor_events[0].timestamp)
        print "\tLast Processor Up: " + str(self.processor_events[-1].timestamp)

        # Job Processor Completion
        self.job_processing_status_events.sort(key=lambda evt: evt.timestamp)

        completed_processing_event =  None
        for job_proc_evt in self.job_processing_status_events:
            # NOTE(review): assumes contents ends with "done/total" —
            # confirm the event payload format.
            status = job_proc_evt.contents.split()[-1].split('/')

            # Find when 100% of the jobs are reported to be completed processing
            if status[0] == status[1]:
                completed_processing_event = job_proc_evt
                break

        print "Final Processing Time: " + str(completed_processing_event.timestamp)
        elapse = self.time_elapse(self.scheduler_start_event, completed_processing_event)
        print "Processing Elapsed Time: " + str(elapse[0]) + " mins, " + str(elapse[1]) + " secs"
# Beispiel #34
# 0
###
# Use the Azure Storage Storage SDK for Python to read each message from the Queue
###
# NOTE(review): `queue_service` is created earlier in this demo script
# (outside this excerpt) — confirm it is in scope here.
print(
    '\nWith some messages in our Azure Storage Queue, let\'s read the first message in the Queue to signal we start to process that customer\'s order.'
)
input('Press Enter to continue...')

# When you get each message, they become hidden from other parts of the applications being able to see it.
# Once you have successfully processed the message, you then delete the message from the Queue.
# This behavior makes sure that if something goes wrong in the processing of the message, it is then dropped back in the Queue for processing in the next cycle.
messages = queue_service.get_messages('pizzaqueue')
for message in messages:
    print(('\n' + message.content))
    # Deleting marks the order as processed; otherwise it would reappear
    # once its visibility timeout expires.
    queue_service.delete_message('pizzaqueue', message.id, message.pop_receipt)

input('\nPress Enter to continue...')
metadata = queue_service.get_queue_metadata('pizzaqueue')

print(
    'If we look at the Queue again, we have one less message to show we have processed that order and a yummy pizza will be on it\'s way to the customer soon.'
)
print(('Number of messages in the queue: ' +
       str(metadata.approximate_message_count)))
input('\nPress Enter to continue...')

###
# This was a quick demo to see Queues in action.
# Although the actual cost is minimal since we deleted all the messages from the Queue, it's good to clean up resources when you're done
###
class AzureService(object):
    """Wraps one Azure blob container plus a pair of storage queues
    (a 'get' queue of work items, a 'push' queue of results) used by
    the receipt-processing pipeline.

    NOTE(review): Python 2 code (print statements, `unicode`); depends
    on ReceiptSerialize and a module-level `args` defined elsewhere.
    """

    # Seconds a dequeued message stays hidden from other consumers
    # before it reappears on the queue.
    VISIBILITY_TIMEOUT = 5*60

    def __init__(self, connection_string, container_name, queue_get, queue_push, logger=None):
        self.ctnname = container_name
        self.getname = queue_get
        self.pushname = queue_push

        self.qs = QueueService(connection_string=connection_string,
                               protocol='https',
#                                endpoint_suffix='core.windows.net'
                                )
        self.bs = BlockBlobService(connection_string=connection_string)
        # The create_* calls are harmless when the resources already exist.
        self.qs.create_queue(self.getname, timeout=1)
        self.qs.create_queue(self.pushname, timeout=1)
        self.bs.create_container(self.ctnname, timeout=1)
        if logger: logger.info('Init Azure success')

    def pushMessage(self, message, qname=None, logger=None):
        """Enqueue *message* on *qname* (defaults to the push queue)."""
        if qname is None:
            qname = self.pushname
        try:
            # Fixed: was hard-coded to self.pushname, silently ignoring
            # an explicit qname argument.
            self.qs.put_message(qname, message)
        except Exception as e:
            if logger:
                logger.exception('ERROR PUSH MESSAGE ')
            else:
                print 'ERROR PUSH MESSAGE '
                print e

    def getMessage(self, qname=None, num=1, logger=None):
        """Dequeue up to *num* messages from *qname* (defaults to the
        get queue); returns [] on error."""
        if qname is None:
            qname = self.getname
        try:
            message = self.qs.get_messages(qname, num, visibility_timeout=self.VISIBILITY_TIMEOUT)
        except Exception as e:
            if logger:
                logger.exception('ERROR GET MESSAGE ')
            else:
                print 'ERROR GET MESSAGE '
                print e
            return []
        return message

    def getReceiptInfo(self, logger=None):
        """Pop one work item and parse it; returns (message, receipt) or
        (None, None) when the queue is empty."""
        message = self.getMessage(logger=logger)
        if len(message) > 0:
            rinfo = ReceiptSerialize.fromjson(message[0].content)
            return message[0], rinfo
        else:
            return None, None

    def count(self):
        """Return approximate message counts for both queues plus the
        number of blobs currently in the container."""
        metadata_get = self.qs.get_queue_metadata(self.getname)
        metadata_push = self.qs.get_queue_metadata(self.pushname)
        generator = self.bs.list_blobs(self.ctnname)
        bc = 0
        for blob in generator:
            bc += 1
        return {'get_count' : metadata_get.approximate_message_count,
                'push_count': metadata_push.approximate_message_count,
                'blob_count': bc
                }

    def uploadFolder(self, folderpath, logger):
        """Upload every .jpg/.jpeg in *folderpath* to the container and
        enqueue one base64-encoded work item per image."""
        for filename in os.listdir(folderpath):
            if len(filename) > 4:
                suffix = filename[-4:].upper()
            else:
                continue
            if '.JPG' == suffix or 'JPEG' == suffix:
                receipt_metadata = ReceiptSerialize()
                receipt_metadata.receiptBlobName = unicode(filename, 'utf-8')
                self.qs.put_message(self.getname, b64encode(receipt_metadata.toString()).decode('utf-8'))
                self.bs.create_blob_from_path(self.ctnname, receipt_metadata.receiptBlobName, os.path.join(folderpath, filename), max_connections=2, timeout=None)
                logger.info('upload %s', filename)

    def getImage(self, imgname, logger=None):
        """Download blob *imgname*; return its local path, '' when the
        blob is missing, or None on any other error.

        NOTE(review): relies on a module-level `args.download_dir` —
        confirm it is defined wherever this class is used.
        """
        localpath= os.path.join(args.download_dir, imgname)
        try:
            self.bs.get_blob_to_path(self.ctnname, imgname, localpath)
        except AzureMissingResourceHttpError as e:
            if logger:
                logger.error('Blob named ' + imgname + ' doesnot exist.' , exc_info=True)
            else:
                print 'Blob named ' + imgname + ' doesnot exist.'
                print e
            return ''
        except Exception as e:
            if logger:
                logger.error('Exception while getting blob.', exc_info=True)
            else:
                print 'Exception while getting blob.'
                print e
            return None
        return localpath

    def deleteMessage(self, message, qname=None, logger=None):
        """Best-effort delete of *message* from *qname* (defaults to the
        get queue)."""
        if qname is None:
            qname = self.getname
        try:
            self.qs.delete_message(qname, message.id, message.pop_receipt)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE MESSAGE ')
            else:
                print 'ERROR DELETE MESSAGE '
                print e

    def deleteImage(self, imgname, logger=None):
        """Best-effort delete of blob *imgname* from the container."""
        try:
            self.bs.delete_blob(self.ctnname, imgname)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE IMAGE ')
            else:
                print 'ERROR DELETE IMAGE '
                print e

    def cleanUp(self):
        """Drain both queues and empty the container, printing how many
        items were removed from each."""
        count = 0
        print('deleted: ')
        while True:
            messages = self.qs.get_messages(self.getname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.getname, message.id, message.pop_receipt)
            if len(messages) == 0: break
        print(str(count) + ' from queue-get')
        count = 0
        while True:
            messages = self.qs.get_messages(self.pushname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.pushname, message.id, message.pop_receipt)
            if len(messages) == 0: break
        print(str(count) + ' from queue-push')
        count = 0
        generator = self.bs.list_blobs(self.ctnname)
        for blob in generator:
            count += 1
            self.bs.delete_blob(self.ctnname, blob.name)
        print(str(count) + ' from container')
class AzureQueueStorageIn(InputModule):
    '''Consumes messages from Azure Queue Storage

    Consume messages from the Azure Queue Storage service.


    Parameters::

        - account_name(str)("wishbone")
           |  The account name to authenticate to

        - account_key(str)("wishbone")
           |  The account key to authenticate to the queue

        - auto_message_delete(bool)(True)
           |  Once the message is consumed from the queue delete it immediately.

        - b64decode(bool)(True)
           |  Decode the message payload.

        - destination(str)("data")
           |  The location write the payload to

        - endpoint_suffix(str)("core.windows.net")
           |  The endpoint suffix of the service

        - native_events(bool)(False)
           |  Whether to expect incoming events to be native Wishbone events

        - payload(str/dict/int/float)("test")
           |  The content of the test message.

        - queue_name(str)("wishbone")
           |  The name of the queue to consume

        - visibility_timeout(int)(None)
           |  The amount of time the consumed message remains invisible to
           |  other consumers before it gets deleted.

    Queues::

        - outbox
           |  Outgoing events.

        - delete
           |  Events to delete from Queue storage.
    '''
    def __init__(self,
                 actor_config,
                 destination="data",
                 payload=None,
                 native_events=False,
                 account_name="wishbone",
                 account_key="wishbone",
                 queue_name="wishbone",
                 endpoint_suffix='core.windows.net',
                 auto_message_delete=True,
                 visibility_timeout=None,
                 b64decode=True):

        InputModule.__init__(self, actor_config)
        # Two wishbone queues: consumed events out, deletion requests in.
        self.pool.createQueue("outbox")
        self.pool.createQueue("delete")

        # NOTE(review): the remaining keyword parameters are not stored
        # here; presumably the wishbone Actor machinery exposes them as
        # self.kwargs (read below) — confirm against the framework.
        self.decode = Plain().handler

    def getMessages(self):
        # Connect (creating the queue if needed), then poll forever,
        # submitting decoded events to "outbox".

        try:
            self.queue_service = QueueService(
                account_name=self.kwargs.account_name,
                account_key=self.kwargs.account_key,
                endpoint_suffix=self.kwargs.endpoint_suffix,
            )
            self.queue_service.create_queue(self.kwargs.queue_name)
        except Exception as err:
            # Re-raise with only the first line of the SDK error to keep
            # the message readable.
            message = "Failed to connect to Azure Queue Service https://%s.queue.%s/%s Reason: " % (
                self.kwargs.account_name, self.kwargs.endpoint_suffix,
                self.kwargs.queue_name)
            raise Exception(message + str(err).partition("\n")[0])
        else:
            self.logging.info(
                "Connected to Azure Queue Service https://%s.queue.%s/%s" %
                (self.kwargs.account_name, self.kwargs.endpoint_suffix,
                 self.kwargs.queue_name))

        while self.loop():
            for message in self.queue_service.get_messages(
                    self.kwargs.queue_name,
                    visibility_timeout=self.kwargs.visibility_timeout):
                for event in self.processIncomingMessage(message):
                    self.submit(event, "outbox")
                # When auto-delete is off, deletion happens via the
                # "delete" queue in processDeleteMessage instead.
                if self.kwargs.auto_message_delete:
                    self.queue_service.delete_message(self.kwargs.queue_name,
                                                      message.id,
                                                      message.pop_receipt)

    def preHook(self):
        # Silence the noisy azure.storage logger, then start the two
        # background loops.

        logger = logging.getLogger('azure.storage')
        logger.setLevel(logging.CRITICAL)

        self.sendToBackground(self.getMessages)
        self.sendToBackground(self.processDeleteMessage)

    def processIncomingMessage(self, message):
        # Decode one queue message and yield wishbone events for each
        # payload the decoder produces.

        if self.kwargs.b64decode:
            data = b64decode(message.content)
        else:
            data = message.content

        # NOTE(review): the trailing None chunk presumably flushes the
        # decoder — confirm Plain().handler semantics.
        for chunk in [data, None]:
            for payload in self.decode(chunk):
                event = self.generateEvent(payload, self.kwargs.destination)
                event = self.__setMetaData(event, message)
                yield event

    def processDeleteMessage(self):
        # Consume the "delete" queue and remove the referenced storage
        # messages using the id/pop_receipt stashed by __setMetaData.

        while self.loop():
            event = self.pool.queue.delete.get()
            self.queue_service.delete_message(
                self.kwargs.queue_name, event.get('tmp.%s.id' % (self.name)),
                event.get('tmp.%s.pop_receipt' % (self.name)))

    def __setMetaData(self, event, message):
        # Attach the storage message's bookkeeping fields under
        # "tmp.<module name>" so downstream modules can request deletion.

        metadata = {
            "id": message.id,
            "insertion_time": message.insertion_time.strftime('%s'),
            "expiration_time": message.expiration_time.strftime('%s'),
            "dequeue_count": message.dequeue_count,
            "pop_receipt": message.pop_receipt,
            "time_next_visible": message.time_next_visible.strftime('%s')
        }
        event.set(metadata, "tmp.%s" % (self.name))
        return event
# Beispiel #37
# 0
        # NOTE(review): fragment of a larger function — `evaluator`,
        # `preprocess_image`, `deprocess_image`, `img_height`, `img_width`,
        # `saveToBlob`, `queue_name` and `message` come from the missing
        # enclosing scope.
        # This is our initial state: the target image.
        # Note that `scipy.optimize.fmin_l_bfgs_b` can only process flat vectors.
        x = preprocess_image(target_image_path)
        x = x.flatten()
        for i in range(iterations):
            print('Start of iteration', i)
            start_time = time.time()
            # One L-BFGS-B step minimising the style-transfer loss.
            x, min_val, info = fmin_l_bfgs_b(evaluator.loss,
                                             x,
                                             fprime=evaluator.grads,
                                             maxfun=20)
            print('Current loss value:', min_val)
            # Save current generated image
            img = x.copy().reshape((img_height, img_width, 3))
            img = deprocess_image(img)
            fname = result_prefix + '_at_iteration_%d.png' % i
            #print(fname)
            imsave(fname, img)
            end_time = time.time()
            print('Image saved as', fname)
            print('Iteration %d completed in %ds' % (i, end_time - start_time))

        # Generate image
        imsave(output_image_path + output_image_name, img)
        saveToBlob(output_image_name, output_image_path + output_image_name)

        # Publish the result URL and acknowledge the work item.
        blob_image_url = 'https://imprinter.blob.core.windows.net/imprinted/' + output_image_name
        queue_service.put_message('imprintresults', blob_image_url)
        queue_service.delete_message(queue_name, message.id,
                                     message.pop_receipt)
# Beispiel #38
# 0
# Pedro Perez - 2015
#
# -----------------------------------------------------------------------------
import sys
import os
import subprocess
from azure.storage.queue import QueueService

account_name = "mcdockerqueue"
account_key = sys.argv[1]
queuename = "servers"

queue_service = QueueService(account_name, account_key)

queue_metadata = queue_service.get_queue_metadata(queuename)
count = queue_metadata['x-ms-approximate-messages-count']

print "There are %s messages in the queue" % count


if count > 0:
	messages = queue_service.get_messages(queuename)

	for message in messages:
	    print(message.message_text)

	# Remove message from the queue
	queue_service.delete_message(queuename, message.message_id, message.pop_receipt)
else:
	print "There are no messages to process"