Example #1
0
def main(myblob: func.InputStream):
    # Blob-trigger entry point: downloads the triggering CSV from the
    # 'weather-data' blob container and enqueues one JSON datapoint per
    # data row onto the 'weather-data' storage queue.
    #//TODO: Ensure queue name is 'weather-data'
    queue_name = 'weather-data'
    #//TODO: Add Storage Account Name and Key from  https://portal.azure.com/#@[user_email]/resource/subscriptions/[subscription_id]/resourceGroups/[resource_group_name]/providers/Microsoft.Storage/storageAccounts/[storage_account_name]/keys
    # NOTE(review): the credentials below are unfilled placeholders; the
    # function cannot run until they are supplied (ideally via app settings).
    block_blob_service = BlockBlobService(account_name='//TODO: ', account_key=' //TODO')
    queue_service = QueueService(account_name='//TODO', account_key='//TODO')
    # Queue consumers expect base64-encoded message text.
    queue_service.encode_function = QueueMessageFormat.text_base64encode
    # myblob.name is "<container>/<blob>"; keep only the blob file name.
    file_name = myblob.name.split("/")[1]
    #// Ensure that files are added to a blob container named 'weather-data'
    block_blob_service.get_blob_to_path('weather-data', file_name, file_name)   
    with open(file_name, "r+") as file:
        reader = csv.reader(file)
        for idx, data in enumerate(reader):
            if idx != 0:  # skip the CSV header row
                if(len(data)> 13):
                    # Column 6 holds "city,country"; column 13 holds the
                    # temperature with a 2-character suffix that is stripped.
                    city, country = data[6].split(",")
                    datapoint =json.dumps({"date": data[1],"city": city, "country": country, "temperature": data[13][:-2]})   
                    queue_service.put_message(queue_name, datapoint)
                else:
                    # Row too short to parse; log its length and skip it.
                    logging.info(len(data))

    logging.info(f"Python blob trigger function processed blob \n"
                 f"Name: {myblob.name}\n"
                 f"Blob Size: {myblob.length} bytes\n"
                )
Example #2
0
class AzureProvider(BaseProvider):
    """Queue Provider for the Microsoft Azure."""

    def __init__(self, account_name, account_key):
        # One shared service client for every queue operation.
        self.queue_service = QueueService(account_name=account_name,
                                          account_key=account_key)

    def put_message(self, queue_name: str, message: str):
        """Append *message* to the named queue."""
        self.queue_service.put_message(queue_name=queue_name,
                                       message_text=message)

    def get_message(self, queue_name: str):
        """Fetch one message and expose its public attributes as a dict.

        Returns an empty dict when the queue currently has no messages.
        """
        try:
            head = self.queue_service.get_messages(
                queue_name=queue_name, numofmessages=1
            ).queue_messages[0]
        except IndexError:
            return {}
        result = {}
        for attr_name in dir(head):
            if attr_name.startswith('_'):
                continue
            result[attr_name] = getattr(head, attr_name)
        return result

    def delete_message(self, queue_name: str, message: dict):
        """Delete a previously fetched message via its id and pop receipt."""
        self.queue_service.delete_message(
            queue_name,
            message_id=message.get('message_id'),
            popreceipt=message.get('pop_receipt'),
        )
Example #3
0
class AzureQueue:
    
    def __init__(self,accountName,key):
        
        """
            Initialize a new connection to azure queue storage

            @param accountName : the storage account in azure
            @param key : the api key
        """
        
        self._accountName = accountName
        self._key = key
        self._innerService = QueueService(account_name=self._accountName, account_key=self._key)
    
    def push_message(self,queue,message):

        """
            Enqueue a new message into the given queue

            @param queue : the queue name
            @param message : the message to put into the queue

            @raises ValueError : when queue or message is empty/falsy
        """

        if not queue:
            raise ValueError("queue name is mandatory ")

        if not message:
            raise ValueError("message is mandatory")
            
        self._innerService.put_message(queue,message)
def render_video(request):
    """Django view: pull one 'ready to encode' message, register the video
    with Azure Media Services (asset + file entry), copy the uploaded blob
    into the asset's container, submit an encode+index job, and move the
    work item onto the 'encoding' queue."""
    template = loader.get_template('app/render_video.html')
    vidstatus = 'No Video Found.'

    queue_service = QueueService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'], account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
    # Hide the message from other workers for 60s while it is processed.
    messages = queue_service.get_messages(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'], num_messages=1, visibility_timeout=1*60)
    
    for message in messages:
        vidstatus = 'Queued for Rendering: ' + message.content
        message_obj = json.loads(message.content)

        access_token = ams_authenticate()['access_token']
        
        # Create the AMS asset that will receive the uploaded video.
        asset = ams_post_request(access_token, "Assets", {
            'Name': message_obj['filename'], 
            'AlternateId': message_obj['folder']})
        
        # The asset URI path (minus the leading '/') is its container name.
        asset_container = urllib.parse.urlparse(asset['Uri']).path[1:]

        # Registering the file; the response itself is not used further.
        asset_file = ams_post_request(access_token, "Files", {
            'IsEncrypted': 'false',
            'IsPrimary': 'false',
            'MimeType': 'video/mp4',
            'ContentFileSize': message_obj['size'],
            'Name': message_obj['filename'],
            'ParentAssetId': asset['Id']})

        # Copy the uploaded blob into the asset's container.
        block_blob_service = BlockBlobService(account_name=os.environ['SVPD_STORAGE_ACCOUNT_NAME'], account_key=os.environ['SVPD_STORAGE_ACCOUNT_KEY'])
        from_url = block_blob_service.make_blob_url(os.environ['SVPD_STORAGE_ACCOUNT_UPLOADED'], message_obj['folder'] + '/' + message_obj['filename'])
        block_blob_service.copy_blob(asset_container, message_obj['filename'], from_url)

        # One job, two tasks: adaptive-streaming encode + speech indexing.
        job = ams_verbose_post_request(access_token, "Jobs", {
            'Name': message_obj['filename'], 
            'InputMediaAssets': [{
                '__metadata': { 'uri': os.environ['AMS_API_ENDPOINT'] + 'Assets(\'' + asset['Id'] + '\')' }
            }],
            'Tasks': [{
                'Name': 'Adaptive Streaming Task',
                'Configuration': 'Adaptive Streaming',
                'MediaProcessorId': 'nb:mpid:UUID:ff4df607-d419-42f0-bc17-a481b1331e56',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - MES v1.1" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(0)</outputAsset></taskBody>'
            },{
                'Name': 'Indexing Task',
                'Configuration': '<?xml version="1.0" encoding="utf-8"?><configuration version="2.0"><input><metadata key="title" value="blah" /></input><settings></settings><features><feature name="ASR"><settings><add key="Language" value="English" /><add key="GenerateAIB" value="False" /><add key="GenerateKeywords" value="True" /><add key="ForceFullCaption" value="False" /><add key="CaptionFormats" value="ttml;sami;webvtt" /></settings></feature></features></configuration>',
                'MediaProcessorId': 'nb:mpid:UUID:233e57fc-36bb-4f6f-8f18-3b662747a9f8',
                'TaskBody': '<?xml version="1.0" encoding="utf-16"?><taskBody><inputAsset>JobInputAsset(0)</inputAsset><outputAsset assetCreationOptions="0" assetFormatOption="0" assetName="' + message_obj['filename'] + ' - Indexed" storageAccountName="' + os.environ['SVPD_STORAGE_ACCOUNT_NAME'] + '">JobOutputAsset(1)</outputAsset></taskBody>'
            }]
            })

        # Hand the work item to the encoding queue, carrying the job handle.
        queue_service.put_message(os.environ['SVPD_STORAGE_ACCOUNT_ENCODING'], json.dumps({ 
            'filename': message_obj['filename'],
            'folder': message_obj['folder'],
            'size': message_obj['size'],
            'job': job['d']}))

        # Done: remove the message from the ready-to-encode queue.
        queue_service.delete_message(os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'], message.id, message.pop_receipt)   

    return HttpResponse(template.render({
        'vidstatus': vidstatus,
    }, request))
Example #5
0
class QueueBase(object):
    """Minimal wrapper around an Azure storage queue, plus a helper that
    downloads numbers from a blob URL queued on 'taskqueue'."""

    def __init__(self,
                 account_name='boburstorage',
                 account_key='wRgukLsyhLtnI7qEk8mSGnIBC+IsiTTXEDF1/xnmBGDudJLSeYdtyuVzuSN5/cplJz88AJPyoVyjCmL9N1ECXw=='):
        """Create the queue service client.

        SECURITY(review): the defaults preserve the credentials that were
        hard-coded here (backward compatible), but they are exposed in
        source control — rotate them and have callers inject real values.
        """
        super(QueueBase, self).__init__()
        self.queue_service = QueueService(account_name=account_name,
                                          account_key=account_key)

    def save_message_to_queue(self, queue, data):
        """Enqueue *data* on *queue*."""
        self.queue_service.put_message(queue, data)
        print('url added into queue...\n')

    def get_data_from_url(self):
        """Pop a URL from 'taskqueue', download it, and return its lines.

        NOTE(review): urllib2 is Python 2 only; under Python 3 this needs
        urllib.request — confirm the intended interpreter.
        """
        print('reading data from blob using url in queue...\n')
        data = self.queue_service.get_messages('taskqueue')

        response = urllib2.urlopen(data[0].content)
        numbers = response.read()

        self.queue_service.delete_message('taskqueue', data[0].id,
                                          data[0].pop_receipt)
        return numbers.splitlines()

    def get_messages_from_queue(self, queue):
        """Return the default batch of messages from *queue*."""
        return self.queue_service.get_messages(queue)

    def delete_message_from_queue(self, queue, message):
        """Delete *message* (must carry id and pop_receipt) from *queue*."""
        self.queue_service.delete_message(queue, message.id,
                                          message.pop_receipt)
Example #6
0
class AzureStorageQueue(Common.Contracts.Queue):
    """Azure Queue Storage implementation of the Queue contract."""

    def __init__(self, queue_name, config: AzureStorageConfig):
        """Bind to *queue_name* using credentials from *config*."""
        self._queue_name = queue_name
        self._queue_service = QueueService(account_name=config.account_name,
                                           account_key=config.account_key)

        # Messages are stored base64-encoded text.
        self._queue_service.encode_function = QueueMessageFormat.text_base64encode
        self._queue_service.decode_function = QueueMessageFormat.text_base64decode

    def push(self, message):
        """Append *message* to the queue, creating the queue if needed."""
        self._queue_service.create_queue(self._queue_name)
        self._queue_service.put_message(self._queue_name, message)

    def pop(self):
        """Remove and return the first message's content (None if empty).

        BUG FIX: the original used peek_messages, whose results carry no
        pop_receipt, so the subsequent delete_message could never succeed.
        get_messages locks the message for visibility_timeout seconds and
        supplies the pop_receipt needed for deletion.
        """
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.get_messages(self._queue_name,
                                                    visibility_timeout=30)
        for message in messages:
            result = message.content
            self._queue_service.delete_message(self._queue_name, message.id,
                                               message.pop_receipt)
            return result

    def peek(self):
        """Return the first message's content without removing it."""
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.peek_messages(self._queue_name)
        for message in messages:
            return message.content
Example #7
0
class QueueBase(object):
    """Minimal wrapper around an Azure storage queue, plus a helper that
    downloads numbers from a blob URL queued on 'taskqueue'."""

    def __init__(self,
                 account_name='bobur',
                 account_key='6e60FZapOXAmUbFBw0SpE1lHRP3RkXOMYRaalWmRBoz4+xI5tvjaJzxXuYyt+yfWxjPXpz5X3PmyIFiQmSkjbw=='):
        """Create the queue service client.

        SECURITY(review): the defaults preserve the credentials that were
        hard-coded here (backward compatible), but they are exposed in
        source control — rotate them and have callers inject real values.
        """
        super(QueueBase, self).__init__()
        self.queue_service = QueueService(account_name=account_name,
                                          account_key=account_key)

    def save_message_to_queue(self, queue, data):
        """Enqueue *data* on *queue*."""
        self.queue_service.put_message(queue, data)
        print('url added into queue...\n')

    def get_data_from_url(self):
        """Pop a URL from 'taskqueue', download it, and return its lines.

        NOTE(review): urllib2 is Python 2 only; under Python 3 this needs
        urllib.request — confirm the intended interpreter.
        """
        print('reading data from blob using url in queue...\n')
        data = self.queue_service.get_messages('taskqueue')

        response = urllib2.urlopen(data[0].content)
        numbers = response.read()

        self.queue_service.delete_message('taskqueue', data[0].id,
                                          data[0].pop_receipt)
        return numbers.splitlines()

    def get_messages_from_queue(self, queue):
        """Return the default batch of messages from *queue*."""
        return self.queue_service.get_messages(queue)

    def delete_message_from_queue(self, queue, message):
        """Delete *message* (must carry id and pop_receipt) from *queue*."""
        self.queue_service.delete_message(queue, message.id,
                                          message.pop_receipt)
def enviar_aquivos_audio_blob(main_app, dir="audio_files/"):
    """Upload every .wav file under *dir* to blob storage, delete the local
    copy, and enqueue a processing message for each uploaded file.

    :param main_app: UI object whose `mensagem["text"]` shows sync status.
    :param dir: directory (with trailing slash) scanned for .wav files.
    """
    for file in glob.glob(dir + "*.wav"):
        try:
            print("Processando arquivo " + file + "...")
            # File paths look like "<dir>_<something>/<meeting_code>..." —
            # the meeting code is recovered from the underscore/slash split.
            meeting_code = file.split("_")[1].split("/")[1]
            blob = meeting_code + "/" + file
            print("Meeting code " + str(meeting_code))
            blob_service = BlockBlobService(account_name=ACCOUNT_NAME,
                                            account_key=ACCOUNT_KEY)
            blob_service.create_blob_from_path(CONTAINER_NAME, blob, file)

            # Remove the local copy once it is safely in blob storage.
            if os.path.exists(file):
                os.remove(file)

            queue_service = QueueService(account_name=ACCOUNT_NAME,
                                         account_key=ACCOUNT_KEY)
            # Queue consumers expect base64-encoded message text.
            queue_service.encode_function = QueueMessageFormat.text_base64encode
            payload = {
                "meeting-code": meeting_code,
                "blob": blob,
                "file-name": util.get_file_with_extension(file)
            }

            payload = json.dumps(payload, ensure_ascii=False)

            queue_service.put_message(QUEUE_NAME_AUDIO, payload)
            print("Arquivo " + file + " processado com sucesso.")

            main_app.mensagem["text"] = "File " + file + " synced successfully"

        except Exception:
            # BUG FIX: traceback.format_exc() returns a string; the original
            # discarded it, silently hiding every failure. Print it, and
            # avoid a bare `except:` so KeyboardInterrupt still propagates.
            print(traceback.format_exc())
Example #9
0
class AzureQueue(object):
    """Pickle-based task queue on top of Azure Queue Storage."""
    def __init__(self, queue_name):
        # Credentials come from the environment; the queue is created up
        # front so enqueue/dequeue never race against queue creation.
        self.conn = QueueService(account_name=os.getenv('AZURE_ACCOUNT_NAME'),
                                 account_key=os.getenv('AZURE_ACCOUNT_KEY'))
        self.queue_name = queue_name
        self.conn.create_queue(queue_name)
        # Pickled payloads are binary, so use the binary-safe base64 codecs.
        self.conn.encode_function = QueueMessageFormat.binary_base64encode
        self.conn.decode_function = QueueMessageFormat.binary_base64decode

    def enqueue(self, func, *args, **kwargs):
        """Wrap *func* in a SimpleTask, pickle it, queue it; return task.id."""
        task = SimpleTask(func, *args, **kwargs)
        serialized_task = pickle.dumps(task, protocol=pickle.HIGHEST_PROTOCOL)
        self.conn.put_message(self.queue_name, serialized_task)
        return task.id

    def dequeue(self):
        """Fetch, unpickle and delete the next task; None when queue is empty.

        SECURITY(review): pickle.loads executes arbitrary code from the
        message body — only acceptable if the queue is fully trusted.
        """
        messages = self.conn.get_messages(self.queue_name)
        if len(messages) == 1:
            serialized_task = messages[0]
            task = pickle.loads(serialized_task.content)
            self.conn.delete_message(self.queue_name, serialized_task.id,
                                     serialized_task.pop_receipt)
            return task

    def get_length(self):
        # The service only exposes an *approximate* message count.
        metadata = self.conn.get_queue_metadata(self.queue_name)
        return metadata.approximate_message_count
Example #10
0
class QueueStorageHandler(logging.Handler):
    """
    Handler class which sends log messages to a Azure Storage queue.
    """
    def __init__(self, 
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 queue='logs',
                 message_ttl=None,
                 visibility_timeout=None,
                 base64_encoding=False,
                 is_emulated=False,
                 ):
        """
        Initialize the handler.

        :param account_name: Azure storage account name.
        :param account_key: Azure storage account key.
        :param protocol: transport protocol, 'https' (default) or 'http'.
        :param queue: queue-name template, expanded by _formatName with
            hostname/pid metadata.
        :param message_ttl: time-to-live forwarded to put_message.
        :param visibility_timeout: visibility timeout forwarded to put_message.
        :param base64_encoding: when True, messages are base64-encoded.
        :param is_emulated: target the local storage emulator.
        """
        logging.Handler.__init__(self)
        self.service = QueueService(account_name=account_name,
                                    account_key=account_key,
                                    is_emulated=is_emulated,
                                    protocol=protocol)
        self.meta = {'hostname': gethostname(), 'process': os.getpid()}
        self.queue = _formatName(queue, self.meta)
        # Queue creation is deferred to the first emit() call.
        self.queue_created = False
        self.message_ttl = message_ttl
        self.visibility_timeout = visibility_timeout
        self.base64_encoding = base64_encoding

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified queue.
        """
        try:
            if not self.queue_created:
                self.service.create_queue(self.queue)
                self.queue_created = True
            record.hostname = self.meta['hostname']
            msg = self._encode_text(self.format(record))
            self.service.put_message(self.queue,
                                     msg,
                                     self.visibility_timeout,
                                     self.message_ttl)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # Per logging convention, never let a logging failure crash the
            # application; delegate to the standard error handler.
            self.handleError(record)

    def _encode_text(self, text):
        # Optionally base64-encode; otherwise ensure unicode on Python 2
        # (`unicode` is undefined on Python 3, but that branch never runs there).
        if self.base64_encoding:
            text = b64encode(text.encode('utf-8')).decode('ascii')
        # fallback for the breaking change in azure-storage 0.33
        elif sys.version_info < (3,):
            if not isinstance(text, unicode):
                text = text.decode('utf-8')
        return text
Example #11
0
def check():
    """Endlessly enqueue a 'monitoring_<epoch>' marker every 10 seconds."""
    service = QueueService(account_name=account_name,
                           account_key=account_key)
    service.create_queue('monitoring')
    while True:
        print("Adding new user to scrap...")
        marker = f'monitoring_{int(time.time())}'
        service.put_message('monitoring', marker)
        time.sleep(10)
Example #12
0
class QueueStorageHandler(logging.Handler):
    """Logging handler that forwards formatted records to an Azure Storage
    queue."""

    def __init__(
        self,
        account_name=None,
        account_key=None,
        protocol='https',
        queue='logs',
        message_ttl=None,
        visibility_timeout=None,
        base64_encoding=False,
        is_emulated=False,
    ):
        """Initialize the handler and its queue service client."""
        logging.Handler.__init__(self)
        self.service = QueueService(account_name=account_name,
                                    account_key=account_key,
                                    is_emulated=is_emulated,
                                    protocol=protocol)
        self.meta = {'hostname': gethostname(), 'process': os.getpid()}
        self.queue = _formatName(queue, self.meta)
        # Queue creation is deferred to the first emit().
        self.queue_created = False
        self.message_ttl = message_ttl
        self.visibility_timeout = visibility_timeout
        self.base64_encoding = base64_encoding

    def emit(self, record):
        """Format *record* and push it onto the configured queue."""
        try:
            if not self.queue_created:
                self.service.create_queue(self.queue)
                self.queue_created = True
            record.hostname = self.meta['hostname']
            payload = self._encode_text(self.format(record))
            self.service.put_message(self.queue, payload,
                                     self.visibility_timeout, self.message_ttl)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            # Never let a logging failure crash the application.
            self.handleError(record)

    def _encode_text(self, text):
        """Optionally base64-encode *text*; ensure unicode on Python 2."""
        if self.base64_encoding:
            return b64encode(text.encode('utf-8')).decode('ascii')
        # fallback for the breaking change in azure-storage 0.33
        if sys.version_info < (3,) and not isinstance(text, unicode):
            return text.decode('utf-8')
        return text
Example #13
0
def writeToAzureQueue(outputList):
    """Serialize *outputList* as UTF-8 JSON and push it onto the glossary
    output queue configured through environment variables."""
    # send the output to the queue
    logging.info("outputqueueLength = %s" % (str(len(outputList))))
    connect_str = os.getenv("AzureWebJobsStorage")
    queue_name = os.getenv("glossaryOutPutQueue")
    service = QueueService(connection_string=connect_str)
    # Payload is bytes, so use the binary-safe base64 codecs.
    service.encode_function = QueueMessageFormat.binary_base64encode
    service.decode_function = QueueMessageFormat.binary_base64decode
    payload = json.dumps(outputList).encode('utf-8')
    service.put_message(queue_name, payload)
Example #14
0
class AzureStorageQueue(Queue):
    """Interface for interacting with an Azure Storage Queue (through the
    Queue contract)."""

    def __init__(self, queue_name, config: AzureStorageConfig):
        """Initializes the storage queue.

        :param queue_name: name of the queue; if it does not exist on the
            storage account yet, it is created on the first operation.
        :param config: AzureStorageConfig with a valid account name and key.
        """
        self._queue_name = queue_name
        self._queue_service = QueueService(account_name=config.account_name,
                                           account_key=config.account_key)
        # Messages are stored base64-encoded text.
        self._queue_service.encode_function = \
            QueueMessageFormat.text_base64encode
        self._queue_service.decode_function = \
            QueueMessageFormat.text_base64decode

    def _ensure_queue(self):
        # Create the queue if it does not exist yet (see __init__ contract).
        self._queue_service.create_queue(self._queue_name)

    def push(self, message):
        """Pushes a new message onto the queue."""
        self._ensure_queue()
        self._queue_service.put_message(self._queue_name, message)

    def pop(self):
        """Pops the first message from the queue and returns it."""
        self._ensure_queue()
        # get_messages locks the returned message for visibility_timeout
        # seconds so no other client can consume it before we delete it.
        batch = self._queue_service.get_messages(self._queue_name,
                                                 visibility_timeout=30)
        for msg in batch:
            content = msg.content
            self._queue_service.delete_message(self._queue_name, msg.id,
                                               msg.pop_receipt)
            return content
        return None

    def peek(self):
        """Peeks the first message from the queue and returns it."""
        self._ensure_queue()
        for msg in self._queue_service.peek_messages(self._queue_name):
            return msg.content
        return None

    @staticmethod
    def create(queue_name: str):
        """Build an AzureStorageQueue from AzureConfig's storage_config."""
        return AzureStorageQueue(queue_name, AzureConfig().storage_config)
Example #15
0
def get_new_proxy():
    """Rotate the 'proxies' queue: take the next proxy message, delete it,
    and re-enqueue it at the back; return the proxy string.

    Returns None when the queue is currently empty (BUG FIX: the original
    left `new_proxy` unbound in that case, raising UnboundLocalError at
    the final `return`).
    """
    queue_service = QueueService(account_name=account_name,
                                 account_key=account_key)
    new_proxy = None
    messages = queue_service.get_messages('proxies')
    if messages:
        for message in messages:
            new_proxy = message.content
            queue_service.delete_message('proxies', message.id,
                                         message.pop_receipt)
        # Re-queue the last proxy so rotation cycles through the pool.
        queue_service.put_message('proxies', new_proxy)
    return new_proxy
Example #16
0
def detail(request, question_id):
    """Django poll-detail view that also pushes a timestamp heartbeat
    message onto an Azure storage queue before rendering the question."""
    # SECURITY(review): storage account key is hard-coded in source control;
    # move it to settings/environment and rotate the exposed key.
    queue_service = QueueService(account_name='firstsep9ed1', account_key='W+XOdAGrNzjtqTjCopNYZ5wX09Rfy1MTNLGwGnza6eofPkVqXMePyC5ovA+rkd3m4nDxEJeYcY0wCEcodLxyWQ==')
    #queue_service.create_queue('taskqueue')
    #message = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    # Base64-encode by hand because no encode_function is configured.
    message = base64.b64encode(bytes(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"), 'utf-8'))
    queue_service.put_message('test-python-queue', message.decode('ascii'))

    try:
        question = Question.objects.get(pk=question_id)
    except Question.DoesNotExist:
        raise Http404("Question does not exist")
    return render(request, 'polls/detail.html', {'question': question})
Example #17
0
class AzureInvoker(object):
    """Invoker that triggers work by dropping JSON payloads onto an Azure
    storage queue."""

    def __init__(self, account, key, queue_name):
        """Create the queue client; messages are base64-encoded text."""
        self.queue_service = QueueService(account_name=account,
                                          account_key=key)
        self.queue_service.encode_function = QueueMessageFormat.text_base64encode
        self.queue = queue_name
        self.TIME_LIMIT = True

    def invoke(self, payload):
        """Serialize *payload* as JSON and enqueue it; returns an empty dict.

        BUG FIX: json.dumps returns str on Python 3, which has no .decode();
        the original `.decode('utf-8')` raised AttributeError.
        """
        self.queue_service.put_message(
            self.queue,
            json.dumps(payload, ensure_ascii=False))
        return {}

    def config(self):
        """Expose the queue name as this invoker's function identifier."""
        return {"function_name": self.queue}
Example #18
0
def main(myblob: func.InputStream):
    """Blob-trigger entry point: announces a newly uploaded video by
    enqueueing a JSON descriptor (filename, location, track, timestamp)
    onto the queue named by the AzureQueueName app setting."""
    logging.info(
        f"Python blob trigger function processed blob \n"
        f"Name: {myblob.name[len(os.environ['remoteStorageInputContainer'])+1:]}\n"
        f"Blob Size: {myblob.length} bytes")

    # Strip the "<container>/" prefix to recover the bare blob name.
    name = myblob.name[len(os.environ['remoteStorageInputContainer']) + 1:]
    logging.info(name)

    queue_service = QueueService(
        account_name=os.environ['remoteStorageAccountName'],
        account_key=os.environ['remoteStorageAccountKey'])
    # Queue consumers expect base64-encoded message text.
    queue_service.encode_function = QueueMessageFormat.text_base64encode

    now = datetime.strftime(datetime.now(), "%Y-%m-%dT%H:%M:%S%Z")
    # BUG FIX: the message was built by string concatenation, which produced
    # invalid JSON whenever the blob name contained a quote or backslash.
    # json.dumps escapes correctly; separators match the original layout.
    video = json.dumps(
        {"filename": name, "location": "Utrecht", "track": "5b",
         "timestamp": now},
        separators=(', ', ':'))

    queue_service.put_message(os.environ['AzureQueueName'], video)
Example #19
0
class WorkloadTracker(object):
    """
    Dedicated class to track important events during the running of the workload.
    """
    def __init__(self, logger):
        # Config() supplies the storage account name, SAS token and queue name.
        self.config = Config()
        self.logger = logger
        self.init_storage_services()

    def init_storage_services(self):
        """
        Initializes the storage service clients using values from config.py.
        :return: True on success. False on failure.
        :rtype: boolean
        """
        try:
            # creates instances of Azure QueueService
            self.workload_queue_service = QueueService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.workload_tracker_sas_token)
            self.workload_queue_service.create_queue(
                self.config.workload_tracker_queue_name)
            # Events are written as plain (unencoded) JSON text.
            self.workload_queue_service.encode_function = models.QueueMessageFormat.noencode

            return True
        except Exception as ex:
            # NOTE(review): stdlib loggers expose .exception(), not
            # .Exception(); presumably `logger` is a custom wrapper — confirm,
            # otherwise this handler itself raises AttributeError.
            self.logger.Exception(ex, self.init_storage_services.__name__)
            return False

    def write(self, event_type, content=None):
        """
        Write the event to the dedicated workload tracker queue
        """
        # create an event
        evt = WorkloadEvent()
        evt.event_type = int(event_type)
        evt.content = content

        # write serialized event to Azure queue
        serialized_event = json.dumps(evt.__dict__)
        self.workload_queue_service.put_message(
            self.config.workload_tracker_queue_name, serialized_event)
Example #20
0
def send_tweet(tweet_message, in_reply_to, entity):
    """Queue a tweet request (status text, reply target, and the entity's
    percentiles) for the twitter-bot worker to pick up."""
    queue_name = settings.AZURE_QUEUE_NAME
    service = QueueService(
        account_name=settings.TWITTERBOT_STORAGE_ACCOUNT_NAME,
        account_key=settings.TWITTERBOT_STORAGE_ACCOUNT_KEY)
    service.create_queue(queue_name)

    payload = json.dumps({
        'id': entity['id'],
        'tweet': {
            'status': tweet_message,
            'in_reply_to_status_id': in_reply_to
        },
        'percentiles': entity['percentiles']
    })
    # The consumer expects base64-encoded UTF-8 text.
    encoded = base64.b64encode(payload.encode('utf-8')).decode('utf-8')
    service.put_message(queue_name, encoded)
Example #21
0
def queue():
    """Smoke-test the task queue: create it, enqueue one message, then
    drain and delete whatever is readable."""
    service = QueueService(account_name=config.STORAGE_ACCOUNT_NAME,
                           account_key=config.STORAGE_ACCOUNT_KEY)

    print("Creating task queue")
    task_queue_name = config.TASK_QUEUE_NAME
    service.create_queue(task_queue_name)
    print("Task queue created")

    service.put_message(task_queue_name, u'message1')

    # Drain up to 16 messages, printing and deleting each one.
    for message in service.get_messages(task_queue_name, num_messages=16):
        print(message.content)
        service.delete_message(task_queue_name, message.id,
                               message.pop_receipt)
Example #22
0
def generate(id, n):
    """Simulate a weather station: every *n* seconds push a JSON reading
    (RTC timestamp plus random sensor values) onto the 'stormtest' queue,
    until the global TERMINATE flag allows the process to exit.

    :param id: station identifier embedded in every reading.
    :param n: seconds to sleep between readings.
    """
    global TERMINATE

    # SECURITY(review): account key is hard-coded in source control; load it
    # from configuration and rotate the exposed key.
    queue_service = QueueService(account_name='izastorm',
                    account_key='isUic1EEbXg8l53zUrl+o1Jmf8JPze/E8S5XQ3ActlrmpEmGqMSKdkSP/RTF4aFAdQmLeVy6DWT3pGJ1k/I2HA==')
    dateFormat = '%Y-%m-%d-%H-%M-%S'

    # Stagger start-up so concurrent generators don't burst simultaneously.
    time.sleep(random.uniform(0.0, 5.0))

    while True:
        datetimeValues = {}
        datetimeValues['format'] = dateFormat
        datetimeValues['source'] = 'RTC_DS1307'
        datetimeValues['stationID'] = id
        datetimeValues['value'] = time.strftime(dateFormat)

        sensorsValues = {}
        sensorsValues['LDR'] = random.uniform(0.0, 100.0)
        sensorsValues['DHT22_AH'] = random.uniform(0.0, 100.0)
        sensorsValues['BMP085_PRESSURE'] = random.uniform(90000.0, 110000.0)
        sensorsValues['DHT22_TEMP'] = random.uniform(0.0, 45.0)

        data = {}
        data['sensors'] = sensorsValues
        data['datetime'] = datetimeValues

        try:
            # BUG FIX: json.dumps returns str on Python 3; the original
            # called .decode('utf-8') on it, raising AttributeError.
            queue_service.put_message('stormtest', json.dumps(data))
        except Exception:
            # Transient queue errors: back off briefly, then keep generating.
            time.sleep(random.uniform(0.0, 2.0))

        try:
            time.sleep(n)
        except KeyboardInterrupt:
            TERMINATE = False

        # NOTE(review): Ctrl-C sets TERMINATE to False, so exit(0) only fires
        # if TERMINATE was already truthy — confirm the intended semantics.
        if TERMINATE:
            exit(0)
Example #23
0
def save_dic_to_blob(vid_id):
    """Persist the module-global _time_transcript_dic as '<vid_id>.txt' in
    the corpus-segments container, then notify the corpus-segment merger
    queue with a base64-encoded JSON message referencing the blob."""
    # save dic as blob
    # SECURITY(review): account credentials are hard-coded in source control;
    # move them to configuration and rotate the exposed key.
    account_name = 'ctrlfvfunctionaa670'
    account_key = 'MoPjP9rLlfN8nK4+uejH6fSCwZHOqqvvfwVa6Ais3emwtGlly59oCS2Z8VQ+8OiKzzVwMghRImUPddVyMPAN9Q=='
    corpus_seg_container_name = "corpus-segments-container"
    blob_name = vid_id + ".txt"
    print("saving dic as blob...")
    block_blob_service = BlockBlobService(account_name, account_key)
    block_blob_service.create_blob_from_text(
        corpus_seg_container_name, blob_name,
        json.dumps(list(_time_transcript_dic.items())))
    # add message to asr-to-CorpusSegMerger queue
    queue_service = QueueService(account_name=account_name,
                                 account_key=account_key)
    queue_name = "asr-to-corpus-seg-merger-q"
    print('Creating message for queue:' + queue_name)
    message = {"ID": blob_name}
    message = json.dumps(message)
    # Base64-encode manually since no encode_function is set on the service.
    message = base64.b64encode(message.encode("ascii")).decode()
    queue_service.put_message(queue_name, message)
    print("Sent message:" + message)
Example #24
0
def run_temperature():
    """Generate nine temperature readings, enqueue each on the normal
    queue, and additionally alert + enqueue on the high-temperature queue
    when a reading exceeds 80."""
    queue_service = QueueService(account_name, account_key)

    # NOTE(review): Azure queue names must be lowercase; 'NormalTemp' /
    # 'HighTemp' will be rejected by the service — confirm and rename.
    queue1 = "NormalTemp"
    queue2 = "HighTemp"

    queue_service.create_queue(queue1)
    print("Queue created successfully...")

    queue_service.create_queue(queue2)
    print("Queue created successfully...")

    for i in range(1, 10):
        temp = generateTemp()
        queue_service.put_message(queue1, temp)

        if temp > 80:
            # BUG FIX: the original line was a SyntaxError
            # (`message=temp"is the temperature"`); build the text explicitly.
            tkMessageBox.showinfo(message=str(temp) + " is the temperature")
            queue_service.put_message(queue2, temp)
Example #25
0
class TimeLineWorker:
    """Minimal worker around a single Azure storage queue configured via
    the class-level ACCOUNT_NAME / QUEUE_KEY / QUEUE_NAME constants."""

    QUEUE_KEY = ''
    ACCOUNT_NAME = ''
    QUEUE_NAME = ''

    def __init__(self):
        self.queue_service = QueueService(account_name=TimeLineWorker.ACCOUNT_NAME, 
                                          account_key=TimeLineWorker.QUEUE_KEY)

    def insert_message(self):
        """Enqueue a fixed test payload as a JSON string.

        BUG FIX: `unicode(...)` here and the statement-form `print` below
        were Python 2 only and fail under Python 3 (which the rest of this
        codebase targets — it uses f-strings); json.dumps already returns text.
        """
        obj = {
            "message": "test message",
            "other_key": 10
        }
        message = json.dumps(obj)
        self.queue_service.put_message(TimeLineWorker.QUEUE_NAME, message)

    def get_next_message(self):
        """Print and delete each readable message from the queue."""
        messages = self.queue_service.get_messages(TimeLineWorker.QUEUE_NAME)
        for message in messages:
            print(message.content)
            self.queue_service.delete_message(TimeLineWorker.QUEUE_NAME,
                                              message.id, message.pop_receipt)
def upload_file(request):
    """Django view: accept a video upload, store it in blob storage under a
    dated/UUID folder, and queue a 'ready to encode' notification."""
    template = loader.get_template('app/upload_file.html')

    if request.method == 'POST' and request.FILES['myfile']:
        myfile = request.FILES['myfile']
        folder = datetime.datetime.now().strftime('%Y%m%d/') + str(uuid.uuid4())
        filename = folder + '/' + myfile.name

        account = os.environ['SVPD_STORAGE_ACCOUNT_NAME']
        key = os.environ['SVPD_STORAGE_ACCOUNT_KEY']

        # save the file to Azure Storage
        blob_service = BlockBlobService(account_name=account, account_key=key)
        blob_service.create_blob_from_bytes(
            os.environ['SVPD_STORAGE_ACCOUNT_UPLOADED'], filename,
            myfile.read())

        # put a message into a queue letting the system know the video is
        # ready for processing
        notification = json.dumps({
            'filename': myfile.name,
            'folder': folder,
            'size': str(myfile.size)})
        queue_service = QueueService(account_name=account, account_key=key)
        queue_service.put_message(
            os.environ['SVPD_STORAGE_ACCOUNT_READY_TO_ENCODE'], notification)

        return HttpResponse(template.render({
            'uploaded_file_name': filename,
        }, request))

    return HttpResponse(template.render({}, request))
Example #27
0
class AzureQueue(object):
    """Convenience wrapper around a single Azure storage queue.

    The queue is created on construction if it does not already exist.
    """

    def __init__(self, account_name, account_key, queue_name):
        self.queue_name = queue_name
        self.queue_service = QueueService(account_name=account_name, account_key=account_key)
        # Idempotent: no-op when the queue already exists.
        self.queue_service.create_queue(self.queue_name)

    def put_message_into_queue(self, content) -> QueueMessage:
        """
        Publishes a message with `content`

        :param content: The queue message

        :returns: A QueueMessage that has the message as well as metadata
        :rtype: QueueMessage
        """
        return self.queue_service.put_message(self.queue_name, content)

    def get_messages(self) -> list:
        """
        Retrieves a batch of messages that have been published into the queue

        :returns: List of Queue messages
        :rtype: list
        """
        return self.queue_service.get_messages(self.queue_name)

    def delete_message_from_queue(self, message_id, pop_receipt):
        """Permanently remove a previously dequeued message."""
        self.queue_service.delete_message(self.queue_name, message_id, pop_receipt)

    def get_message_count(self):
        """Approximate number of messages currently on the queue."""
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        return queue_metadata.approximate_message_count

    def delete(self):
        """Delete the queue itself."""
        return self.queue_service.delete_queue(self.queue_name)

    def empty(self):
        """Drain one batch of messages from the queue.

        BUG FIX: the original referenced the bare name ``queue_service``
        (NameError) instead of ``self.queue_service``.
        NOTE(review): BATCH_NUMBER and TIMEOUT_IN_SECONDS are assumed to be
        module-level constants defined elsewhere — confirm. This drains only
        a single batch, not necessarily the whole queue.
        """
        messages = self.queue_service.get_messages(self.queue_name,
                                                   num_messages=BATCH_NUMBER,
                                                   visibility_timeout=TIMEOUT_IN_SECONDS)
        for message in messages:
            self.queue_service.delete_message(self.queue_name, message.id, message.pop_receipt)
    def test_sas_add(self):
        """A SAS token granting only ADD permission can enqueue a message,
        which is then readable via the account-key client (live-only test)."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange: SAS token scoped to the first test queue with ADD rights.
        token = self.qs.generate_shared_access_signature(
            self.test_queues[0],
            self._get_shared_access_policy(QueueSharedAccessPermissions.ADD),
        )

        # Act: a fresh QueueService authenticated only by the SAS token.
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.put_message(self.test_queues[0], 'addedmessage')

        # Assert: read back through the privileged account-key client.
        result = self.qs.get_messages(self.test_queues[0])
        self.assertEqual('addedmessage', result[0].message_text)
    def test_sas_add(self):
        """A SAS token granting only ADD permission can enqueue a message,
        which is then readable via the account-key client (live-only test).

        NOTE(review): byte-for-byte duplicate of the preceding test_sas_add —
        likely a copy/concatenation artifact."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange: SAS token scoped to the first test queue with ADD rights.
        token = self.qs.generate_shared_access_signature(
            self.test_queues[0],
            self._get_shared_access_policy(QueueSharedAccessPermissions.ADD),
        )

        # Act: a fresh QueueService authenticated only by the SAS token.
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.put_message(self.test_queues[0], 'addedmessage')

        # Assert: read back through the privileged account-key client.
        result = self.qs.get_messages(self.test_queues[0])
        self.assertEqual('addedmessage', result[0].message_text)
    def test_sas_add(self):
        """A SAS token granting only ADD permission can enqueue a message,
        which is then readable via the account-key client (live-only test).

        NOTE(review): newer-SDK variant of the earlier test_sas_add
        (generate_queue_shared_access_signature / QueuePermissions /
        ``.content``) — appears alongside the legacy versions."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange: one-hour SAS token on a fresh queue with ADD rights.
        queue_name = self._create_queue()
        token = self.qs.generate_queue_shared_access_signature(
            queue_name,
            QueuePermissions.ADD,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act: a fresh QueueService authenticated only by the SAS token.
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.put_message(queue_name, u'addedmessage')

        # Assert: read back through the privileged account-key client.
        result = self.qs.get_messages(queue_name)
        self.assertEqual(u'addedmessage', result[0].content)
    def test_sas_add(self):
        """A SAS token granting only ADD permission can enqueue a message,
        which is then readable via the account-key client (live-only test).

        NOTE(review): near-duplicate of the preceding test_sas_add; only the
        guard differs (``need_recording_file`` vs ``need_recordingfile``)."""
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange: one-hour SAS token on a fresh queue with ADD rights.
        queue_name = self._create_queue()
        token = self.qs.generate_queue_shared_access_signature(
            queue_name,
            QueuePermissions.ADD,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act: a fresh QueueService authenticated only by the SAS token.
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.put_message(queue_name, u'addedmessage')

        # Assert: read back through the privileged account-key client.
        result = self.qs.get_messages(queue_name)
        self.assertEqual(u'addedmessage', result[0].content)
# Example #32
# 0
# Main thread - Check if port 25565 and move to the next if it's not. Once a free port has been found, create a new instance (up to 3)
while instanceCreated == 0:
    freeport = checkPort(defaultport)
    if freeport == 0:
        defaultport += 1
        counter += 1
        if counter > maxInstances:
            # print() call form: identical output on Python 2 and 3.
            print("Too many instances! you can't run more than %s instances" % maxInstances)
            sys.exit()
    else:
        if debug == 1:
            print("Port %s is free" % defaultport)
        container = createMcInstance(defaultport)
        instanceCreated = 1
        counter += 1

# Return Instance ID, port and total number of instances once finished creating the server
print("Instance ID: %s" % container["Id"])
print("Listening on port %s" % defaultport)
print("There are %s instances running on this server" % counter)

# Update servers queue
queue_service = QueueService(account_name, account_key)
try:
    queue_service.create_queue(queuename)
except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt propagate.
    print("Queue creation failed.")

# BUG FIX: the original applied % only to container["Id"] (three %s
# placeholders, one argument -> TypeError) and passed defaultport and the
# hostname as stray extra positional arguments to put_message.
queue_service.put_message(queuename, '{"ContainerID":"%s", "ContainerPort":"%s", "ServerID":"%s"}'
                          % (container["Id"], defaultport, socket.gethostname()))

sys.exit()
# Example #33
# 0
        # This is our initial state: the target image.
        # Note that `scipy.optimize.fmin_l_bfgs_b` can only process flat vectors.
        x = preprocess_image(target_image_path)
        x = x.flatten()
        # L-BFGS-B loop: each outer iteration refines the generated image
        # toward the minimum of the style-transfer loss.
        for i in range(iterations):
            print('Start of iteration', i)
            start_time = time.time()
            # maxfun=20 caps loss/gradient evaluations per outer iteration.
            x, min_val, info = fmin_l_bfgs_b(evaluator.loss,
                                             x,
                                             fprime=evaluator.grads,
                                             maxfun=20)
            print('Current loss value:', min_val)
            # Save current generated image
            # assumes the flat vector reshapes to (img_height, img_width, 3)
            # — TODO confirm against preprocess_image.
            img = x.copy().reshape((img_height, img_width, 3))
            img = deprocess_image(img)
            fname = result_prefix + '_at_iteration_%d.png' % i
            #print(fname)
            imsave(fname, img)
            end_time = time.time()
            print('Image saved as', fname)
            print('Iteration %d completed in %ds' % (i, end_time - start_time))

        # Generate image
        imsave(output_image_path + output_image_name, img)
        saveToBlob(output_image_name, output_image_path + output_image_name)

        # NOTE(review): result URL is hard-coded to the 'imprinter' storage
        # account/container — confirm this matches where saveToBlob uploads.
        blob_image_url = 'https://imprinter.blob.core.windows.net/imprinted/' + output_image_name
        queue_service.put_message('imprintresults', blob_image_url)
        # Remove the processed job message only after the result is published.
        queue_service.delete_message(queue_name, message.id,
                                     message.pop_receipt)
def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP-triggered backup scan over a source storage account.

    For every non-snapshot blob in every source container:
      1. create a snapshot/backup when the blob is new or its etag changed
         (via the ``create_snapshot`` helper defined elsewhere in this file);
      2. when the latest version is missing from the backup account, enqueue
         a JSON work item for the incremental-backup worker.

    :param req: the triggering HTTP request (unused beyond logging).
    :returns: HttpResponse containing a status dictionary rendered as text.
    """
    import json  # local import: this example's top-of-file import block is not in view

    logging.info('Python HTTP trigger function processed a request.')

    # DefaultAzureCredential supports managed identity or environment
    # configuration (see docs). One instance is enough — the original
    # constructed a second, redundant credential below.
    credential = DefaultAzureCredential()

    # Parse parameters.
    storage_account_source = os.environ["par_storage_account_name_source"]
    storage_account_source_url = "https://" + storage_account_source + ".blob.core.windows.net"
    storage_account_backup = os.environ["par_storage_account_name_backup"]
    storage_account_backup_url = "https://" + storage_account_backup + ".blob.core.windows.net"

    # Blob clients for source and backup accounts.
    client_source = BlobServiceClient(account_url=storage_account_source_url,
                                      credential=credential)
    client_backup = BlobServiceClient(account_url=storage_account_backup_url,
                                      credential=credential)

    # Queue client; messages must be base64-encoded for the consumer.
    queue_service = QueueService(
        account_name=os.environ['par_storage_account_name_queue'],
        account_key=os.environ['par_storage_account_key_queue'])
    queue_service.encode_function = QueueMessageFormat.text_base64encode

    # Walk all blobs (including snapshots) in every source container. The
    # listing interleaves each base blob with its snapshots, so tracking the
    # previous name/etag detects new and changed blobs.
    for container in client_source.list_containers():
        logging.info(container.name)
        container_source = client_source.get_container_client(container.name)

        prev_blob_name = ""
        prev_blob_etag = ""
        for blob in container_source.list_blobs(include=['snapshots']):

            if blob.snapshot is None:
                # Base blob (not a snapshot).
                # 1. Check if snapshot needs to be created.
                if prev_blob_name != blob.name:
                    # New blob without snapshot, create snapshot/backup.
                    logging.info("new blob" + blob.name +
                                 ", create snapshot/backup")
                    create_snapshot(client_source, queue_service,
                                    container.name, blob.name, blob.etag)
                elif prev_blob_etag != blob.etag:
                    # Existing blob that has changed, create snapshot/backup.
                    logging.info(blob.name +
                                 "has changed, create snapshot/backup")
                    create_snapshot(client_source, queue_service,
                                    container.name, blob.name, blob.etag)

                # 2. Check if incremental backup needs to be created.
                # Fetch the properties once — the original made two
                # get_blob_properties() round trips per blob.
                blob_source = client_source.get_blob_client(
                    container=container.name, blob=blob.name)
                properties = blob_source.get_blob_properties()
                source_last_modified = properties['last_modified']
                source_etag = str(properties['etag']).replace("\"", "")
                blob_name_backup = append_timestamp_etag(
                    blob.name, source_last_modified, source_etag)
                blob_backup = client_backup.get_blob_client(
                    container=container.name + "bak", blob=blob_name_backup)
                if not check_blob_exists(blob_backup):
                    # Latest blob is missing from the backup account: enqueue
                    # a work item. json.dumps replaces the original hand-built
                    # JSON string, which broke on quotes in blob names.
                    queue_json = json.dumps({
                        "container": container.name,
                        "blob_name": blob.name,
                        "etag": source_etag})
                    logging.info("backup needed for: " + queue_json)
                    queue_service.put_message(os.environ['par_queue_name'],
                                              queue_json)
                    #asyncio.run(copy_adf_blob_source_backup(blob_source, blob_backup))

            prev_blob_name = blob.name
            prev_blob_etag = blob.etag

    result = {"status": "ok"}
    return func.HttpResponse(str(result))
# Example #35
# 0
#!/usr/bin/python
import sys
from azure.storage.queue import QueueService

# Queue coordinates: queue name and account key come from the command line.
account_name = "mcdockerqueue"
queuename = sys.argv[1]
account_key = sys.argv[2]


queue_service = QueueService(account_name, account_key)

try:
    queue_service.create_queue(queuename)
except Exception:
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt
    # propagate; creation is best-effort (queue may already exist).
    print("Queue creation failed.")

# Publish a handful of numbered test messages.
# print() call form: identical output on Python 2 and 3.
for i in range(4):
    queue_service.put_message(queuename, 'Hello World %s' % i)
    print(i)
# Example #36
# 0
import numpy as np
import json, base64, time
from azure.storage.queue import QueueService, QueueMessageFormat

########################################################################################################################
# Credentials

queue_service = QueueService(account_name='', account_key='')
queue_service.encode_function = QueueMessageFormat.text_base64encode

########################################################################################################################

# Job parameters
container = 'seismic'
partial_gradient_path = 'partial_gradients/'
full_gradient_path = 'full_gradients/'
gradient_name = 'test_grad_'
iteration = 1
maxiter = 3
batchsize = 100

# Encode msg and submit job: the fields form one '&'-separated record that
# the consumer splits back apart.
msg = '&'.join([container, partial_gradient_path, full_gradient_path,
                gradient_name, str(iteration), str(maxiter), str(batchsize)])
queue_service.put_message('iterationqueue', msg)
class AzureService(object):
    """Facade over one Azure storage account: a blob container for images,
    a 'get' queue of work items and a 'push' queue for results."""

    # Seconds a dequeued message stays invisible to other consumers before
    # it reappears on the queue.
    VISIBILITY_TIMEOUT = 5 * 60

    def __init__(self, connection_string, container_name, queue_get, queue_push, logger=None):
        self.ctnname = container_name
        self.getname = queue_get
        self.pushname = queue_push

        self.qs = QueueService(connection_string=connection_string,
                               protocol='https',
#                                endpoint_suffix='core.windows.net'
                                )
        self.bs = BlockBlobService(connection_string=connection_string)
        # create_* calls no-op when the resource already exists.
        self.qs.create_queue(self.getname, timeout=1)
        self.qs.create_queue(self.pushname, timeout=1)
        self.bs.create_container(self.ctnname, timeout=1)
        if logger: logger.info('Init Azure success')

    def pushMessage(self, message, qname=None, logger=None):
        """Publish `message` on `qname` (defaults to the push queue)."""
        if qname is None:
            qname = self.pushname
        try:
            # BUG FIX: the original always published to self.pushname and
            # silently ignored an explicit `qname` argument.
            self.qs.put_message(qname, message)
        except Exception as e:
            if logger:
                logger.exception('ERROR PUSH MESSAGE ')
            else:
                # print() call form: identical output on Python 2 and 3.
                print('ERROR PUSH MESSAGE ')
                print(e)

    def getMessage(self, qname=None, num=1, logger=None):
        """Dequeue up to `num` messages from `qname` (defaults to the get
        queue); returns [] on error."""
        if qname is None:
            qname = self.getname
        try:
            message = self.qs.get_messages(qname, num, visibility_timeout=self.VISIBILITY_TIMEOUT)
        except Exception as e:
            if logger:
                logger.exception('ERROR GET MESSAGE ')
            else:
                print('ERROR GET MESSAGE ')
                print(e)
            return []
        return message

    def getReceiptInfo(self, logger=None):
        """Dequeue one receipt message and parse its metadata.

        :returns: (raw_message, ReceiptSerialize) or (None, None) when the
                  queue is empty.
        """
        message = self.getMessage(logger=logger)
        if len(message) > 0:
            rinfo = ReceiptSerialize.fromjson(message[0].content)
            return message[0], rinfo
        else:
            return None, None

    def count(self):
        """Approximate message counts for both queues plus the blob count."""
        metadata_get = self.qs.get_queue_metadata(self.getname)
        metadata_push = self.qs.get_queue_metadata(self.pushname)
        # list_blobs is paged; counting requires a full iteration.
        bc = sum(1 for _ in self.bs.list_blobs(self.ctnname))
        return {'get_count' : metadata_get.approximate_message_count,
                'push_count': metadata_push.approximate_message_count,
                'blob_count': bc
                }

    def uploadFolder(self, folderpath, logger):
        """Upload every *.jpg/*.jpeg in `folderpath` and enqueue a metadata
        message for each uploaded image."""
        for filename in os.listdir(folderpath):
            if len(filename) > 4:
                suffix = filename[-4:].upper()
            else:
                continue
            if '.JPG' == suffix or 'JPEG' == suffix:
                receipt_metadata = ReceiptSerialize()
                # Python 2 only: decode the raw filename to text.
                receipt_metadata.receiptBlobName = unicode(filename, 'utf-8')
                self.qs.put_message(self.getname, b64encode(receipt_metadata.toString()).decode('utf-8'))
                self.bs.create_blob_from_path(self.ctnname, receipt_metadata.receiptBlobName, os.path.join(folderpath, filename), max_connections=2, timeout=None)
                logger.info('upload %s', filename)

    def getImage(self, imgname, logger=None):
        """Download blob `imgname` into the local download directory.

        :returns: the local path on success, '' when the blob is missing,
                  or None on any other error.
        NOTE(review): relies on a module-level `args` object for
        `download_dir` — confirm it is defined where this class is used.
        """
        localpath = os.path.join(args.download_dir, imgname)
        try:
            self.bs.get_blob_to_path(self.ctnname, imgname, localpath)
        except AzureMissingResourceHttpError as e:
            if logger:
                logger.error('Blob named ' + imgname + ' doesnot exist.' , exc_info=True)
            else:
                print('Blob named ' + imgname + ' doesnot exist.')
                print(e)
            return ''
        except Exception as e:
            if logger:
                logger.error('Exception while getting blob.', exc_info=True)
            else:
                print('Exception while getting blob.')
                print(e)
            return None
        return localpath

    def deleteMessage(self, message, qname=None, logger=None):
        """Delete a previously dequeued message (defaults to the get queue)."""
        if qname is None:
            qname = self.getname
        try:
            self.qs.delete_message(qname, message.id, message.pop_receipt)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE MESSAGE ')
            else:
                print('ERROR DELETE MESSAGE ')
                print(e)

    def deleteImage(self, imgname, logger=None):
        """Best-effort delete of blob `imgname`."""
        try:
            self.bs.delete_blob(self.ctnname, imgname)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE IMAGE ')
            else:
                print('ERROR DELETE IMAGE ')
                print(e)

    def cleanUp(self):
        """Drain both queues and delete every blob in the container."""
        count = 0
        print('deleted: ')
        while True:
            messages = self.qs.get_messages(self.getname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.getname, message.id, message.pop_receipt)
            if len(messages) == 0: break
        print(str(count) + ' from queue-get')
        count = 0
        while True:
            messages = self.qs.get_messages(self.pushname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.pushname, message.id, message.pop_receipt)
            if len(messages) == 0: break
        print(str(count) + ' from queue-push') 
        count = 0
        for blob in self.bs.list_blobs(self.ctnname):
            count += 1
            self.bs.delete_blob(self.ctnname, blob.name)
        print(str(count) + ' from container') 
# Example #38
# 0
def put_to_queue(name, content):
    """Ensure queue `name` exists, then publish `content` on it."""
    service = QueueService(account_name=config.AZURE_STORAGE_NAME,
                           account_key=config.AZURE_STORAGE_KEY)
    service.create_queue(name)
    service.put_message(name, content)
class Queue:
    def __init__(self, account_name, account_key, queue_name="logqueue"):
        """Initialize a queue. The type is set by the
        'ACS_LOGGING_QUEUE_TYPE' environment variable. If it is set to
        'AzureStorageQueue' then values must be provided for
        'account_name' and 'account_key' which are values associated
        with the Azure Storage account. 'queue_name' is optional and
        defaults to 'logqueue'.

        """
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name
        # self.log.debug("Queue type: " + self.queue_type + " / " + self.queue_name)

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, "w+")
        else:
            # BUG FIX: bare `queue_type` was an undefined name (NameError).
            self.log.error("Unknown queue type: " + self.queue_type)

    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary cleanup on the queue
           at the end of a run.
        """
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            # BUG FIX: bare `queue_type` was an undefined name (NameError).
            self.log.error("Unknown queue type: " + self.queue_type)

    def enqueue(self, msg, level="INFO"):
        """Prefix `msg` with `level` and append it to the configured queue."""
        msg = level + " - " + msg
        if self.queue_type == "LocalFile":
            # BUG FIX: `file_queue` was missing the `self.` qualifier.
            self.file_queue.write(msg + "\n")
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        self.log.debug(msg)

    def dequeue(self):
        """Return pending messages as a list (empty for unknown queue types)."""
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, "r") as f:
                # BUG FIX: the original returned `readlines()[1]` — a single
                # string, not a list of messages. NOTE(review): if the intent
                # was to skip a header line, use `[1:]` instead.
                messages = f.readlines()
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        """Delete a processed message from the Azure queue.

        NOTE(review): uses `message.message_id`; other wrappers in this file
        use `message.id` (older SDK) — confirm against the SDK version in use.
        """
        self.queue_service.delete_message(self.queue_name, message.message_id, message.pop_receipt)
        #  with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #    processed.write(log)
        #  os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        """Delete the named queue (fail_not_exist=False)."""
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """
        Get the approximate length of the queue
        """
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata["x-ms-approximate-messages-count"]
        return count