Example #1
    def get_queue_client_by_uri(queue_uri):
        storage = StorageUtilities.get_storage_from_uri(queue_uri)

        queue_service = QueueService(account_name=storage.storage_name, account_key=storage.key)
        queue_service.create_queue(storage.container_name)

        return queue_service, storage.container_name
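A brief, hedged usage sketch for the helper above; the exact queue-URI format accepted by StorageUtilities is an assumption, not something the snippet shows.

# Hypothetical caller of the helper above; the URI shape is assumed.
queue_service, queue_name = get_queue_client_by_uri(
    "https://myaccount.queue.core.windows.net/myqueue")
queue_service.put_message(queue_name, "hello from the helper")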
Example #2
class AzureQueue(object):
    def __init__(self, queue_name):
        self.conn = QueueService(account_name=os.getenv('AZURE_ACCOUNT_NAME'),
                                 account_key=os.getenv('AZURE_ACCOUNT_KEY'))
        self.queue_name = queue_name
        self.conn.create_queue(queue_name)
        self.conn.encode_function = QueueMessageFormat.binary_base64encode
        self.conn.decode_function = QueueMessageFormat.binary_base64decode

    def enqueue(self, func, *args, **kwargs):
        task = SimpleTask(func, *args, **kwargs)
        serialized_task = pickle.dumps(task, protocol=pickle.HIGHEST_PROTOCOL)
        self.conn.put_message(self.queue_name, serialized_task)
        return task.id

    def dequeue(self):
        messages = self.conn.get_messages(self.queue_name)
        if len(messages) == 1:
            serialized_task = messages[0]
            task = pickle.loads(serialized_task.content)
            self.conn.delete_message(self.queue_name, serialized_task.id,
                                     serialized_task.pop_receipt)
            return task

    def get_length(self):
        metadata = self.conn.get_queue_metadata(self.queue_name)
        return metadata.approximate_message_count
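A short, hedged usage sketch for the AzureQueue wrapper above; it assumes the SimpleTask class and the AZURE_ACCOUNT_NAME / AZURE_ACCOUNT_KEY environment variables exist as in the original project.

def add(a, b):                        # any picklable function can be a task
    return a + b

queue = AzureQueue('tasks')
task_id = queue.enqueue(add, 2, 3)    # pickles a SimpleTask and puts it on the queue
task = queue.dequeue()                # unpickled SimpleTask, or None if the queue is empty
print(queue.get_length())             # approximate message count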
Example #3
    def get_queue_client_by_uri(queue_uri, session=None):
        storage = StorageUtilities.get_storage_from_uri(queue_uri, session)

        queue_service = QueueService(account_name=storage.storage_name, account_key=storage.key)
        queue_service.create_queue(storage.container_name)

        return queue_service, storage.container_name
Example #4
    def get_queue_client_by_uri(queue_uri):
        queue_name, storage_name, key = StorageUtilities.get_storage_from_uri(queue_uri)

        queue_service = QueueService(account_name=storage_name, account_key=key)
        queue_service.create_queue(queue_name)

        return queue_service, queue_name
Example #5
    def get_queue_client_by_uri(queue_uri, session):
        storage = StorageUtilities.get_storage_from_uri(queue_uri, session)

        queue_service = QueueService(account_name=storage.storage_name,
                                     token_credential=storage.token)
        queue_service.create_queue(storage.container_name)

        return queue_service, storage.container_name
Example #6
def CreateQueue(azureQueueAccountName, azureQueueKey,
                azureQueueAnalysisResults):
    queue_service = QueueService(account_name=azureQueueAccountName,
                                 account_key=azureQueueKey)
    # create the queue if it doesn't exist
    if not queue_service.exists(azureQueueAnalysisResults):
        queue_service.create_queue(azureQueueAnalysisResults)
    return queue_service
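A minimal, hedged sketch of calling the helper above; the account name, key, and queue name are placeholders.

queue_service = CreateQueue('myaccount', '<account-key>', 'analysisresults')
queue_service.put_message('analysisresults', 'analysis complete')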
Example #7
class QueueStorageHandler(logging.Handler):
    """
    Handler class which sends log messages to an Azure Storage queue.
    """
    def __init__(self, 
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 queue='logs',
                 message_ttl=None,
                 visibility_timeout=None,
                 base64_encoding=False,
                 is_emulated=False,
                 ):
        """
        Initialize the handler.
        """
        logging.Handler.__init__(self)
        self.service = QueueService(account_name=account_name,
                                    account_key=account_key,
                                    is_emulated=is_emulated,
                                    protocol=protocol)
        self.meta = {'hostname': gethostname(), 'process': os.getpid()}
        self.queue = _formatName(queue, self.meta)
        self.queue_created = False
        self.message_ttl = message_ttl
        self.visibility_timeout = visibility_timeout
        self.base64_encoding = base64_encoding

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified queue.
        """
        try:
            if not self.queue_created:
                self.service.create_queue(self.queue)
                self.queue_created = True
            record.hostname = self.meta['hostname']
            msg = self._encode_text(self.format(record))
            self.service.put_message(self.queue,
                                     msg,
                                     self.visibility_timeout,
                                     self.message_ttl)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def _encode_text(self, text):
        if self.base64_encoding:
            text = b64encode(text.encode('utf-8')).decode('ascii')
        # fallback for the breaking change in azure-storage 0.33
        elif sys.version_info < (3,):
            if not isinstance(text, unicode):
                text = text.decode('utf-8')
        return text
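A hedged sketch of attaching the handler above to the standard logging module; the account values are placeholders.

import logging

handler = QueueStorageHandler(account_name='myaccount', account_key='<account-key>')
logger = logging.getLogger('app')
logger.addHandler(handler)
logger.warning('disk almost full')    # formatted record ends up as a message on the 'logs' queue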
Example #8
def check():
    queue_service = QueueService(account_name=account_name,
                                 account_key=account_key)
    queue_service.create_queue('monitoring')
    while True:
        print("Adding new user to scrap...")
        queue_service.put_message('monitoring',
                                  f'monitoring_{int(time.time())}')
        time.sleep(10)
Example #9
    def get_queue_client_by_uri(queue_uri):
        queue_name, storage_name, key = StorageUtilities.get_storage_from_uri(
            queue_uri)

        queue_service = QueueService(account_name=storage_name,
                                     account_key=key)
        queue_service.create_queue(queue_name)

        return queue_service, queue_name
Example #10
class QueueStorageHandler(logging.Handler):
    """
    Handler class which sends log messages to an Azure Storage queue.
    """
    def __init__(
        self,
        account_name=None,
        account_key=None,
        protocol='https',
        queue='logs',
        message_ttl=None,
        visibility_timeout=None,
        base64_encoding=False,
        is_emulated=False,
    ):
        """
        Initialize the handler.
        """
        logging.Handler.__init__(self)
        self.service = QueueService(account_name=account_name,
                                    account_key=account_key,
                                    is_emulated=is_emulated,
                                    protocol=protocol)
        self.meta = {'hostname': gethostname(), 'process': os.getpid()}
        self.queue = _formatName(queue, self.meta)
        self.queue_created = False
        self.message_ttl = message_ttl
        self.visibility_timeout = visibility_timeout
        self.base64_encoding = base64_encoding

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified queue.
        """
        try:
            if not self.queue_created:
                self.service.create_queue(self.queue)
                self.queue_created = True
            record.hostname = self.meta['hostname']
            msg = self._encode_text(self.format(record))
            self.service.put_message(self.queue, msg, self.visibility_timeout,
                                     self.message_ttl)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def _encode_text(self, text):
        if self.base64_encoding:
            text = b64encode(text.encode('utf-8')).decode('ascii')
        # fallback for the breaking change in azure-storage 0.33
        elif sys.version_info < (3, ):
            if not isinstance(text, unicode):
                text = text.decode('utf-8')
        return text
Example #11
    def get_queue_client_by_uri(queue_uri, session):
        storage = StorageUtilities.get_storage_from_uri(queue_uri, session)

        queue_service = QueueService(
            account_name=storage.storage_name,
            token_credential=storage.token)
        queue_service.create_queue(storage.container_name)

        return queue_service, storage.container_name
Example #12
    def create_storage_queue(self, resource_properties):
        storage_client = StorageManagementClient(self.credentials, self.subscription_id)

        key = storage_client.storage_accounts.list_keys(resource_properties['ResourceGroupName'], resource_properties['AccountName']).keys[0].value

        queue_service = QueueService(account_name=resource_properties['AccountName'], account_key=key)

        queue_service.create_queue(resource_properties['Name'])

        return {
            'Name': resource_properties['Name']
        }
Example #13
class SimulationListener(Thread):
    def __init__(self):
        Thread.__init__(self)
        self._quit = Event()
        self.daemon = True
        self.log = logging.getLogger(__name__)

        settings = Settings()
        self.create_queue = 'create-simulated-machine'
        self.destroy_queue = 'destroy-simulated-machine'

        self.queue_service = QueueService(
            account_name=settings.azure_queue_account,
            account_key=settings.azure_queue_key
        )
        self.queue_service.create_queue(self.create_queue)
        self.queue_service.create_queue(self.destroy_queue)

    def run(self):
        # dislike of unstoppable threads
        while not self._quit.is_set():
            try:
                messages = self.queue_service.get_messages(self.create_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Creating: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.post("http://localhost:8080/machines", machine_json)
                    self.queue_service.delete_message(self.create_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                self.log.error(e.message)

            try:
                messages = self.queue_service.get_messages(self.destroy_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Deleting: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.delete("http://localhost:8080/machines/" + machine["MachineId"])
                    self.queue_service.delete_message(self.destroy_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                self.log.error(e.message)

            sleep(1)

    def quit(self):
        self._quit.set()
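A hedged sketch of driving the listener above; Settings is assumed to supply the azure_queue_account / azure_queue_key values referenced in __init__.

listener = SimulationListener()
listener.start()          # drains the create/destroy queues in the background
try:
    sleep(60)             # let it process for a while
finally:
    listener.quit()       # run() exits at its next loop iteration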
Example #14
    def create_queue_from_storage_account(storage_account, name, session):
        token = StorageUtilities.get_storage_token(session)

        queue_service = QueueService(
            account_name=storage_account.name,
            token_credential=token)
        return queue_service.create_queue(name)
Example #15
class AzureQueue(object):

  def __init__(self, account_name, account_key, queue_name):
    self.queue_name = queue_name
    self.queue_service = QueueService(account_name=account_name, account_key=account_key)
    self.queue_service.create_queue(self.queue_name)

  def put_message_into_queue(self, content) -> QueueMessage:
    """
    Publishes a message with `content`
    
    :param content: The queue message 

    :returns: A QueueMessage that has the message as well as metadata 
    :rtype: QueueMessage 
    """
    return self.queue_service.put_message(self.queue_name, content)

  def get_messages(self) -> list:
    """
    Retrieves messages that have been published to the queue.

    :returns: List of queue messages
    :rtype: list
    """

    return self.queue_service.get_messages(self.queue_name)

  def delete_message_from_queue(self, message_id, pop_receipt):
    self.queue_service.delete_message(self.queue_name, message_id, pop_receipt)

  def get_message_count(self):
    queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
    return queue_metadata.approximate_message_count

  def delete(self):
    return self.queue_service.delete_queue(self.queue_name)

  def empty(self):
    messages = self.queue_service.get_messages(self.queue_name,
                                               num_messages=BATCH_NUMBER,
                                               visibility_timeout=TIMEOUT_IN_SECONDS)
    for message in messages:
      self.queue_service.delete_message(self.queue_name, message.id, message.pop_receipt)
Example #16
def __createstorage():
    global container_name
    global queue_service
    global block_blob_service
    block_blob_service = BlockBlobService(
        account_name=STORAGE_ACCOUNT_NAME,
        account_key=STORAGE_ACCOUNT_KEY,
        endpoint_suffix=STORAGE_ACCOUNT_SUFFIX)
    timestr = time.strftime("%Y%m%d-%H%M%S")
    container_name = 'fromcamera' + timestr
    block_blob_service.create_container(container_name)
    block_blob_service.set_container_acl(container_name,
                                         public_access=PublicAccess.Container)
    queue_service = QueueService(account_name=STORAGE_ACCOUNT_NAME,
                                 account_key=STORAGE_ACCOUNT_KEY,
                                 endpoint_suffix=STORAGE_ACCOUNT_SUFFIX)
    queue_service.create_queue('fromcamera' + timestr)
Example #17
class WorkloadTracker(object):
    """
    Dedicated class to track important events during the running of the workload.
    """
    def __init__(self, logger):
        self.config = Config()
        self.logger = logger
        self.init_storage_services()

    def init_storage_services(self):
        """
        Initializes the storage service clients using values from config.py.
        :return: True on success. False on failure.
        :rtype: boolean
        """
        try:
            # creates instances of Azure QueueService
            self.workload_queue_service = QueueService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.workload_tracker_sas_token)
            self.workload_queue_service.create_queue(
                self.config.workload_tracker_queue_name)
            self.workload_queue_service.encode_function = models.QueueMessageFormat.noencode

            return True
        except Exception as ex:
            self.logger.Exception(ex, self.init_storage_services.__name__)
            return False

    def write(self, event_type, content=None):
        """
        Write the event to the dedicated workload tracker queue
        """
        # create an event
        evt = WorkloadEvent()
        evt.event_type = int(event_type)
        evt.content = content

        # write serialized event to Azure queue
        serialized_event = json.dumps(evt.__dict__)
        self.workload_queue_service.put_message(
            self.config.workload_tracker_queue_name, serialized_event)
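A minimal, hedged usage sketch for the tracker above; project_logger and the event-type value are placeholders, and WorkloadEvent is assumed to be defined in the surrounding project.

tracker = WorkloadTracker(project_logger)                  # project_logger is a hypothetical logger object
tracker.write(event_type=1, content='workload started')   # serialized as JSON onto the tracker queue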
Example #18
def queue():

    account_name = config.STORAGE_ACCOUNT_NAME
    account_key = config.STORAGE_ACCOUNT_KEY

    queue_service = QueueService(account_name=account_name,
                                 account_key=account_key)

    print("Creating task queue")
    task_queue_name = config.TASK_QUEUE_NAME
    queue_service.create_queue(task_queue_name)
    print("Task queue created")

    queue_service.put_message(task_queue_name, u'message1')

    messages = queue_service.get_messages(task_queue_name, num_messages=16)
    for message in messages:
        print(message.content)
        queue_service.delete_message(task_queue_name, message.id,
                                     message.pop_receipt)
Example #19
def send_tweet(tweet_message, in_reply_to, entity):
    queue_name = settings.AZURE_QUEUE_NAME
    queue_service = QueueService(
        account_name=settings.TWITTERBOT_STORAGE_ACCOUNT_NAME,
        account_key=settings.TWITTERBOT_STORAGE_ACCOUNT_KEY)
    queue_service.create_queue(queue_name)

    queue_message = {
        'id': entity['id'],
        'tweet': {
            'status': tweet_message,
            'in_reply_to_status_id': in_reply_to
        },
        'percentiles': entity['percentiles']
    }

    queue_service.put_message(
        queue_name,
        base64.b64encode(
            json.dumps(queue_message).encode('utf-8')).decode('utf-8'))
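The manual base64 round trip above can also be delegated to the SDK's encode function, as several of the other examples do; a hedged equivalent sketch:

from azure.storage.queue import QueueMessageFormat

queue_service.encode_function = QueueMessageFormat.text_base64encode
queue_service.put_message(queue_name, json.dumps(queue_message))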
Example #20
class MoeHandler():

    def __init__(self, api, generator, queue_name, conn_str):
        self.api = api
        self.gen_util = Generate_util(generator)
        self.queue_name = queue_name
        self.queue_service = QueueService(connection_string=conn_str)
        self.queue_service.create_queue(queue_name)

    # loops forever, reading messages and sending to gen util for parsing. Then sends the message
    def read_messages(self):
        while 1:
            try:
                messages = self.queue_service.get_messages(self.queue_name)
                if not messages:
                    time.sleep(2)  # We'll wait a few seconds and check again.
                    continue
                for message in messages:
                    event = json.loads(message.content)
                    if event['msg_type'] == 'tweet':
                        self.handle_tweet(event)
                    elif event['msg_type'] == 'dm':
                        self.handle_dm(event)
                    self.queue_service.delete_message(
                        self.queue_name, message.id, message.pop_receipt)
            except Exception as e:
                print("Exception handling messages")
                print(e)

    def handle_dm(self, event):
        imgpth = self.gen_util.parse_msg(event['text'])
        media = self.api.media_upload(imgpth)
        self.api.send_direct_message(
            recipient_id=event['user_id'], text="Here's your generated anime girl!", attachment_type='media', attachment_media_id=media.media_id)
        return

    def handle_tweet(self, event):
        imgpth = self.gen_util.parse_msg(event['text'])
        self.api.update_with_media(imgpth, status="Here's your generated anime girl!",
                                   in_reply_to_status_id=event['respond_id'], auto_populate_reply_metadata=True)
        return
Example #21
def run_temperature():
    queue_service = QueueService(account_name, account_key)

    queue1 = "NormalTemp"
    queue2 = "HighTemp"
   

    queue_service.create_queue(queue1)
    print("Queue created successfully...")

    queue_service.create_queue(queue2)
    print("Queue created successfully...")

    for i in range(1, 10):
        temp = generateTemp()
        queue_service.put_message(queue1, temp)

        if temp > 80:
            # Code for the alert function will be placed here
            tkMessageBox.showinfo(message=str(temp) + " is the temperature")
            queue_service.put_message(queue2, temp)
Example #22
class AzureStorageQueue(Common.Contracts.Queue):
    def __init__(self, queue_name, config: AzureStorageConfig):
        self._queue_name = queue_name
        self._queue_service = QueueService(account_name=config.account_name,
                                           account_key=config.account_key)

        self._queue_service.encode_function = QueueMessageFormat.text_base64encode
        self._queue_service.decode_function = QueueMessageFormat.text_base64decode

    def push(self, message):
        self._queue_service.create_queue(self._queue_name)
        self._queue_service.put_message(self._queue_name, message)

    def pop(self):
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.peek_messages(self._queue_name)
        for message in messages:
            result = message.content
            self._queue_service.delete_message(self._queue_name, message.id,
                                               message.pop_receipt)
            return result

    def peek(self):
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.peek_messages(self._queue_name)
        for message in messages:
            return message.content
Example #23
def run(job, **kwargs):
    resource = kwargs.get("resource")
    env_id = "{{ env_id }}"
    env = Environment.objects.get(id=env_id)
    rh = env.resource_handler.cast()
    resource_group = "{{ resource_group }}"
    create_custom_fields_as_needed()

    storage_account = "{{ storage_account }}"
    azure_queue_name = "{{ azure_queue_name }}"

    set_progress("Connecting To Azure Management Service...")
    azure_client = get_azure_storage_client(rh)

    res = azure_client.storage_accounts.list_keys(resource_group,
                                                  storage_account)
    keys = res.keys
    set_progress("Connecting To Azure queues...")
    queue_service = QueueService(account_name="{{ storage_account }}",
                                 account_key=keys[0].value)

    set_progress("Creating a file...")
    if queue_service.exists(queue_name=azure_queue_name):
        return (
            "FAILURE",
            "Queue with this name already exists",
            "The queue can not be created.",
        )
    else:
        queue_service.create_queue("{{ azure_queue_name }}")
        resource.name = azure_queue_name
        resource.azure_storage_account_name = storage_account
        resource.azure_account_key = keys[0].value
        resource.azure_account_key_fallback = keys[1].value
        resource.azure_storage_queue_name = "Azure queues - " + azure_queue_name
        resource.save()

    return "Success", "", ""
Example #24
def set_up_queue(queue_name):
    account_name = input('Please provide Azure account name:')
    account_key = input('Please provide Azure access key:')

    queue_service = QueueService(account_name, account_key)

    print('Creating queue: {} ...'.format(queue_name))

    try:
        queue_service.create_queue(queue_name)
    except AzureException as exc:
        print('Cannot create queue, reason: {}'.format(exc), file=sys.stderr)
        sys.exit(1)
    else:
        print('Queue successfully created.')

    print('Generating SAS token...')

    expiry_date = datetime.date.today() + datetime.timedelta(days=30)

    print('SAS token will expire in', str(expiry_date))

    access_policy = AccessPolicy(
        expiry=str(expiry_date),
        permission=(
            QueueSharedAccessPermissions.READ +
            QueueSharedAccessPermissions.ADD +
            QueueSharedAccessPermissions.PROCESS
        ),
    )

    sas_token = queue_service.generate_shared_access_signature(
        queue_name, SharedAccessPolicy(access_policy),
    )

    print('Generated SAS token:', sas_token)
    print('Please use above token in QueueManager.')
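A hedged follow-up sketch: a consumer could authenticate with the printed SAS token instead of the account key. The account_name, sas_token, and queue_name below are placeholders for the values produced above.

consumer = QueueService(account_name=account_name, sas_token=sas_token)
for message in consumer.get_messages(queue_name):
    print('processing', message.content)
    consumer.delete_message(queue_name, message.id, message.pop_receipt)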
Example #25
    def GetQueue():
        u = keys.Key().generate_keys()
        keys.Key().save_key(DocumentManagement.randomString(), u['privateKey'])
        #print(str(u['publicKey'], 'utf-8'))
        queue_service = QueueService(
            account_name=cfg.settings['STORAGE_ACCOUNT'],
            account_key=cfg.settings['STORAGE_ACCOUNT_KEY'])

        queueName = DocumentManagement.randomString()

        created = queue_service.create_queue(queueName)
        data = None

        if created == True:
            data = {'key': str(u['publicKey'], 'utf-8'), 'queue': queueName}
        return data
Example #26
class AzureStorageQueue(Queue):
    """Interface for interacting with an Azure Storage Queue (through the Queue
    contract)"""
    def __init__(self, queue_name, config: AzureStorageConfig):
        """Initializes the storage queue.

        :param queue_name: The name of the queue to access.
        If a queue with this name doesn't already exist on the
        storage account, the queue will be created on the first operation.
        :param config: AzureStorageConfig with a valid account name and
        account key
        """
        self._queue_name = queue_name
        self._queue_service = QueueService(account_name=config.account_name,
                                           account_key=config.account_key)

        self._queue_service.encode_function = \
            QueueMessageFormat.text_base64encode
        self._queue_service.decode_function = \
            QueueMessageFormat.text_base64decode

    def push(self, message):
        """Pushes a new message onto the queue."""
        self._queue_service.create_queue(self._queue_name)
        self._queue_service.put_message(self._queue_name, message)

    def pop(self):
        """Pops the first message from the queue and returns it."""
        self._queue_service.create_queue(self._queue_name)

        # get_messages prevents another client from getting the message
        # before we've had a chance to delete it. The visibility_timeout
        # prevents the message from being seen by other clients
        # for X seconds.
        messages = self._queue_service.get_messages(self._queue_name,
                                                    visibility_timeout=30)
        for message in messages:
            result = message.content
            self._queue_service.delete_message(self._queue_name, message.id,
                                               message.pop_receipt)
            return result

    def peek(self):
        """Peeks the fist message from the queue and returns it."""
        self._queue_service.create_queue(self._queue_name)
        messages = self._queue_service.peek_messages(self._queue_name)
        for message in messages:
            return message.content

    @staticmethod
    def create(queue_name: str):
        """Helper function for creating a Azure Storage Queue from the
        storage_config property defined inside of AzureConfig."""
        azure_config = AzureConfig()
        return AzureStorageQueue(queue_name, azure_config.storage_config)
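A short, hedged usage sketch for the wrapper above; 'jobs' is a placeholder queue name and AzureConfig is assumed to be configured as in the source project.

queue = AzureStorageQueue.create('jobs')
queue.push('hello')
print(queue.peek())    # 'hello' (message stays on the queue)
print(queue.pop())     # 'hello' again, and the message is deleted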
Example #27
class AzureFunctionAppBackend:
    """
    A wrap-up around Azure Function Apps backend.
    """
    def __init__(self, config):
        self.log_level = os.getenv('CLOUDBUTTON_LOGLEVEL')
        self.name = 'azure_fa'
        self.config = config

        self.fa_client = FunctionAppClient(self.config)
        self.queue_service = QueueService(
            account_name=self.config['account_name'],
            account_key=self.config['account_key'])
        self.queue_service.encode_function = QueueMessageFormat.text_base64encode
        self.queue_service.decode_function = QueueMessageFormat.text_base64decode

        log_msg = 'Cloudbutton v{} init for Azure Function Apps'.format(
            __version__)
        logger.info(log_msg)
        if not self.log_level:
            print(log_msg)

    def create_runtime(self,
                       docker_image_name,
                       memory=None,
                       timeout=azure_fa_config.RUNTIME_TIMEOUT_DEFAULT):
        """
        Creates a new runtime in Azure Function Apps
        from the provided Linux image for the consumption plan
        """

        log_msg = 'Creating new Cloudbutton runtime for Azure Function Apps...'
        logger.info(log_msg)
        if not self.log_level:
            print(log_msg)

        logger.info('Extracting preinstalls for Azure runtime')
        metadata = self._generate_runtime_meta()

        logger.info('Creating new Cloudbutton runtime')
        action_name = self._format_action_name(docker_image_name)
        self._create_runtime(action_name)

        return metadata

    def delete_runtime(self, docker_image_name, extract_preinstalls=False):
        """
        Deletes a runtime
        """
        if extract_preinstalls:
            action_name = docker_image_name
        else:
            action_name = self._format_action_name(docker_image_name)

        self.fa_client.delete_action(action_name)
        queue_name = self._format_queue_name(docker_image_name, type='trigger')
        self.queue_service.delete_queue(queue_name)

    def invoke(self, docker_image_name, memory=None, payload={}):
        """
        Invoke function
        """
        action_name = self._format_action_name(docker_image_name)
        queue_name = self._format_queue_name(action_name, type='trigger')

        try:
            msg = self.queue_service.put_message(queue_name,
                                                 json.dumps(payload))
            activation_id = msg.id

        except Exception:
            logger.debug('Creating queue (invoke)')
            self.queue_service.create_queue(queue_name)
            return self.invoke(docker_image_name,
                               memory=memory,
                               payload=payload)

        return activation_id

    def get_runtime_key(self, docker_image_name, runtime_memory):
        """
        Method that creates and returns the runtime key.
        Runtime keys are used to uniquely identify runtimes within the storage,
        in order to know which runtimes are installed and which not.
        """
        action_name = self._format_action_name(docker_image_name)
        runtime_key = os.path.join(self.name, action_name)

        return runtime_key

    def _format_action_name(self, action_name):
        sha_1 = hashlib.sha1()
        block = action_name.encode('ascii', errors='ignore')
        sha_1.update(block)
        tag = sha_1.hexdigest()[:8]

        sha_1 = hashlib.sha1()
        block = self.config['account_name'].encode('ascii', errors='ignore')
        sha_1.update(block)
        tag = tag + sha_1.hexdigest()[:8]

        version = re.sub(r'[/_:.-]', '', __version__)
        action_name = action_name[:16] + '-' + version[:5] + '-' + tag

        return action_name

    def _format_queue_name(self, action_name, type):
        #  Using different queue names because there is a delay between
        #  deleting a queue and creating another one with the same name
        return action_name + '-' + type

    def _create_runtime(self, action_name, extract_preinstalls=False):
        """
        Creates a new runtime with the base modules and cloudbutton
        """
        def add_base_modules():
            cmd = 'pip3 install -t {} -r requirements.txt'.format(
                azure_fa_config.ACTION_MODULES_DIR)
            child = sp.Popen(cmd, shell=True, stdout=sp.PIPE,
                             stderr=sp.PIPE)  # silent
            child.wait()
            logger.debug(child.stdout.read().decode())
            logger.debug(child.stderr.read().decode())

            if child.returncode != 0:
                cmd = 'pip install -t {} -r requirements.txt'.format(
                    azure_fa_config.ACTION_MODULES_DIR)
                child = sp.Popen(cmd,
                                 shell=True,
                                 stdout=sp.PIPE,
                                 stderr=sp.PIPE)  # silent
                child.wait()
                logger.debug(child.stdout.read().decode())
                logger.debug(child.stderr.read().decode())

                if child.returncode != 0:
                    logger.critical(
                        'Failed to install base modules for Azure Function')
                    exit(1)

        def add_cloudbutton_module():
            module_location = os.path.dirname(
                os.path.abspath(cloudbutton.__file__))
            shutil.copytree(
                module_location,
                os.path.join(azure_fa_config.ACTION_MODULES_DIR,
                             'cloudbutton'))

        def get_bindings_str(action_name, extract_preinstalls=False):
            if not extract_preinstalls:
                bindings = {
                    "scriptFile": "__init__.py",
                    "bindings": [{
                        "name": "msgIn",
                        "type": "queueTrigger",
                        "direction": "in",
                        "queueName": self._format_queue_name(action_name, 'trigger'),
                        "connection": "AzureWebJobsStorage"
                    }]
                }
            else:
                bindings = {
                    "scriptFile": "__init__.py",
                    "bindings": [{
                        "name": "msgIn",
                        "type": "queueTrigger",
                        "direction": "in",
                        "queueName": self._format_queue_name(action_name, type='trigger'),
                        "connection": "AzureWebJobsStorage"
                    }, {
                        "name": "msgOut",
                        "type": "queue",
                        "direction": "out",
                        "queueName": self._format_queue_name(action_name, type='result'),
                        "connection": "AzureWebJobsStorage"
                    }]
                }
            return json.dumps(bindings)

        initial_dir = os.getcwd()
        temp_folder = next(tempfile._get_candidate_names())
        os.mkdir(temp_folder)
        os.chdir(temp_folder)

        try:

            # Create project folder from template
            project_template = os.path.join(
                os.path.dirname(os.path.abspath(__file__)), 'action')
            project_dir = os.path.join(initial_dir, temp_folder, action_name)
            shutil.copytree(project_template, project_dir)

            os.chdir(project_dir)
            action_dir = os.path.join(project_dir, action_name)
            os.rename('action', action_dir)

            # Add the base dependencies and current cloudbutton module
            logger.debug('Adding runtime base modules')
            os.makedirs(azure_fa_config.ACTION_MODULES_DIR, exist_ok=True)
            add_base_modules()
            add_cloudbutton_module()

            # Set entry point file
            if extract_preinstalls:
                entry_point_file = 'extract_preinstalls_action.py'
            else:
                entry_point_file = 'handler_action.py'

            os.rename(os.path.join(action_dir, entry_point_file),
                      os.path.join(action_dir, '__init__.py'))

            # Edit the function's bindings for it to be a queue triggered function
            with open(os.path.join(action_dir, 'function.json'),
                      'w') as bindings_file:
                bindings_file.write(
                    get_bindings_str(action_name, extract_preinstalls))

            # Create trigger queue, create action
            logger.debug('Creating trigger queue')
            queue_name = self._format_queue_name(action_name, type='trigger')
            self.queue_service.create_queue(queue_name)

            self.fa_client.create_action(action_name)

        except Exception as e:
            raise Exception("Unable to create the new runtime", e)

        finally:
            os.chdir(initial_dir)
            shutil.rmtree(temp_folder,
                          ignore_errors=True)  # Remove tmp project folder

    def _generate_runtime_meta(self):
        """
        Extract installed Python modules from Azure runtime
        """

        action_name = 'cloudbutton-extract-preinstalls-' + get_unique_id()
        self._create_runtime(action_name, extract_preinstalls=True)

        logger.debug("Invoking 'extract-preinstalls' action")
        try:
            runtime_meta = self._invoke_with_result(action_name)
        except Exception:
            raise Exception("Unable to invoke 'extract-preinstalls' action")
        try:
            self.delete_runtime(action_name, extract_preinstalls=True)
        except Exception:
            raise Exception("Unable to delete 'extract-preinstalls' action")

        if not runtime_meta or 'preinstalls' not in runtime_meta:
            raise Exception(runtime_meta)

        logger.debug("Extracted metadata succesfully")
        return runtime_meta

    def _invoke_with_result(self, action_name):
        result_queue_name = self._format_queue_name(action_name, type='result')
        self.queue_service.create_queue(result_queue_name)
        trigger_queue_name = self._format_queue_name(action_name,
                                                     type='trigger')
        self.queue_service.put_message(trigger_queue_name, '')

        msg = []
        while not msg:
            msg = self.queue_service.get_messages(result_queue_name,
                                                  num_messages=1)
            time.sleep(0.5)

        result_str = msg[0].content
        self.queue_service.delete_queue(result_queue_name)

        return json.loads(result_str)
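The invoke() method above creates its trigger queue lazily when the first put_message fails. A hedged, standalone sketch of that create-on-miss pattern follows; the specific exception type is an assumption, since the original catches a bare Exception.

from azure.common import AzureMissingResourceHttpError

def put_with_lazy_create(queue_service, queue_name, body):
    # Fast path first; create the queue only if it does not exist yet, then retry once.
    try:
        return queue_service.put_message(queue_name, body)
    except AzureMissingResourceHttpError:
        queue_service.create_queue(queue_name)
        return queue_service.put_message(queue_name, body)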
Example #28
def put_to_queue(name, content):
    queue_service = QueueService(account_name=config.AZURE_STORAGE_NAME, account_key=config.AZURE_STORAGE_KEY)
    queue_service.create_queue(name)
    queue_service.put_message(name, content)
Example #29
class AzureService(object):
    VISIBILITY_TIMEOUT = 5*60

    def __init__(self, connection_string, container_name, queue_get, queue_push, logger=None):
        self.ctnname = container_name
        self.getname = queue_get
        self.pushname = queue_push
        
        self.qs = QueueService(connection_string=connection_string,
                               protocol='https',
#                                endpoint_suffix='core.windows.net'
                                )
        self.bs = BlockBlobService(connection_string=connection_string)
        self.qs.create_queue(self.getname, timeout=1)
        self.qs.create_queue(self.pushname, timeout=1)
        self.bs.create_container(self.ctnname, timeout=1)
        if logger: logger.info('Init Azure success')
    
    def pushMessage(self, message, qname=None, logger=None):
        if qname is None:
            qname = self.pushname
        try:
            self.qs.put_message(self.pushname, message) 
        except Exception as e:
            if logger:
                logger.exception('ERROR PUSH MESSAGE ')
            else:
                print 'ERROR PUSH MESSAGE '
                print e
        
    def getMessage(self, qname=None, num=1, logger=None):
        if qname is None:
            qname = self.getname
        try:
            message = self.qs.get_messages(qname, num, visibility_timeout=self.VISIBILITY_TIMEOUT)
        except Exception as e:
            if logger:
                logger.exception('ERROR GET MESSAGE ')
            else:
                print 'ERROR GET MESSAGE '
                print e
            return []
        return message
    
    def getReceiptInfo(self, logger=None):
        message = self.getMessage(logger=logger)
        if len(message) > 0:
            rinfo = ReceiptSerialize.fromjson(message[0].content)   
            return message[0], rinfo
        else:
            return None, None
        
    def count(self):
        metadata_get = self.qs.get_queue_metadata(self.getname)
        metadata_push = self.qs.get_queue_metadata(self.pushname)
        generator = self.bs.list_blobs(self.ctnname)
        bc = 0
        for blob in generator:
            bc += 1
        return {'get_count' : metadata_get.approximate_message_count, 
                'push_count': metadata_push.approximate_message_count,
                'blob_count': bc
                } 
    
    def uploadFolder(self, folderpath, logger):
        for filename in os.listdir(folderpath):
            if len(filename) > 4:
                suffix = filename[-4:].upper()
            else:
                continue
            if '.JPG' == suffix or 'JPEG' == suffix:
                receipt_metadata = ReceiptSerialize()
                receipt_metadata.receiptBlobName = unicode(filename, 'utf-8')
                self.qs.put_message(self.getname, b64encode(receipt_metadata.toString()).decode('utf-8')) 
                self.bs.create_blob_from_path(self.ctnname, receipt_metadata.receiptBlobName, os.path.join(folderpath, filename), max_connections=2, timeout=None)
                logger.info('upload %s', filename)
    
    def getImage(self, imgname, logger=None):
        localpath= os.path.join(args.download_dir, imgname)
        try:
            self.bs.get_blob_to_path(self.ctnname, imgname, localpath)
        except AzureMissingResourceHttpError as e:
            if logger:
                logger.error('Blob named ' + imgname + ' does not exist.', exc_info=True)
            else:
                print 'Blob named ' + imgname + ' does not exist.'
                print e            
            return ''
        except Exception as e:
            if logger:
                logger.error('Exception while getting blob.', exc_info=True)
            else:
                print 'Exception while getting blob.' 
                print e            
            return None
        return localpath
    
    def deleteMessage(self, message, qname=None, logger=None): 
        if qname is None:
            qname = self.getname
        try:
            self.qs.delete_message(qname, message.id, message.pop_receipt)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE MESSAGE ')
            else:
                print 'ERROR DELETE MESSAGE '
                print e
                
    def deleteImage(self, imgname, logger=None):
        try: 
            self.bs.delete_blob(self.ctnname, imgname)
        except Exception as e:
            if logger:
                logger.exception('ERROR DELETE IMAGE ')
            else:
                print 'ERROR DELETE IMAGE '
                print e
        
    def cleanUp(self):
        count = 0
        print('deleted: ')
        while True:
            messages = self.qs.get_messages(self.getname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.getname, message.id, message.pop_receipt)
            if len(messages) == 0: break
        print(str(count) + ' from queue-get')
        count = 0
        while True:
            messages = self.qs.get_messages(self.pushname)
            for message in messages:
                count += 1
                self.qs.delete_message(self.pushname, message.id, message.pop_receipt)
            if len(messages) == 0: break
        print(str(count) + ' from queue-push') 
        count = 0
        generator = self.bs.list_blobs(self.ctnname)
        for blob in generator:
            count += 1     
            self.bs.delete_blob(self.ctnname, blob.name)
        print(str(count) + ' from container') 
Example #30
class StorageQueueContext():
    """Initializes the repository with the specified settings dict.
        Required settings in config dict are:
        - AZURE_STORAGE_NAME
        - AZURE_STORAGE_KEY
    """
    
    _models = []
    _service = None
    _storage_key = ''
    _storage_name = ''

    def __init__(self, **kwargs):

        self._storage_name = kwargs.get('AZURE_STORAGE_NAME', '')
        self._storage_key = kwargs.get('AZURE_STORAGE_KEY', '')

        """ service init """
        if self._storage_key != '' and self._storage_name != '':
            self._service = QueueService(account_name = self._storage_name, account_key = self._storage_key, protocol='https')

        """ registered models """
        self._models = []

        """ encrypt queue service """
        if kwargs.get('AZURE_REQUIRE_ENCRYPTION', False):

            # Create the KEK used for encryption.
            # KeyWrapper is the provided sample implementation, but the user may use their own object as long as it implements the interface above.
            kek = KeyWrapper(kwargs.get('AZURE_KEY_IDENTIFIER', 'otrrentapi'), kwargs.get('SECRET_KEY', 'super-duper-secret')) # Key identifier

            # Create the key resolver used for decryption.
            # KeyResolver is the provided sample implementation, but the user may use whatever implementation they choose so long as the function set on the service object behaves appropriately.
            key_resolver = KeyResolver()
            key_resolver.put_key(kek)

            # Set the require Encryption, KEK and key resolver on the service object.
            self._service.require_encryption = True
            self._service.key_encryption_key = kek
            self._service.key_resolver_function = key_resolver.resolve_key
        pass
     
    def __create__(self, queue) -> bool:
        if (not self._service is None):
            try:
                self._service.create_queue(queue)
                return True
            except AzureException as e:
                log.error('failed to create {} with error {}'.format(queue, e))
                return False
        else:
            return True
        pass

    def register_model(self, storagemodel:object):
        modelname = storagemodel.__class__.__name__     
        if isinstance(storagemodel, StorageQueueModel):
            if (not modelname in self._models):
                self.__create__(storagemodel._queuename)
                self._models.append(modelname)
                log.info('model {} registered successfully. Models are {!s}'.format(modelname, self._models))      
        pass

    def put(self, storagemodel:object) -> StorageQueueModel:
        """ insert queue message into storage """

        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                """ peek first message in queue """
                try:
                    message = self._service.put_message(storagemodel._queuename, storagemodel.getmessage())
                    storagemodel.mergemessage(message)

                except AzureException as e:
                    log.error('can not save queue message:  queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def peek(self, storagemodel:object) -> StorageQueueModel:
        """ lookup the next message in queue """

        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                """ peek first message in queue """
                try:
                    messages = self._service.peek_messages(storagemodel._queuename, num_messages=1)

                    """ parse retrieved message """
                    for message in messages:
                        storagemodel.mergemessage(message)

                    """ no message retrieved ?"""
                    if storagemodel.id is None:
                        storagemodel = None

                except AzureException as e:
                    log.error('can not peek queue message:  queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def get(self, storagemodel:object, hide = 0) -> StorageQueueModel:
        """ lookup the next message in queue """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                """ get first message in queue """
                try:
                    if hide > 0:
                        messages = self._service.get_messages(storagemodel._queuename, num_messages=1, visibility_timeout = hide)
                    else:
                        messages = self._service.get_messages(storagemodel._queuename, num_messages=1)
                    
                    """ parse retrieved message """
                    for message in messages:
                        storagemodel.mergemessage(message)

                    """ no message retrieved ?"""
                    if storagemodel.id is None:
                        storagemodel = None

                except AzureException as e:
                    log.error('can not get queue message:  queue {} with message {} because {!s}'.format(storagemodel._queuename, storagemodel.content, e))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def update(self, storagemodel:object, hide = 0) -> StorageQueueModel:
        """ update the message in queue """
        modelname = storagemodel.__class__.__name__
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                """ check if message in queue """
                if (storagemodel.id != '') and (storagemodel.pop_receipt != '') and (not storagemodel.id is None) and (not storagemodel.pop_receipt is None):
                    try:
                        content = storagemodel.getmessage()
                        message = self._service.update_message(storagemodel._queuename, storagemodel.id, storagemodel.pop_receipt, visibility_timeout = hide, content=content)
                        storagemodel.content = content
                        storagemodel.pop_receipt = message.pop_receipt

                    except AzureException as e:
                        log.error('can not update queue message:  queue {} with message.id {!s} because {!s}'.format(storagemodel._queuename, storagemodel.id, e))
                        storagemodel = None
                else:
                    log.info('cannot update queue message {} due to missing id and pop_receipt'.format(modelname))
                    storagemodel = None
            else:
                log.info('please register model {} first'.format(modelname))
                storagemodel = None
        else:
            log.info('model {} is not a Queue Model'.format(modelname))
            storagemodel = None

        return storagemodel

    def delete(self, storagemodel:object) -> bool:
        """ delete the message in queue """
        modelname = storagemodel.__class__.__name__
        deleted = False
        if isinstance(storagemodel, StorageQueueModel):
            if (modelname in self._models):
                """ check if message in queue """
                if (storagemodel.id != '') and (storagemodel.pop_receipt != '') and (not storagemodel.id is None) and (not storagemodel.pop_receipt is None):
                    try:
                        self._service.delete_message(storagemodel._queuename, storagemodel.id, storagemodel.pop_receipt)
                        deleted = True

                    except AzureException as e:
                        log.error('can not delete queue message:  queue {} with message.id {!s} because {!s}'.format(storagemodel._queuename, storagemodel.id, e))
                else:
                    log.info('cannot delete queue message {} due to missing id and pop_receipt'.format(modelname))
            else:
                log.info('please register model {} first'.format(modelname))
        else:
            log.info('model {} is not a Queue Model'.format(modelname))

        return deleted
Example #31
class QueueLogger(object):
    """
    This class contains functionality to log stdout to Azure Queue Storage
    """
    def __init__(self):
        """
        Initializes a new instance of the QueueLogger class.
        """
        self.config = Config()
        self.queue_service = QueueService(account_name =  self.config.storage_account_name,
            sas_token = self.config.logger_queue_sas)
        self.queue_service.encode_function = models.QueueMessageFormat.noencode
        self.init_storage()

    def start_listening(self):
        for line in fileinput.input():
            self.write(line.strip())

    def init_storage(self):
        """
        Initializes storage table & queue, creating it if it doesn't exist.
        :return: True on success. False on failure.
        :rtype: boolean
        """
        try:
            # will create the logger queue if it doesn't exist
            self.queue_service.create_queue(self.config.logger_queue_name)
            return True
        except Exception as ex:
            self.write_stdout(ex)
            return False

    def flush(self):
        """
        Flush the internal buffer to Storage Queue
        """
        self.put_message_to_queue()

    def write(self, content):
        """
        Writes string content to stdout and to the Storage Queue

        :param str content: The content to write/buffer
        """
        self.write_stdout(content)
        self.write_queue(content)

    def write_queue(self, content):
        """
        Writes string content to the Storage Queue

        :param str content: The content to write/buffer
        """
        self.queue_service.put_message(self.config.logger_queue_name, content)

    def write_stdout(self, content):
        """
        Writes string content to stdout

        :param str content: The content to write/buffer
        """
        print(content)
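A hedged sketch of how the logger above might be driven: fileinput.input() reads stdin when no file arguments are given, so another process's output can be piped through it.

# e.g.  python worker.py | python queue_logger.py
logger = QueueLogger()        # creates the queue via init_storage()
logger.start_listening()      # each stdin line is printed and pushed to the queue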
Example #32
from azure.storage.queue import QueueService

queue_name = "mytestqueue"
messages_to_send = [u"Hello, World!", u"Hello, House!", u"Hello, God!"]
accountname = "xxxx"
accountkey = "xxxx"

queue_service = QueueService(account_name=accountname, account_key=accountkey)

queue_service.create_queue(queue_name)

for text in messages_to_send:
    queue_service.put_message(queue_name, text)
    messages = queue_service.peek_messages(queue_name)
    for message in messages:
        print(message.content)

metadata = queue_service.get_queue_metadata(queue_name)
count = metadata.approximate_message_count
print(count)
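The script above only peeks at messages; a hedged follow-up showing how the same queue could then be drained:

for message in queue_service.get_messages("mytestqueue", num_messages=16):
    print('dequeued:', message.content)
    queue_service.delete_message("mytestqueue", message.id, message.pop_receipt)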





Example #33
class Queue:
    def __init__(self, account_name, account_key, queue_name="logqueue"):
        """Initialiaze a queue. The type is set by the
        'ACS_LOGGING_QUEUE_TYPE' environment variable. If it is set to
        'AzureStorageQueue' then values must be provided for
        'account_name' and 'account_key' which are values associated
        with the Azure Storage account. 'queue_name' is optional and
        defaults to 'logqueue'.

        """
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name
        # self.log.debug("Queue type: " + self.queue_type + " / " + self.queue_name)

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, "w+")
        else:
            self.log.error("Unknown queue type: " + queue_type)

    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary clearnup on the queue
           at the end of a run.
        """
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            self.log.error("Unknown queue type: " + queue_type)

    def enqueue(self, msg, level="INFO"):
        msg = level + " - " + msg
        if self.queue_type == "LocalFile":
            self.file_queue.write(msg + "\n")
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        self.log.debug(msg)

    def dequeue(self):
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, "r") as f:
                messages = f.readlines()
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        self.queue_service.delete_message(self.queue_name, message.message_id, message.pop_receipt)
        #  with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #    processed.write(log)
        #  os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """
        Get the approximate length of the queue
        """
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata["x-ms-approximate-messages-count"]
        return count
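
# Usage sketch for the Queue wrapper above (not from the original source). It
# assumes the same module context (Log, config, QueueService imports), that
# config.ACS_LOGGING_QUEUE_TYPE is "AzureStorageQueue", and that the "xxxx"
# credentials are placeholders for a real storage account.
log_queue = Queue("xxxx", "xxxx", queue_name="logqueue")
log_queue.enqueue("service started", level="INFO")
for queued_message in log_queue.dequeue():
    log_queue.delete(queued_message)
print(log_queue.getLength())
log_queue.close()
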
 def create_queue_from_storage_account(storage_account, name, session):
     token = StorageUtilities.get_storage_token(session)
     queue_service = QueueService(
         account_name=storage_account.name,
         token_credential=token)
     return queue_service.create_queue(name)
Example #35
0
    }

stor_acc_name = service_keys['stor_acc_name']
stor_acc_key = service_keys['stor_acc_key']
redis_pass = service_keys['redis_pass']
redis_server = service_keys['redis_server']
instr_key = service_keys['instr_key']


# storage
account_name = stor_acc_name
account_key = stor_acc_key
blob_service = BlobService(account_name, account_key)
blob_service.create_container('images')
queue_service = QueueService(account_name, account_key)
queue_service.create_queue('taskqueue')
table_service = TableService(account_name, account_key)
table_service.create_table('tasktable')


r = redis.StrictRedis(host=redis_server, port=6380, db=0, password=redis_pass, ssl=True)

tc = TelemetryClient(instr_key)

@app.route('/')
@app.route('/home')
def form():
    return render_template('form_submit.html')


@app.route('/hello/', methods=['POST'])
class Queue:
    def __init__(self, account_name, account_key, queue_name="logqueue"):
        """Initialiaze a queue. The type is set by the
        'ACS_LOGGING_QUEUE_TYPE' environment variable. If it is set to
        'AzureStorageQueue' then values must be provided for
        'account_name' and 'account_key' which are values associated
        with the Azure Storage account. 'queue_name' is optional and
        defaults to 'logqueue'.

        """
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name
        # self.log.debug("Queue type: " + self.queue_type + " / " + self.queue_name)

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, 'w+')
        else:
            self.log.error("Unknown queue type: " + queue_type)

    def getName(self):
        return self.queue_name
        
    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary clearnup on the queue
           at the end of a run.
        """
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            self.log.error("Unknown queue type: " + queue_type)

    def enqueue(self, msg):
        if self.queue_type == "LocalFile":
            self.file_queue.write(msg + '\n')
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        else:
            self.log.error("We don't know how to handle queues of type " + self.queue_type)
        self.log.debug(msg)

    def dequeue(self):
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, 'r') as f:
                messages = f.readlines()
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        self.queue_service.delete_message(self.queue_name, message.message_id, message.pop_receipt)
        #  with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #    processed.write(log)
        #  os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """
        Get the approximate length of the queue
        """
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata['x-ms-approximate-messages-count']
        return int(count)

    def peek_messages(self, num_messages):
        """
        Peek at the top messages in the queue. This method does not remove the
        messages from the queue.
        """
        return self.queue_service.peek_messages(self.queue_name, num_messages)
Example #37
0
class MainPawWorker:
    """
    Main class to use for running a worker. Call start_workers() to start.
    """
    def __init__(self,
                 azure_storage_name,
                 azure_storage_private_key,
                 azure_queue_name,
                 azure_table_name,
                 tasks_module,
                 workers,
                 visibility_timeout=VISIBILITY_TIMEOUT):
        """
        :param azure_storage_name: Name of Azure storage account
        :param azure_storage_private_key: Private key of Azure storage account.
        :param azure_queue_name: Name of the Azure queue to use.
        :param azure_table_name: Name of the Azure table to use.
        :param tasks_module: Module containing decorated functions to load from.
        :param workers: Number of worker processes, e.g. 4
        :param visibility_timeout: Visibility timeout, in seconds, applied to
            messages fetched from the Azure queue
        """
        self.account_name = azure_storage_name
        self.account_key = azure_storage_private_key
        self.queue_name = azure_queue_name
        self.table_name = azure_table_name
        self.tasks_module = tasks_module
        self.workers = workers
        self.visibility_timeout = visibility_timeout

        if self.visibility_timeout > MAXIMUM_VISIBILITY_TIMEOUT:
            raise PawError('visibility_timeout exceeds the maximum allowed limit')

        self.queue_service = QueueService(account_name=self.account_name,
                                          account_key=self.account_key)
        self.table_service = TableService(account_name=self.account_name,
                                          account_key=self.account_key)
        self.local_queue = Queue(self.workers)
        # self.logger = logging.getLogger()
        self.logger = LOGGER

        self.logger.info(PAW_LOGO)

        self.worker_process = Worker(
            local_queue=self.local_queue,
            queue_service=self.queue_service,
            queue_name=self.queue_name,
            table_service=self.table_service,
            table_name=azure_table_name,
            tasks=self._load_tasks(),
        )
        self.pool = Pool(self.workers, self.worker_process.run, ())
        signal.signal(signal.SIGTERM, self.on_exit)

    def on_exit(self, signum, frame):
        self.pool.terminate()
        sys.exit()

    def _load_tasks(self):
        """
        Loads and returns decorated functions from the given module, as a dict
        """
        tasks = dict([
            o for o in getmembers(self.tasks_module)
            if isfunction(o[1]) and hasattr(o[1], 'paw')
        ])

        for t, f in tasks.items():
            self.logger.info("REGISTERED '{}'".format(t))
            if f.description:
                self.logger.info("\tdescription: '{}'".format(f.description))
        if not tasks:
            self.logger.warning("No tasks found...")

        return tasks

    def start_workers(self, sleep_for=5):
        """
        Starts the workers and polls the Azure queue for messages. When a new
        message arrives and the local queue has room, the message is placed on
        the local queue for a worker to pick up.
        :param sleep_for: Seconds to sleep at the end of each loop iteration.
        """
        self.queue_service.create_queue(self.queue_name)
        create_table_if_missing(self.table_service, self.table_name)

        try:
            self.logger.info(
                "Cleaning up dead jobs left in {}".format(STARTED))
            dead_jobs = self.table_service.query_entities(
                table_name=self.table_name,
                filter="status eq '{}'".format(STARTED))
            for job in dead_jobs.items:
                log_to_table(table_service=self.table_service,
                             table_name=self.table_name,
                             message=job,
                             status=LOST_WORKER,
                             result="Lost worker, or task aborted.")

        except AzureException as e:
            self.logger.error("Cleaning dead tasks failed: {}".format(e))

        while True:
            if self.local_queue.full():
                time.sleep(sleep_for)

            try:
                new_msg = self.queue_service.get_messages(
                    queue_name=self.queue_name,
                    num_messages=1,
                    visibility_timeout=self.visibility_timeout)
            except AzureException:
                self.logger.error("Error while getting message "
                                  "from Azure queue. Trying to create "
                                  "the queue")
                self.queue_service.create_queue(self.queue_name)
                time.sleep(sleep_for)
                continue

            if new_msg:
                msg = new_msg[0]
                try:
                    content = json.loads(msg.content)
                except json.JSONDecodeError:
                    self.logger.critical('Json error {}'.format(
                        traceback.format_exc()))
                    try:
                        self.queue_service.delete_message(
                            queue_name=self.queue_name,
                            message_id=msg.id,
                            pop_receipt=msg.pop_receipt)
                    except AzureException:
                        self.logger.critical(
                            'Deleting invalid message from queue failed: '
                            '{}'.format(traceback.format_exc()))
                    continue

                if msg.dequeue_count > MAXIMUM_DEQUEUE_COUNT:
                    log_to_table(
                        table_service=self.table_service,
                        table_name=self.table_name,
                        message=content,
                        status=FAILED,
                        result="PAW MESSAGE: Dequeue count exceeded.",
                    )
                    self.queue_service.delete_message(self.queue_name, msg.id,
                                                      msg.pop_receipt)
                    continue

                content['msg'] = msg
                while True:
                    try:
                        self.local_queue.put_nowait(content)
                        break
                    except Full:
                        self.logger.info('LOCAL QUEUE FULL: waiting...')
                        time.sleep(sleep_for)

                self.logger.debug('ADDING: {}'.format(content['task_name']))

            time.sleep(sleep_for)
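
# Producer-side sketch (not part of the original class): start_workers() above
# json-decodes msg.content and routes on content['task_name'], so a minimal
# message it can consume could be enqueued as below. The queue name and any
# payload fields beyond 'task_name' are assumptions.
import json
from azure.storage.queue import QueueService

producer_queue_service = QueueService(account_name="xxxx", account_key="xxxx")
producer_queue_service.create_queue("paw-tasks")        # placeholder queue name
producer_queue_service.put_message(
    "paw-tasks", json.dumps({"task_name": "my_registered_task"}))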
Example #38
0
 def create_queue_from_storage_account(storage_account, name):
     keys = StorageUtilities.get_storage_keys(storage_account.id)
     queue_service = QueueService(account_name=storage_account.name,
                                  account_key=keys[0].value)
     return queue_service.create_queue(name)
Example #39
0
# Main thread - Check if port 25565 is free and move to the next one if it's not. Once a free port has been found, create a new instance (up to 3). A hypothetical checkPort sketch is given after this example.
while instanceCreated == 0:
        freeport = checkPort(defaultport)
        if freeport == 0:
                defaultport += 1
                counter += 1
                if counter > maxInstances:
                        print "Too many instances! you can't run more than %s instances" % maxInstances
                        sys.exit()
        else:
                if debug == 1: print "Port %s is free" % defaultport
                container = createMcInstance(defaultport)
                instanceCreated = 1
                counter +=1

# Return Instance ID, port and total number of instances once finished creating the server
print "Instance ID: %s" % container["Id"]
print "Listening on port %s" % defaultport
print "There are %s instances running on this server" % counter

# Update servers queue
queue_service = QueueService(account_name, account_key)
try:
	queue_service.create_queue(queuename)
except:
	print "Queue creation failed."

queue_service.put_message(queuename, '{"ContainerID":"%s", "ContainerPort":"%s", "ServerID":"%s"}' % (container["Id"], defaultport, socket.gethostname()))

sys.exit()
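
# checkPort() and createMcInstance() are not defined in this snippet; a
# hypothetical checkPort, returning 0 when the port is already in use (which is
# how the loop above interprets the result), might look like this:
import socket

def checkPort(port):
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        in_use = probe.connect_ex(('127.0.0.1', port)) == 0
    finally:
        probe.close()
    return 0 if in_use else port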
Example #40
0
def get_queue_service():
    "Returns the QueueService that can be used to put, peek, get and delete messages"
    queue_service = QueueService(ACCOUNT_NAME, ACCOUNT_KEY)
    queue_service.create_queue(QUEUE_NAME)
    return queue_service
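
# Usage sketch for get_queue_service() above: put, peek, get and delete one
# message on QUEUE_NAME. The message text is an arbitrary placeholder, and the
# .content/.id/.pop_receipt attributes assume the azure-storage QueueService
# SDK used elsewhere in these examples.
service = get_queue_service()
service.put_message(QUEUE_NAME, "hello from get_queue_service")
for peeked_message in service.peek_messages(QUEUE_NAME):
    print(peeked_message.content)
for fetched_message in service.get_messages(QUEUE_NAME):
    service.delete_message(QUEUE_NAME, fetched_message.id,
                           fetched_message.pop_receipt)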
Example #41
0
PUT_URL = BASEURL + "/api/zip/site/wwwroot"

SOURCE_DIR = os.path.dirname(os.path.abspath(__file__))

file_like_object = io.BytesIO()
zipfile_obj = zipfile.ZipFile(file_like_object, mode='w')

for f in ['function.json', 'run.py', 'jobrunner.py']:
    f = os.path.abspath(os.path.join(SOURCE_DIR, f))
    a = os.path.relpath(f, SOURCE_DIR + "/..")
    zipfile_obj.write(f, arcname=a)

zipfile_obj.writestr("host.json", "{}")

zipfile_obj.close()

r = put(PUT_URL, auth = (KUDU_USER, KUDU_PASS), data=file_like_object.getvalue())
print r.text
print "funciion deployed"

queue_service = QueueService(account_name=AZURE_STORAGE_ACC_NAME, account_key=AZURE_STORAGE_KEY)
queue_service.create_queue("pywrenqueue")

print "queue deployed"

blob_service = BlockBlobService(account_name=AZURE_STORAGE_ACC_NAME, account_key=AZURE_STORAGE_KEY)
blob_service.create_container("pywren1")

print "container created"

    logger = logging.getLogger(loggerName)
    init_logger()
    try:
        azureQueueAccountName = ""
        azureQueueKey = ""
        azureQueueAnalysisRecognizedItems = "recognizeditems-processing"
        azureQueueAnalysisResults = "analysis-results"
        azureQueueImageResults = "image-processing"

        queue_service = QueueService(account_name=azureQueueAccountName,
                                     account_key=azureQueueKey)

        while True:
            # create queue if it doesn't exist
            if not queue_service.exists(azureQueueAnalysisRecognizedItems):
                queue_service.create_queue(azureQueueAnalysisRecognizedItems)

            if not queue_service.exists(azureQueueImageResults):
                queue_service.create_queue(azureQueueImageResults)

            #get queue count
            metadata = queue_service.get_queue_metadata(
                azureQueueAnalysisResults)
            queue_count = metadata.approximate_message_count

            if queue_count > 0:
                read_next_in_queue()
            else:
                logger.info("time.sleep(3000)")
                time.sleep(3000)
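
# read_next_in_queue() is called above but not defined in this snippet. A
# hypothetical, module-level sketch is given below; the parameters are added
# here for self-containment and are not part of the original call signature.
def read_next_in_queue(queue_service, queue_name):
    # fetch one message, handle its content, then delete it from the queue
    for queued_message in queue_service.get_messages(queue_name, num_messages=1):
        print(queued_message.content)        # placeholder for real processing
        queue_service.delete_message(queue_name, queued_message.id,
                                     queued_message.pop_receipt)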