Example #1
class AzureProvider(BaseProvider):
    """Queue Provider for the Microsoft Azure."""

    def __init__(self, account_name, account_key):
        self.queue_service = QueueService(
            account_name=account_name, account_key=account_key
        )

    def put_message(self, queue_name: str, message: str):
        self.queue_service.put_message(
            queue_name=queue_name,
            message_text=message,
        )

    def get_message(self, queue_name: str):
        try:
            queue_message = self.queue_service.get_messages(
                queue_name=queue_name, numofmessages=1
            ).queue_messages[0]
        except IndexError:
            return {}
        else:
            return {
                attr_name: getattr(queue_message, attr_name)
                for attr_name in dir(queue_message)
                if not attr_name.startswith('_')
            }

    def delete_message(self, queue_name: str, message: dict):
        message_id = message.get('message_id')
        pop_receipt = message.get('pop_receipt')

        self.queue_service.delete_message(
            queue_name, message_id=message_id, popreceipt=pop_receipt,
        )
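A minimal round trip through this provider might look like the following sketch; the account values and queue name are placeholders, not part of the original snippet.

# Hypothetical usage of AzureProvider (placeholder credentials).
provider = AzureProvider(account_name='<account-name>', account_key='<account-key>')
provider.put_message('jobs', 'hello')       # enqueue
message = provider.get_message('jobs')      # returns {} when the queue is empty
if message:
    # acknowledge using the message_id/pop_receipt captured by get_message
    provider.delete_message('jobs', message)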
Example #2
    def test_sas_update(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        self.qs.put_message(self.test_queues[0], 'message1')
        token = self.qs.generate_shared_access_signature(
            self.test_queues[0],
            self._get_shared_access_policy(QueueSharedAccessPermissions.UPDATE),
        )
        result = self.qs.get_messages(self.test_queues[0])

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        service.update_message(
            self.test_queues[0],
            result[0].message_id,
            'updatedmessage1',
            result[0].pop_receipt,
            visibilitytimeout=0,
        )

        # Assert
        result = self.qs.get_messages(self.test_queues[0])
        self.assertEqual('updatedmessage1', result[0].message_text)
Example #3
    def test_sas_update(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_name = self._create_queue()
        self.qs.put_message(queue_name, u'message1')
        token = self.qs.generate_queue_shared_access_signature(
            queue_name,
            QueuePermissions.UPDATE,
            datetime.utcnow() + timedelta(hours=1),
        )
        result = self.qs.get_messages(queue_name)

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        service.update_message(
            queue_name,
            result[0].id,
            result[0].pop_receipt,
            visibility_timeout=0,
            content=u'updatedmessage1',
        )

        # Assert
        result = self.qs.get_messages(queue_name)
        self.assertEqual(u'updatedmessage1', result[0].content)
Example #4
    def test_sas_process(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        self.qs.put_message(self.test_queues[0], 'message1')
        token = self.qs.generate_shared_access_signature(
            self.test_queues[0],
            self._get_shared_access_policy(QueueSharedAccessPermissions.PROCESS),
        )

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.get_messages(self.test_queues[0])

        # Assert
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.message_id)
        self.assertEqual('message1', message.message_text)
Example #5
    def sas_with_signed_identifiers(self):
        queue_name = self._create_queue()
        self.service.put_message(queue_name, u'message1')

        # Set access policy on queue
        access_policy = AccessPolicy(permission=QueuePermissions.PROCESS,
                                     expiry=datetime.utcnow() + timedelta(hours=1))
        identifiers = {'id': access_policy}
        acl = self.service.set_queue_acl(queue_name, identifiers)

        # Wait 30 seconds for acl to propagate
        time.sleep(30)

        # Indicates to use the access policy set on the queue
        token = self.service.generate_queue_shared_access_signature(
            queue_name,
            id='id'
        )

        # Create a service and use the SAS
        sas_service = QueueService(
            account_name=self.account.account_name,
            sas_token=token,
        )

        messages = sas_service.get_messages(queue_name)
        for message in messages:
            print(message.content) # message1

        self.service.delete_queue(queue_name)
Example #6
    def get_queue_client_by_uri(queue_uri):
        queue_name, storage_name, key = StorageUtilities.get_storage_from_uri(queue_uri)

        queue_service = QueueService(account_name=storage_name, account_key=key)
        queue_service.create_queue(queue_name)

        return queue_service, queue_name
Example #7
    def get_queue_client_by_uri(queue_uri, session=None):
        storage = StorageUtilities.get_storage_from_uri(queue_uri, session)

        queue_service = QueueService(account_name=storage.storage_name, account_key=storage.key)
        queue_service.create_queue(storage.container_name)

        return queue_service, storage.container_name
Example #8
    def test_sas_process(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_name = self._create_queue()
        self.qs.put_message(queue_name, u'message1')
        token = self.qs.generate_queue_shared_access_signature(
            queue_name,
            QueuePermissions.PROCESS,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.get_messages(queue_name)

        # Assert
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1', message.content)
Example #9
class QueueStorageHandler(logging.Handler):
    """
    Handler class which sends log messages to an Azure Storage queue.
    """
    def __init__(self, 
                 account_name=None,
                 account_key=None,
                 protocol='https',
                 queue='logs',
                 message_ttl=None,
                 visibility_timeout=None,
                 base64_encoding=False,
                 is_emulated=False,
                 ):
        """
        Initialize the handler.
        """
        logging.Handler.__init__(self)
        self.service = QueueService(account_name=account_name,
                                    account_key=account_key,
                                    is_emulated=is_emulated,
                                    protocol=protocol)
        self.meta = {'hostname': gethostname(), 'process': os.getpid()}
        self.queue = _formatName(queue, self.meta)
        self.queue_created = False
        self.message_ttl = message_ttl
        self.visibility_timeout = visibility_timeout
        self.base64_encoding = base64_encoding

    def emit(self, record):
        """
        Emit a record.

        Format the record and send it to the specified queue.
        """
        try:
            if not self.queue_created:
                self.service.create_queue(self.queue)
                self.queue_created = True
            record.hostname = self.meta['hostname']
            msg = self._encode_text(self.format(record))
            self.service.put_message(self.queue,
                                     msg,
                                     self.visibility_timeout,
                                     self.message_ttl)
        except (KeyboardInterrupt, SystemExit):
            raise
        except:
            self.handleError(record)

    def _encode_text(self, text):
        if self.base64_encoding:
            text = b64encode(text.encode('utf-8')).decode('ascii')
        # fallback for the breaking change in azure-storage 0.33
        elif sys.version_info < (3,):
            if not isinstance(text, unicode):
                text = text.decode('utf-8')
        return text
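A sketch of attaching this handler to a standard logger; the credentials are placeholders, and the %(hostname)s field works because emit() injects hostname into each record.

# Hedged usage sketch for QueueStorageHandler (placeholder credentials).
import logging

handler = QueueStorageHandler(account_name='<account-name>',
                              account_key='<account-key>',
                              queue='logs',
                              base64_encoding=True)
handler.setFormatter(logging.Formatter('%(hostname)s %(levelname)s %(message)s'))
logger = logging.getLogger('app')
logger.addHandler(handler)
logger.warning('this record is sent to the Azure Storage queue')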
Example #10
    def get_queue_client_by_uri(queue_uri, session):
        storage = StorageUtilities.get_storage_from_uri(queue_uri, session)

        queue_service = QueueService(
            account_name=storage.storage_name,
            token_credential=storage.token)
        queue_service.create_queue(storage.container_name)

        return queue_service, storage.container_name
Example #11
class RegistrationQueueClient(object):
    """Obtains next registration object from queue"""
    queueName = 'registrationsqueue'

    def __init__(self, account_name, account_key):
        self._queueService = QueueService(account_name, account_key)
        print("RegistrationQueue Initialized")

    def LookupTicket(self, processData):
        # get_messages returns a list, so unpack the first message (if any)
        messages = self._queueService.get_messages(self.queueName, num_messages=1)
        if messages and processData(messages[0]):
            self._queueService.delete_message(self.queueName, messages[0].id, messages[0].pop_receipt)
Example #12
class SimulationListener(Thread):
    def __init__(self):
        Thread.__init__(self)
        self._quit = Event()
        self.daemon = True
        self.log = logging.getLogger(__name__)

        settings = Settings()
        self.create_queue = 'create-simulated-machine'
        self.destroy_queue = 'destroy-simulated-machine'

        self.queue_service = QueueService(
            account_name=settings.azure_queue_account,
            account_key=settings.azure_queue_key
        )
        self.queue_service.create_queue(self.create_queue)
        self.queue_service.create_queue(self.destroy_queue)

    def run(self):
        # dislike of unstoppable threads
        while not self._quit.is_set():
            try:
                messages = self.queue_service.get_messages(self.create_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Creating: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.post("http://localhost:8080/machines", machine_json)
                    self.queue_service.delete_message(self.create_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                self.log.error(str(e))

            try:
                messages = self.queue_service.get_messages(self.destroy_queue, numofmessages=10)
                for m in messages:
                    machine_json = b64decode(m.message_text)
                    machine = json.loads(machine_json)
                    self.log.info("Deleting: " + machine["Name"] + " on " + machine["Provider"])
                    self.log.debug(machine_json)
                    requests.delete("http://localhost:8080/machines/" + machine["MachineId"])
                    self.queue_service.delete_message(self.destroy_queue, m.message_id, m.pop_receipt)
            except Exception as e:
                self.log.error(str(e))

            sleep(1)

    def quit(self):
        self._quit.set()
Example #13
def get_queue_details(creds, resource_group_name, account_name, queue_name):
    keys = _get_storage_account_keys(creds, resource_group_name, account_name)
    queue_service = QueueService(account_name, keys.key1)

    model = StorageAccountQueueDetails()
    model.queue_name = queue_name
    model.metadata = queue_service.get_queue_metadata(queue_name)
    count = int(model.metadata.get("x-ms-approximate-messages-count", "0"))
    model.messages = queue_service.peek_messages(queue_name, count) if count else []
    for msg in model.messages:
        try:
            msg.decoded_text = b64decode(msg.message_text).decode()
        except:
            msg.decoded_text = None

    return model
Example #14
 def createAzureQueues(self, account_name, account_key):
     """
     Create a queue for unprocessed log messages. Entries in the queue
     will be dequeued, processed and deleted upon success.
     """
     self.queue_service = QueueService(account_name, account_key)
     self.queue_service.create_queue(self.queue_name)
Example #15
 def setUp(self):
     self.service = QueueService(ACCOUNT_NAME, ACCOUNT_KEY)
     # ensure that there's no message on the queue before each test
     queues = set()
     for cfg in LOGGING['handlers'].values():
         if 'queue' in cfg:
             queues.add(cfg['queue'])
     for queue in self.service.list_queues():
         if queue.name in queues:
             self.service.clear_messages(queue.name)
Example #16
class CheckinQueueClient(object):
    """Obtains next checkin object from queue"""
    queueName = 'checkinqueue'
    
    def __init__(self, account_name, account_key):
        self._queueService = QueueService(account_name, account_key)

    def startCheckinProcess(self, processData):
        while True:
            # get_messages returns a list; pass each message to the callback
            messages = self._queueService.get_messages(self.queueName, num_messages=1, visibility_timeout=30)
            for message in messages:
                processData(message)
Example #17
    def account_sas(self):
        queue_name = self._create_queue()
        metadata = {'val1': 'foo', 'val2': 'blah'}
        self.service.set_queue_metadata(queue_name, metadata=metadata)

        # Access to read operations on the queues themselves
        # Expires in an hour
        token = self.service.generate_account_shared_access_signature(
            ResourceTypes.CONTAINER,
            AccountPermissions.READ,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Create a service and use the SAS
        sas_service = QueueService(
            account_name=self.account.account_name,
            sas_token=token,
        )
        metadata = sas_service.get_queue_metadata(queue_name) # metadata={'val1': 'foo', 'val2': 'blah'}

        self.service.delete_queue(queue_name)
Example #18
    def test_sas_add(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        token = self.qs.generate_shared_access_signature(
            self.test_queues[0],
            self._get_shared_access_policy(QueueSharedAccessPermissions.ADD),
        )

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.put_message(self.test_queues[0], 'addedmessage')

        # Assert
        result = self.qs.get_messages(self.test_queues[0])
        self.assertEqual('addedmessage', result[0].message_text)
Example #19
    def test_sas_signed_identifier(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recordingfile(self.test_mode):
            return

        # Arrange
        si = SignedIdentifier()
        si.id = 'testid'
        si.access_policy.start = '2011-10-11'
        si.access_policy.expiry = '2018-10-12'
        si.access_policy.permission = QueueSharedAccessPermissions.READ
        identifiers = SignedIdentifiers()
        identifiers.signed_identifiers.append(si)

        resp = self.qs.set_queue_acl(self.test_queues[0], identifiers)

        self.qs.put_message(self.test_queues[0], 'message1')

        token = self.qs.generate_shared_access_signature(
            self.test_queues[0],
            SharedAccessPolicy(signed_identifier=si.id),
        )

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.peek_messages(self.test_queues[0])

        # Assert
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.message_id)
        self.assertEqual('message1', message.message_text)
Example #20
    def test_sas_signed_identifier(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        access_policy = AccessPolicy()
        access_policy.start = '2011-10-11'
        access_policy.expiry = '2018-10-12'
        access_policy.permission = QueuePermissions.READ

        identifiers = {'testid': access_policy}

        queue_name = self._create_queue()
        resp = self.qs.set_queue_acl(queue_name, identifiers)

        self.qs.put_message(queue_name, u'message1')

        token = self.qs.generate_queue_shared_access_signature(
            queue_name,
            id='testid'
        )

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_service_options(service, self.settings)
        result = service.peek_messages(queue_name)

        # Assert
        self.assertIsNotNone(result)
        self.assertEqual(1, len(result))
        message = result[0]
        self.assertIsNotNone(message)
        self.assertNotEqual('', message.id)
        self.assertEqual(u'message1', message.content)
Example #21
class TimeLineWorker:

    QUEUE_KEY = ''
    ACCOUNT_NAME = ''
    QUEUE_NAME = ''

    def __init__(self):
        self.queue_service = QueueService(account_name=TimeLineWorker.ACCOUNT_NAME, 
                                          account_key=TimeLineWorker.QUEUE_KEY)

    def insert_message(self):
        obj = {
            "message": "test message",
            "other_key": 10
        }
        message = unicode(json.dumps(obj))
        self.queue_service.put_message(TimeLineWorker.QUEUE_NAME, message)

    def get_next_message(self):
        messages = self.queue_service.get_messages(TimeLineWorker.QUEUE_NAME)
        for message in messages:
            print(message.content)
            self.queue_service.delete_message(TimeLineWorker.QUEUE_NAME, message.id, message.pop_receipt)
Example #22
def set_up_queue(queue_name):
    account_name = input('Please provide Azure account name:')
    account_key = input('Please provide Azure access key:')

    queue_service = QueueService(account_name, account_key)

    print('Creating queue: {} ...'.format(queue_name))

    try:
        queue_service.create_queue(queue_name)
    except AzureException as exc:
        print('Cannot create queue, reason: {}'.format(exc), file=sys.stderr)
        sys.exit(1)
    else:
        print('Queue successfully created.')

    print('Generating SAS token...')

    expiry_date = datetime.date.today() + datetime.timedelta(days=30)

    print('SAS token will expire on', str(expiry_date))

    access_policy = AccessPolicy(
        expiry=str(expiry_date),
        permission=(
            QueueSharedAccessPermissions.READ +
            QueueSharedAccessPermissions.ADD +
            QueueSharedAccessPermissions.PROCESS
        ),
    )

    sas_token = queue_service.generate_shared_access_signature(
        queue_name, SharedAccessPolicy(access_policy),
    )

    print('Generated SAS token:', sas_token)
    print('Please use above token in QueueManager.')
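The printed token could then be used by a consumer that never holds the account key; a sketch against the same legacy API, where the account, queue, and token values are placeholders.

# Hedged sketch: process messages with the SAS token from set_up_queue.
sas_service = QueueService(account_name='<account-name>', sas_token='<sas-token>')
for message in sas_service.get_messages('<queue-name>'):
    print(message.message_text)
    # the PROCESS permission covers both get and delete
    sas_service.delete_message('<queue-name>', message.message_id, message.pop_receipt)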
Example #23
    def test_sas_add(self):
        # SAS URL is calculated from storage key, so this test runs live only
        if TestMode.need_recording_file(self.test_mode):
            return

        # Arrange
        queue_name = self._create_queue()
        token = self.qs.generate_queue_shared_access_signature(
            queue_name,
            QueuePermissions.ADD,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Act
        service = QueueService(
            account_name=self.settings.STORAGE_ACCOUNT_NAME,
            sas_token=token,
        )
        self._set_test_proxy(service, self.settings)
        result = service.put_message(queue_name, u'addedmessage')

        # Assert
        result = self.qs.get_messages(queue_name)
        self.assertEqual(u'addedmessage', result[0].content)
Example #24
    def queue_sas(self):
        queue_name = self._create_queue()
        self.service.put_message(queue_name, u'message1')

        # Access only to the messages in the given queue
        # Process permissions to access messages
        # Expires in an hour
        token = self.service.generate_queue_shared_access_signature(
            queue_name,
            QueuePermissions.PROCESS,
            datetime.utcnow() + timedelta(hours=1),
        )

        # Create a service and use the SAS
        sas_service = QueueService(
            account_name=self.account.account_name,
            sas_token=token,
        )

        messages = sas_service.get_messages(queue_name)
        for message in messages:
            print(message.content) # message1

        self.service.delete_queue(queue_name)
Example #25
    def __init__(self):
        Thread.__init__(self)
        self._quit = Event()
        self.daemon = True
        self.log = logging.getLogger(__name__)

        settings = Settings()
        self.create_queue = 'create-simulated-machine'
        self.destroy_queue = 'destroy-simulated-machine'

        self.queue_service = QueueService(
            account_name=settings.azure_queue_account,
            account_key=settings.azure_queue_key
        )
        self.queue_service.create_queue(self.create_queue)
        self.queue_service.create_queue(self.destroy_queue)
Example #26
 def __init__(self, 
              account_name=None,
              account_key=None,
              protocol='https',
              queue='logs',
              message_ttl=None,
              visibility_timeout=None,
              base64_encoding=False,
              ):
     """
     Initialize the handler.
     """
     logging.Handler.__init__(self)
     self.service = QueueService(account_name=account_name,
                                 account_key=account_key,
                                 protocol=protocol)
     self.meta = {'hostname': gethostname(), 'process': os.getpid()}
     self.queue = _formatName(queue, self.meta)
     self.queue_created = False
     self.message_ttl = message_ttl
     self.visibility_timeout = visibility_timeout
     self.base64_encoding = base64_encoding
Example #27
 def __init__(self):
     self.queue_service = QueueService(account_name=TimeLineWorker.ACCOUNT_NAME, 
                                       account_key=TimeLineWorker.QUEUE_KEY)
Example #28
from azure.storage.queue import QueueService

queue_service = QueueService(
    account_name='intellimannstorage',
    account_key='<account-key-here>'
)

messages = queue_service.get_messages('myqueue')
metadata = queue_service.get_queue_metadata('myqueue')
count = metadata.approximate_message_count
print(count)
for message in messages:
    print(dir(message))
    print(message.content)
    print(type(message.content))
    queue_service.delete_message('myqueue', message.id, message.pop_receipt)
Example #29
def main():
    logging.basicConfig(level=logging.DEBUG)
    with open(TASKDATA) as taskdata_file:
        taskdata = json.loads(taskdata_file.read())
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    config = github.get_config()
    blob_service = AppendBlobService(
        account_name=taskdata["storage_account_name"],
        account_key=taskdata["storage_account_key"])
    queue_service = QueueService(
        connection_string=taskdata["queue_connection_string"])
    loop = asyncio.get_event_loop()
    ctx = Context(loop=loop,
                  config=config,
                  blob_service=blob_service,
                  queue_service=queue_service,
                  taskdata=taskdata)

    blob_service.create_container("logs",
                                  fail_on_exist=False,
                                  public_access=PublicAccess.Blob)
    blob_service.create_blob("logs",
                             ctx.pid,
                             content_settings=ContentSettings(
                                 content_type="text/plain; charset=utf-8"))
    gh_commit.create_status(
        "pending",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Build started",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    os.makedirs(REPOSDIR, exist_ok=True)
    # Check if we're the only process who updates the git cache on SMB share.
    # Otherwise skip updating.
    if not os.path.exists(LOCKFILENAME):
        lock = open(LOCKFILENAME, "w")
        lock.close()
        update_git_cache(ctx)
        os.unlink(LOCKFILENAME)

    if os.path.exists(SRCDIR):
        shutil.rmtree(SRCDIR)
    os.makedirs(os.path.join(SRCDIR, "build/conf"))
    with open(os.path.join(SRCDIR, "build/conf/auto.conf"), "a") as localconf:
        localconf.write("\n%s\n" % config.get("localconf", ""))
        localconf.write(AUTOCONFIG)

    repos = get_repos(config)
    repos.append((repodirname(taskdata["gh"]["repository"]["clone_url"]),
                  taskdata["gh"]["repository"]["clone_url"], None, None))
    for reponame, repourl, reporef, _ in repos:
        refrepopath = os.path.join(REPOSDIR, reponame)
        run(ctx,
            ["git", "clone", "--reference", refrepopath, repourl, reponame],
            cwd=SRCDIR)
        if reporef:
            LOG.info("Checkout %s to %s" % (reponame, reporef))
            run(ctx, ["git", "checkout", "%s" % reporef],
                cwd=os.path.join(SRCDIR, reponame))

    # Do checkout
    if taskdata["gh"]["type"] == "pull_request":
        LOG.info("Add remote repo %s" % taskdata["gh"]["clone_url"])
        run(ctx, [
            "git", "remote", "add", "contributor", taskdata["gh"]["clone_url"]
        ],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
        LOG.info("Fetch contributor's repo")
        run(ctx, ["git", "fetch", "contributor"],
            cwd=os.path.join(
                SRCDIR,
                repodirname(taskdata["gh"]["repository"]["clone_url"])))
    LOG.info("Checkout %s to %s" % (repodirname(
        taskdata["gh"]["repository"]["clone_url"]), taskdata["gh"]["sha"]))
    run(ctx, ["git", "checkout", taskdata["gh"]["sha"]],
        cwd=os.path.join(
            SRCDIR, repodirname(taskdata["gh"]["repository"]["clone_url"])))

    # Fetch sstate if any
    if os.path.exists(get_sstate_archive_path(ctx)):
        with tarfile.open(name=get_sstate_archive_path(ctx),
                          mode="r:gz") as sstate_tar:
            sstate_tar.extractall(path=SRCDIR)

    addlayers = []
    for dep in config["dependencies"]:
        repodir = repodirname(dep["url"])
        layers = dep.get("layers", None)
        if layers:
            addlayers.extend([
                "bitbake-layers add-layer ../%s/%s" % (repodir, layer)
                for layer in layers
            ])
        else:
            addlayers.append("bitbake-layers add-layer ../%s" % repodir)
    addlayers.append("bitbake-layers add-layer ../%s" %
                     repodirname(taskdata["gh"]["repository"]["clone_url"]))

    run_script(ctx,
               BUILDSCRIPT % ("\n".join(addlayers), config["bitbake_target"]),
               cwd=SRCDIR)
    save_sstate(ctx)

    # Github auth token has expired by now most probably => renew
    github = GithubAdapter(taskdata)
    gh_commit = github.get_commit()
    gh_commit.create_status(
        "success",
        target_url=blob_service.make_blob_url("logs", ctx.pid),
        description="Target has been built successfully",
        context=config.get("configuration_name",
                           "configuration%s" % taskdata["config_num"]))
    loop.close()
    # TODO: copy cloud-init log files to share
    taskdata["build_result"] = "success"
    queue_service.put_message(
        "buildresults",
        base64.b64encode(json.dumps(taskdata).encode("utf")).decode("utf"))
Example #30
class Queue:
    def __init__(self, account_name, account_key, queue_name="logqueue"):
        """Initialiaze a queue. The type is set by the
        'ACS_LOGGING_QUEUE_TYPE' environment variable. If it is set to
        'AzureStorageQueue' then values must be provided for
        'account_name' and 'account_key' which are values associated
        with the Azure Storage account. 'queue_name' is optional and
        defaults to 'logqueue'.

        """
        self.log = Log()
        self.queue_type = config.ACS_LOGGING_QUEUE_TYPE
        self.queue_name = queue_name
        # self.log.debug("Queue type: " + self.queue_type + " / " + self.queue_name)

        if self.queue_type == "AzureStorageQueue":
            self.createAzureQueues(account_name, account_key)
        elif self.queue_type == "LocalFile":
            self.file_queue = open(config.UNPROCESSED_LOG_FILE, 'w+')
        else:
            self.log.error("Unknown queue type: " + queue_type)

    def createAzureQueues(self, account_name, account_key):
        """
        Create a queue for unprocessed log messages. Entries in the queue
        will be dequeued, processed and deleted upon success.
        """
        self.queue_service = QueueService(account_name, account_key)
        self.queue_service.create_queue(self.queue_name)

    def close(self):
        """Perform any necessary clearnup on the queue
           at the end of a run.
        """
        if self.queue_type == "AzureStorageQueue":
            pass
        elif self.queue_type == "LocalFile":
            self.file_queue.close()
        else:
            self.log.error("Unknown queue type: " + queue_type)

    def enqueue(self, msg):
        if self.queue_type == "LocalFile":
            self.file_queue.write(msg + '\n')
        elif self.queue_type == "AzureStorageQueue":
            self.queue_service.put_message(self.queue_name, msg)
        else:
            self.log.error("We don't know how to handle queues of type " +
                           self.queue_type)
        self.log.debug(msg)

    def dequeue(self):
        messages = []
        if self.queue_type == "LocalFile":
            with open(config.UNPROCESSED_LOG_FILE, 'r') as f:
                messages = f.readlines()
        elif self.queue_type == "AzureStorageQueue":
            messages = self.queue_service.get_messages(self.queue_name)
        return messages

    def delete(self, message):
        self.queue_service.delete_message(self.queue_name, message.message_id,
                                          message.pop_receipt)
        #  with open(config.PROCESSED_LOG_FILE, 'a') as processed:
        #    processed.write(log)
        #  os.remove(config.UNPROCESSED_LOG_FILE)

    def delete_queue(self, queue_name):
        self.queue_service.delete_queue(queue_name, False)

    def getLength(self):
        """
        Get the approximate length of the queue
        """
        queue_metadata = self.queue_service.get_queue_metadata(self.queue_name)
        count = queue_metadata['x-ms-approximate-messages-count']
        return count

    def peek_messages(self, num_messages):
        """
        Peek at the top messages in the queue. This method does not remove the
        messages from the queue.
        """
        return self.queue_service.peek_messages(self.queue_name, num_messages)
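A sketch of driving this wrapper end to end, assuming config.ACS_LOGGING_QUEUE_TYPE is set to 'AzureStorageQueue'; the credentials are placeholders.

# Hedged usage sketch for the Queue wrapper above.
q = Queue('<account-name>', '<account-key>', queue_name='logqueue')
q.enqueue('an unprocessed log line')
for message in q.dequeue():
    q.delete(message)    # remove each message once processed
print(q.getLength())     # approximate count from queue metadata
q.close()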
Example #31
 def create_queue_from_storage_account(storage_account, name, session):
     token = StorageUtilities.get_storage_token(session)
     queue_service = QueueService(
         account_name=storage_account.name,
         token_credential=token)
     return queue_service.create_queue(name)
Example #32
class Azure_Storage():
    def __init__(self, create_new=False):
        account_name = config.STORAGE_ACCOUNT_NAME
        account_key = config.STORAGE_ACCOUNT_KEY

        self.task_queue_name = config.TASK_QUEUE_NAME
        self.table_name = config.TABLE_NAME
        self.container_name = config.BLOB_CONTAINER_NAME
        self.ImagePartitionKey = config.IMAGE_PARTITION_KEY

        self.table_service = TableService(account_name=account_name,
                                          account_key=account_key)
        self.block_blob_service = BlockBlobService(account_name=account_name,
                                                   account_key=account_key)
        self.queue_service = QueueService(account_name=account_name,
                                          account_key=account_key)

        if create_new:
            self.queue_service.create_queue(self.task_queue_name)
            self.block_blob_service.create_container(self.container_name)
            self.table_service.create_table(self.table_name)

    def put_image(self, image_uuid, image_bytes):
        ret = self.block_blob_service.create_blob_from_bytes(
            self.container_name, image_uuid, image_bytes)
        return ret

    def get_image(self, image_uuid):
        ret = self.block_blob_service.get_blob_to_bytes(
            self.container_name, image_uuid).content
        return ret

    def put_classification_result(self, image_uuid, results):
        task = Entity()
        task.PartitionKey = self.ImagePartitionKey
        task.RowKey = image_uuid
        task.results = str(results)
        ret = self.table_service.insert_or_replace_entity(
            self.table_name, task)
        return ret

    def get_classification_result(self, image_uuid):
        try:
            task = self.table_service.get_entity(self.table_name,
                                                 self.ImagePartitionKey,
                                                 image_uuid)
            return task.results
        except Exception as e:
            return None

    def put_task(self, taskmsg):
        ret = self.queue_service.put_message(self.task_queue_name, taskmsg)
        return ret

    #payload is in message.content
    def get_task(self, num_messages=16):
        messages = self.queue_service.get_messages(self.task_queue_name,
                                                   num_messages=num_messages,
                                                   visibility_timeout=1 * 60)
        return messages

    def delete_task(self, message):
        ret = self.queue_service.delete_message(self.task_queue_name,
                                                message.id,
                                                message.pop_receipt)
        return ret
Example #33
from azure.storage.queue import QueueService, QueueMessageFormat

acc_name = '<storage-account-name-here>'
key = '<unique-key-here>'
qname = '<queue-name-here>'

queue_service = QueueService(account_name=acc_name,
                             account_key=key)

def enqueue_text__input(message):
    '''
    The textual 'message' from the client side is sent to the queue named 'qname'.
    '''
    queue_service.put_message(qname, message)

def enqueue_binary__input(message):
    '''
    The binary 'message' from the client side is sent to the queue named 'qname';
    can be used for sending images etc.
    '''
    queue_service.encode_function = QueueMessageFormat.binary_base64encode
    queue_service.put_message(qname, message)
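On the consuming side, the matching decode function would restore a binary payload; a sketch under the same placeholder configuration, using the hypothetical helper name dequeue_binary__input.

def dequeue_binary__input():
    # Hedged sketch: read back a message enqueued with binary_base64encode.
    queue_service.decode_function = QueueMessageFormat.binary_base64decode
    for message in queue_service.get_messages(qname):
        payload = message.content    # bytes after base64 decoding
        queue_service.delete_message(qname, message.id, message.pop_receipt)
        return payload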
Example #34
 def delete_queue_from_storage_account(storage_account, name, session):
     token = StorageUtilities.get_storage_token(session)
     queue_service = QueueService(
         account_name=storage_account.name,
         token_credential=token)
     return queue_service.delete_queue(name)
Example #35
    global next_call

    logging.info("Making new prediction...")

    # Make prediction
    prediction = get_queue_data()
    logging.info("Prediction is : " + str(prediction))

    # Upload prediction to server
    requests.post(PREDICTION_SERVER_URL + "/model",
                  json={"prediction": str(prediction)})

    next_call = next_call + PULL_INTERVAL
    threading.Timer(next_call - time.time(), run_loop).start()


if __name__ == '__main__':
    load_dotenv(find_dotenv())
    PREDICTION_SERVER_URL = os.getenv("PREDICTION_SERVER_URL",
                                      "http://20.42.27.246")
    ACCOUNT_KEY = os.getenv('ACCOUNT_KEY')
    ACCOUNT_NAME = os.getenv('ACCOUNT_NAME')
    logging.info("Key//Name:" + ACCOUNT_KEY + "//" + ACCOUNT_NAME)
    MAX_MESSAGE_LIMIT = 32
    PULL_INTERVAL = int(os.getenv('PULL_INTERVAL', 600))  # in seconds
    QUEUE_NAME = 'batchqueue'
    DEBUG = os.getenv('DEBUG', False)
    queue_service = QueueService(account_name=ACCOUNT_NAME,
                                 account_key=ACCOUNT_KEY)
    run_loop()
Example #36
 def create_queue_from_storage_account(storage_account, name):
     keys = StorageUtilities.get_storage_keys(storage_account.id)
     queue_service = QueueService(account_name=storage_account.name,
                                  account_key=keys[0].value)
     return queue_service.create_queue(name)
Example #37
from azure.storage.queue import QueueService
import datetime
import json
import socket

import logger

log = logger.setup_custom_logger('consumer')

try:
    with open('config.json') as config_file:
        config = json.load(config_file)
except FileNotFoundError:
    print("No config.json set")
    exit(1)

print("Getting events from: " + config['queueName'])

queue_service = QueueService(account_name=config['accountName'],
                             account_key=config['accountKey'])

doneCount = 0
doneInterval = 1000

date = datetime.datetime.now().isoformat()
hostname = socket.gethostname()
filepath = "{}/{}-{}-{}.json".format(config['dataDirectory'],
                                     config['queueName'], date, hostname)

with open(filepath, "w") as outfile:

    while True:
        messages = queue_service.get_messages(config['queueName'],
                                              num_messages=32)
Example #38
from azure.storage.queue import QueueService
import sys
import json
import datetime
import socket
import pprint

import logger

log = logger.setup_custom_logger('consumer')

try:
    with open('config.json') as config_file:
        config = json.load(config_file)
except FileNotFoundError:
    print("No config.json set")
    exit(1)

print("Getting meta from: " + config['queueName'])

queue_service = QueueService(account_name=config['accountName'],
                             account_key=config['accountKey'])

metadata = queue_service.get_queue_metadata(config['queueName'])

log.info(metadata.approximate_message_count)
Example #39
def create_queue_of_proxies(proxies):
    queue_service = QueueService(account_name=account_name,
                                 account_key=account_key)
    queue_service.create_queue('proxies')
    for proxy in proxies:
        queue_service.put_message('proxies', proxy)
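A matching consumer sketch for the queue filled above, reusing the same assumed module-level account_name and account_key; drain_proxies is a hypothetical name.

def drain_proxies():
    # Hedged sketch: drain the 'proxies' queue created by create_queue_of_proxies.
    queue_service = QueueService(account_name=account_name, account_key=account_key)
    while True:
        messages = queue_service.get_messages('proxies', num_messages=32)
        if not messages:
            break
        for message in messages:
            print(message.content)
            queue_service.delete_message('proxies', message.id, message.pop_receipt)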
Example #40
class Results(object):
    """
    Handles interacting with encrypted results in blob storage.
    """
    def __init__(self, logger, redisHost, redisPort):
        """
        Initializes a new instance of the JobStatus class.

        :param logger logger: The logger instance to use for logging
        :param str redis_host: Redis host where the Redis Q is running
        :param int redis_port: Redis port where the Redis Q is running
        """
        self.logger = logger
        self.config = Config()
        self.redis_host = redisHost
        self.redis_port = redisPort
        # create an instance of AESCipher to use for encryption
        aesHelper = AESHelper(self.config)
        self.aescipher = aesHelper.create_aescipher_from_config()
        if (self.init_storage_services() is False):
            raise Exception(
                "Errors occurred instantiating results storage service.")

    def init_storage_services(self):
        """
        Initializes the storage service clients using values from config.py.
        :return: True on success. False on failure.
        :rtype: boolean
        """
        try:
            # creates instance of BlockBlobService and AppendBlobService to use for completed results storage
            self.storage_service = BlockBlobService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.results_container_sas_token)
            self.append_storage_service = AppendBlobService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.results_container_sas_token)
            self.storage_service.create_container(
                self.config.results_container_name)

            # creates instances of Azure QueueService
            self.job_status_queue_service = QueueService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.job_status_queue_sas_token)
            self.job_status_queue_service.encode_function = models.QueueMessageFormat.noencode
            self.results_queue_service = QueueService(
                account_name=self.config.storage_account_name,
                sas_token=self.config.results_queue_sas_token)
            self.results_queue_service.create_queue(
                self.config.results_container_name)
            self.results_queue_service.encode_function = models.QueueMessageFormat.noencode

            # creates instance of Redis client to use for job status storage
            pool = redis.ConnectionPool(host=self.redis_host,
                                        port=self.redis_port)
            self.storage_service_cache = redis.Redis(connection_pool=pool)

            return True
        except Exception as ex:
            self.log_exception(ex, self.init_storage_services.__name__)
            return False

    def log_exception(self, exception, functionName):
        """
        Logs an exception to the logger instance for this class.

        :param Exception exception: The exception thrown.
        :param str functionName: Name of the function where the exception occurred.
        """
        self.logger.debug("Exception occurred in: " + functionName)
        self.logger.debug(type(exception))
        self.logger.debug(exception)

    def write_result(self, result):
        """
        Encrypts and writes result to queue

        :param str result: The result to write to queue
        :return: True on success. False on failure.
        :rtype: boolean
        """
        try:
            # encrypt the encoded result and then encode it
            encryptedResult = base64.b64encode(self.aescipher.encrypt(result))

            # put the encoded result into the azure queue for future consolidation
            self.results_queue_service.put_message(
                self.config.results_queue_name, encryptedResult)

            return True
        except Exception as ex:
            self.log_exception(ex, self.write_result.__name__)
            return False

    def count_consolidated_results(self):
        """
        Returns a count of results that were consolidated.

        "return: int count: Total count of results that were consolidated.
        """
        try:
            consolidatedResults = self.storage_service_cache.get(
                self.config.results_consolidated_count_redis_key)
            return consolidatedResults
        except Exception as ex:
            self.log_exception(ex, self.count_consolidated_results.__name__)
            return False

    def consolidate_results(self):
        """
        Consolidates all individual result files into single result file in storage. Blobs are deleted once they
        are added to the consolidated file.

        "return: int count: Total count of results consolidated in result file.
        """
        result_messages = []
        try:
            # ensure the consolidated append blob exists
            if not self.append_storage_service.exists(
                    self.config.results_container_name,
                    blob_name=self.config.results_consolidated_file):
                self.append_storage_service.create_blob(
                    self.config.results_container_name,
                    self.config.results_consolidated_file)

            with io.BytesIO() as consolidated_result:
                while len(result_messages
                          ) < self.config.result_consolidation_size:
                    messages = self.results_queue_service.get_messages(
                        self.config.results_queue_name,
                        min(self.config.result_consolidation_size, 32))

                    # If the queue is empty, stop and consolidate
                    if not messages:
                        break

                    # add the message to the memory stream
                    for msg in messages:
                        consolidated_result.write(msg.content + "\n")
                        result_messages.append(msg)

                # append the results to the consolidated file
                consolidated_result.seek(0)
                self.append_storage_service.append_blob_from_stream(
                    self.config.results_container_name,
                    self.config.results_consolidated_file, consolidated_result)

            # remove all of the messages from the queue
            num_of_consolidated_results = len(result_messages)
            for msg in result_messages:
                self.results_queue_service.delete_message(
                    self.config.results_queue_name, msg.id, msg.pop_receipt)
            self.storage_service_cache.incrby(
                self.config.results_consolidated_count_redis_key,
                num_of_consolidated_results)

            # write the count of results we consolidated out to queue to provide status
            self.job_status_queue_service.put_message(
                self.config.job_status_queue_name,
                str(num_of_consolidated_results) + " results consolidated.")

            return len(result_messages)

        except Exception as ex:
            self.log_exception(ex, self.consolidate_results.__name__)
            return len(result_messages)

    def get_total_jobs_consolidated_status(self):
        """
        Write out the current state of the workload: the percentage of jobs that are completed and consolidated.
        :return: float status: percentage of completed jobs
        """
        # log out total job status
        total_scheduled_jobs = self.storage_service_cache.get(
            self.config.scheduled_jobs_count_redis_key)
        total_consolidated_results = self.storage_service_cache.get(
            self.config.results_consolidated_count_redis_key)

        if total_consolidated_results is None:
            total_consolidated_results = "0"

        status_message = "Total: " + total_consolidated_results + "/" + total_scheduled_jobs + " jobs have been successfully processed and consolidated."
        self.logger.info(status_message)
        self.job_status_queue_service.put_message(
            self.config.job_status_queue_name, status_message)

        return float(total_consolidated_results) / int(total_scheduled_jobs)
Example #41
import subprocess


def get_stdout_from_cmd(command):
    try:
        p = subprocess.Popen(command,
                             stdout=subprocess.PIPE,
                             stderr=subprocess.STDOUT)
        stdout = p.stdout.readlines()
    except:
        raise Exception(
            'Unable to get_stdout_from_cmd(). Maybe fortune is not installed?')
    result = b''.join(stdout)
    return result


queue_service = QueueService(account_name=accName, account_key=accKey)
queue_service.encode_function = QueueMessageFormat.text_base64encode

t = Terminal()
print(t.red('Writing to Azure Storage Queue ' + queueName + '...'))

for i in range(1, 20):
    cmd = 'fortune'
    msg = get_stdout_from_cmd(cmd)
    queue_service.put_message(queueName, msg.decode("utf-8"))
    sys.stdout.write('!')
    sys.stdout.flush()

print('\nDone adding messages.')
Example #42
from azure.storage.queue import QueueService
import sys
import json

config = {
    "accountName": "",
    "accountKey": "",
    "queueName": "",
    "producerCount": 10
}

try:
    with open('config.json') as config_file:
        config = json.load(config_file)
except FileNotFoundError:
    print("No config.json set")
    exit(1)

print("Sending events to: " + config['queueName'])

queue_service = QueueService(account_name=config['accountName'],
                             account_key=config['accountKey'])

while True:
    line = sys.stdin.readline()
    if line:
        queue_service.put_message(config['queueName'], line)
Example #43
        self.text = text
        self.user_id = user_id


###################################### SETUP ######################################

# Data read in from env vars..
CONS_SECRET = os.environ.get("TW_MOE_SECRET")
ENVNAME = os.environ.get('ENVIRON_NAME')
WEBHOOK_URL = os.environ.get('MOE_URL')
MOEQUE_CONN_STRING = os.environ.get('MOEQUE_CONN_STRING')
MOE_ID = os.environ.get('MOE_ID')
queue_name = os.environ.get('QUEUE_NAME')

# Set up queue w/ Azure
queue_service = QueueService(connection_string=MOEQUE_CONN_STRING)
queue_service.create_queue(queue_name)

# Supported commands, will probably add more here later.
commands = ['moerand', 'moename', 'moemess']
match_commands = re.compile(
    r'(?:{})'.format('|'.join(map(re.escape, commands))), re.IGNORECASE)

app = Flask(__name__)

###################################### APP ######################################


@app.errorhandler(404)
def not_found(e):
    print("404")
Example #44
import logging
import azure.functions as func
import numpy as np
import json, base64, time, random, string
from azure.storage.blob import BlockBlobService, PublicAccess
from azure.storage.blob.models import BlobBlock
from azure.storage.queue import QueueService, QueueMessageFormat

########################################################################################################################
# Credentials

blob_service = BlockBlobService(account_name='', account_key='')

queue_service = QueueService(account_name='', account_key='')

queue_service.encode_function = QueueMessageFormat.text_base64encode

#######################################################################################################################


def convert_to_string(t):
    if len(t) == 1:
        return str(t[0])
    elif len(t) == 2:
        return str(t[0]) + 'S' + str(t[1])
    else:
        return str(t[0]) + 'S' + str(t[1]) + 'S' + str(t[2])


def convert_int_from_string(s):
    s_split = s.split('S')
Example #45
###
print('\nLet\'s create an Azure Storage Queue to drop some messages on.')
raw_input('Press Enter to continue...')

# Each storage account has a primary and secondary access key.
# These keys are used by aplications to access data in your storage account, such as Queues.
# Obtain the primary storage access key for use with the rest of the demo

response = azurerm.get_storage_account_keys(auth_token, subscription_id,
                                            resourcegroup_name,
                                            storageaccount_name)
storageaccount_keys = json.loads(response.text)
storageaccount_primarykey = storageaccount_keys['keys'][0]['value']

# Create the Queue with the Azure Storage SDK and the access key obtained in the previous step
queue_service = QueueService(account_name=storageaccount_name,
                             account_key=storageaccount_primarykey)
response = queue_service.create_queue('pizzaqueue')
if response == True:
    print('Storage Queue: pizzaqueue created successfully.\n')
else:
    print('Error creating Storage Queue.\n')

###
# Use the Azure Storage Storage SDK for Python to drop some messages in our Queue
###
print(
    'Now let\'s drop some messages in our Queue.\nThese messages could indicate a take-out order being received for a customer ordering pizza.'
)
raw_input('Press Enter to continue...')

# This basic example creates a message for each pizza ordered. The message is *put* on the Queue.
Example #46
 def get_queue_client_by_storage_account(storage_account, session):
     token = StorageUtilities.get_storage_token(session)
     queue_service = QueueService(account_name=storage_account.name,
                                  token_credential=token,
                                  endpoint_suffix=session.storage_endpoint)
     return queue_service
Example #47
 def _get_queue_client_from_storage_account(storage_account, token):
     return QueueService(account_name=storage_account['name'], token_credential=token)
Example #48
# Azure storage account and secret key
azure_storage_account = ""
key=""

import random, string
from azure.storage.queue import QueueService

# Initialize the service
queue_service = QueueService(account_name=azure_storage_account, account_key=key)

# Create a new queue
queue_service.create_queue('testqueue')

# Put a message into the queue
rnd = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(32))

print (rnd)

queue_service.put_message('testqueue', rnd)

# Peek at all messages in the queue - without dequeuing or deleting
messages = queue_service.peek_messages('testqueue')
for message in messages:
	print ("expiration_time: " + message.expiration_time)
	print ("insertion_time: " + message.insertion_time)
	print ("message_id: " + message.message_id)
	print ("message_text: " + message.message_text)
	print ("expiration_time: " + message.expiration_time)
	print ("pop_receipt: " + message.pop_receipt)
	print ("time_next_visible: " + message.time_next_visible)
Example #49
class TestTestQueue(TestCase):

    def test_push(self):
        self.client = QueueService(account_name='', account_key='')
        self.client.put_message('testqueue', u"Test message")
Example #50
def get_queue_service():
    "Returns the QueueService that can be used to put, peek, get and delete messages"
    queue_service = QueueService(ACCOUNT_NAME, ACCOUNT_KEY)
    queue_service.create_queue(QUEUE_NAME)
    return queue_service
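Callers would then use the returned service directly; a short sketch, where QUEUE_NAME is the same module-level constant the snippet assumes.

# Hedged usage sketch for get_queue_service().
queue_service = get_queue_service()
queue_service.put_message(QUEUE_NAME, u'hello')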
Example #51
 def __init__(self, api, generator, queue_name, conn_str):
     self.api = api
     self.gen_util = Generate_util(generator)
     self.queue_name = queue_name
     self.queue_service = QueueService(connection_string=conn_str)
     self.queue_service.create_queue(queue_name)