    def ingest_from_blob(self, blob_descriptor: BlobDescriptor,
                         ingestion_properties: IngestionProperties):
        """
        Enqueue an ingest command from Azure blobs.
        To learn more about ingestion methods go to:
        https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-overview#ingestion-methods
        :param azure.kusto.ingest.BlobDescriptor blob_descriptor: An object that contains a description of the blob to be ingested.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        """
        try:
            queues = self._resource_manager.get_ingestion_queues()
        except KustoServiceError as ex:
            self._validate_endpoint_service_type()
            raise ex

        random_queue = random.choice(queues)
        queue_service = QueueServiceClient(random_queue.account_uri)
        authorization_context = self._resource_manager.get_authorization_context()
        ingestion_blob_info = _IngestionBlobInfo(
            blob_descriptor,
            ingestion_properties=ingestion_properties,
            auth_context=authorization_context)
        ingestion_blob_info_json = ingestion_blob_info.to_json()
        # TODO: perhaps this needs to be more visible
        content = ingestion_blob_info_json
        queue_client = queue_service.get_queue_client(
            queue=random_queue.object_name,
            message_encode_policy=TextBase64EncodePolicy())
        queue_client.send_message(content=content)
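
A minimal caller-side sketch of how this method is typically reached through the azure-kusto-ingest package (the cluster URI, blob SAS URL, database, and table names below are placeholders, and the import paths assume a recent SDK version):

from azure.kusto.data import KustoConnectionStringBuilder
from azure.kusto.data.data_format import DataFormat
from azure.kusto.ingest import BlobDescriptor, IngestionProperties, QueuedIngestClient

# Ingestion goes through the cluster's ingest endpoint, not the query endpoint.
kcsb = KustoConnectionStringBuilder.with_aad_device_authentication(
    "https://ingest-<cluster>.kusto.windows.net")
client = QueuedIngestClient(kcsb)

# Describe the blob (a SAS URL) and where/how it should land.
blob = BlobDescriptor(
    "https://<account>.blob.core.windows.net/<container>/data.csv?<sas>", size=1024)
props = IngestionProperties(database="MyDatabase", table="MyTable",
                            data_format=DataFormat.CSV)

# This ends up in ingest_from_blob above: the request is serialized to JSON and
# pushed onto one of the service's ingestion queues with base64 text encoding.
client.ingest_from_blob(blob, ingestion_properties=props)
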
Example #2
def get_queue(queue_name, create_queue, clear_queue):
    ''' Note that constructing the QueueClient does not mean the queue must already exist;
    the client is only a representation of a queue which may or may not exist yet, and it
    can be created later via its create_queue() method. '''
    keyVaultName = os.environ["KEY_VAULT_NAME"]
    keyVault_URI = "https://" + keyVaultName + ".vault.azure.net"
    credential = DefaultAzureCredential()
    client = SecretClient(vault_url=keyVault_URI, credential=credential)
    data_access_key = client.get_secret("thecupstore-key")
    account_url = "https://thecupstore.queue.core.windows.net/"
    queueclient = QueueClient(account_url=account_url,
                              queue_name=queue_name,
                              credential=data_access_key.value,
                              message_encode_policy=TextBase64EncodePolicy(),
                              message_decode_policy=TextBase64DecodePolicy())
    # Check that the queue exists and if not create it if the create switch has been passed as True
    try:
        queueclient.get_queue_properties()
    except ResourceNotFoundError:  # from azure.core.exceptions: the queue does not exist yet
        if create_queue:
            queueclient.create_queue()
        else:
            message = "Queue does not exist"
    else:
        if clear_queue:
            queueclient.clear_messages()

    if 'message' in locals():  # checks for existence of message variable
        return message
    else:
        return queueclient
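
A short illustrative caller for the helper above (the queue name and flag values are hypothetical):

result = get_queue("goalqueue", create_queue=False, clear_queue=True)
if isinstance(result, str):
    # get_queue returns the string "Queue does not exist" when the queue is
    # missing and create_queue was False.
    logging.error(result)
else:
    # Otherwise it returns a ready-to-use QueueClient with base64 policies attached.
    result.send_message("kickoff")
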
Example #3
def main(msg: func.QueueMessage) -> None:
    queue_client = QueueClient.from_connection_string(
        os.environ['AzureWebJobsStorage'],
        'oat-queue',
        message_encode_policy=TextBase64EncodePolicy(),
    )
    queue_client.send_message(msg.get_body().decode(), visibility_timeout=3600)
Example #4
    def ingest_from_blob(
            self, blob_descriptor: BlobDescriptor,
            ingestion_properties: IngestionProperties) -> IngestionResult:
        """Enqueue an ingest command from azure blobs.
        To learn more about ingestion methods go to:
        https://docs.microsoft.com/en-us/azure/data-explorer/ingest-data-overview#ingestion-methods
        :param azure.kusto.ingest.BlobDescriptor blob_descriptor: An object that contains a description of the blob to be ingested.
        :param azure.kusto.ingest.IngestionProperties ingestion_properties: Ingestion properties.
        """
        try:
            queues = self._resource_manager.get_ingestion_queues()
        except KustoServiceError as ex:
            self._validate_endpoint_service_type()
            raise ex

        random_queue = random.choice(queues)
        queue_service = QueueServiceClient(random_queue.account_uri,
                                           proxies=self._proxy_dict)
        authorization_context = self._resource_manager.get_authorization_context()
        ingestion_blob_info = IngestionBlobInfo(
            blob_descriptor,
            ingestion_properties=ingestion_properties,
            auth_context=authorization_context)
        ingestion_blob_info_json = ingestion_blob_info.to_json()
        queue_client = queue_service.get_queue_client(
            queue=random_queue.object_name,
            message_encode_policy=TextBase64EncodePolicy())
        queue_client.send_message(content=ingestion_blob_info_json,
                                  timeout=self._SERVICE_CLIENT_TIMEOUT_SECONDS)

        return IngestionResult(IngestionStatus.QUEUED,
                               ingestion_properties.database,
                               ingestion_properties.table,
                               blob_descriptor.source_id, blob_descriptor.path)
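
This variant returns an IngestionResult; a caller might inspect it like this (a sketch that reuses the client, blob, and props names from the ingestion sketch near the top of this listing, and assumes the result's attributes mirror the constructor arguments and that IngestionStatus is importable from azure.kusto.ingest):

from azure.kusto.ingest import IngestionStatus

result = client.ingest_from_blob(blob, ingestion_properties=props)
if result.status == IngestionStatus.QUEUED:
    print("Queued ingestion into {}.{} (source_id={})".format(
        result.database, result.table, result.source_id))
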
Example #5
    def test_message_text_base64(self, storage_account_name, storage_account_key):
        # Arrange.
        qsc = QueueServiceClient(self.account_url(storage_account_name, "queue"), storage_account_key)
        queue = QueueClient(
            account_url=self.account_url(storage_account_name, "queue"),
            queue_name=self.get_resource_name(TEST_QUEUE_PREFIX),
            credential=storage_account_key,
            message_encode_policy=TextBase64EncodePolicy(),
            message_decode_policy=TextBase64DecodePolicy())

        message = u'\u0001'

        # Asserts
        self._validate_encoding(queue, message)
Example #6
    def test_message_text_base64(self):
        # Arrange.
        queue_url = self._get_queue_url()
        credentials = self._get_shared_key_credential()
        queue = QueueClient(
            queue_url=queue_url,
            queue=self.get_resource_name(TEST_QUEUE_PREFIX),
            credential=credentials,
            message_encode_policy=TextBase64EncodePolicy(),
            message_decode_policy=TextBase64DecodePolicy())

        message = u'\u0001'

        # Asserts
        self._validate_encoding(queue, message)
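
The _validate_encoding helper these tests call is not included in the excerpts; a plausible sketch of such a method on the test class, round-tripping the text through the paired encode/decode policies:

    def _validate_encoding(self, queue, message):
        # Hypothetical helper (not shown above): the message goes out through
        # TextBase64EncodePolicy and comes back through TextBase64DecodePolicy,
        # so the received content should equal the original text.
        queue.create_queue()
        try:
            queue.send_message(message)
            received = next(iter(queue.receive_messages()))
            assert received.content == message
        finally:
            queue.delete_queue()
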
Example #7
    async def test_message_text_base64(self, resource_group, location,
                                       storage_account, storage_account_key):
        # Arrange.
        qsc = QueueServiceClient(self._account_url(storage_account.name),
                                 storage_account_key,
                                 transport=AiohttpTestTransport())
        queue = QueueClient(queue_url=self._account_url(storage_account.name),
                            queue=self.get_resource_name(TEST_QUEUE_PREFIX),
                            credential=storage_account_key,
                            message_encode_policy=TextBase64EncodePolicy(),
                            message_decode_policy=TextBase64DecodePolicy(),
                            transport=AiohttpTestTransport())

        message = '\u0001'

        # Asserts
        await self._validate_encoding(queue, message)
Example #8
def upload(globpath, container, queue, sas_token_env, storage_account_uri):
    try:
        sas_token = os.getenv(sas_token_env)
        if sas_token is None:
            getLogger().error(
                "Sas token environment variable {} was not defined.".format(
                    sas_token_env))
            return 1

        files = glob(globpath, recursive=True)

        for infile in files:
            blob_name = get_unique_name(infile, os.getenv('HELIX_WORKITEM_ID'))

            getLogger().info("uploading {}".format(infile))

            blob_client = BlobClient(
                account_url=storage_account_uri.format('blob'),
                container_name=container,
                blob_name=blob_name,
                credential=sas_token)

            with open(infile, "rb") as data:
                blob_client.upload_blob(data,
                                        blob_type="BlockBlob",
                                        content_settings=ContentSettings(
                                            content_type="application/json"))

            if queue is not None:
                queue_client = QueueClient(
                    account_url=storage_account_uri.format('queue'),
                    queue_name=queue,
                    credential=sas_token,
                    message_encode_policy=TextBase64EncodePolicy())
                queue_client.send_message(blob_client.url)

            getLogger().info("upload complete")
        return 0

    except Exception as ex:
        getLogger().error('{0}: {1}'.format(type(ex), str(ex)))
        getLogger().error(format_exc())
        return 1
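
An illustrative call of upload (the glob pattern, container, queue, environment variable, and account name are placeholders; note the account URI keeps a {} slot that the function fills with 'blob' or 'queue'):

import sys

exit_code = upload(
    globpath="artifacts/**/*.json",
    container="results",
    queue="results-queue",
    sas_token_env="RESULTS_SAS_TOKEN",
    storage_account_uri="https://myaccount.{}.core.windows.net")
sys.exit(exit_code)  # 0 on success, 1 if the SAS token is missing or an upload fails
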
Example #9
File: main.py | Project: Ratomir/dataweek
from azure.storage.queue import (QueueClient, TextBase64EncodePolicy)

try:
    connection_string = os.environ['QUEUE_STORAGE']
    queue_name = os.environ['QUEUE_NAME']

    parties = [
        'it'.upper(), 'csharp'.upper(), 'java'.upper(), 'python'.upper()
    ]
    regions = ['A', 'B', 'C', 'D']
    places = ['Sarajevo', 'Zenica', 'Banja Luka', 'Pale']

    queue = QueueClient.from_connection_string(
        conn_str=connection_string,
        queue_name=queue_name,
        message_encode_policy=TextBase64EncodePolicy())

    for count in range(0, int(sys.argv[1])):
        msg_body = {
            "party": parties[randint(0, 3)],
            "count": randint(1, 25),
            "electoralPlace": places[randint(0, 3)],
            "electoralUnit": regions[randint(0, 3)]
        }

        queue.send_message(json.dumps(msg_body))

        print("Message {} has been sent successfully".format(count))

    print("Votes have been submited")
Example #10
def main(req: func.HttpRequest) -> func.HttpResponse:
    utc_timestamp = datetime.datetime.utcnow().replace(
        tzinfo=datetime.timezone.utc).isoformat()

    connect_str = os.getenv('source_files_connection')
    target_connect_str = os.getenv('target_files_connection')

    queue_client = QueueClient.from_connection_string(
        conn_str=target_connect_str,
        queue_name='scalequeue2',
        message_encode_policy=TextBase64EncodePolicy())
    blob_service_client = BlobServiceClient.from_connection_string(connect_str)
    container_client = blob_service_client.get_container_client('fake-date-files')

    blob_list = container_client.list_blobs()
    count = 0
    for blob in blob_list:
        logging.info(blob.name)
        queue_client.send_message(content=blob.name)
        count += 1

    # Report how many blob names were queued, matching the HttpResponse return annotation.
    return func.HttpResponse(f"Queued {count} blob names.", status_code=200)
Example #11
def main(trigger: func.QueueMessage):
    '''
    The function writes to the queue directly through the imported SDK client rather than an
    output binding, because output-binding writes are only flushed once the function has finished.
    '''
    logging.info('matchengine triggered')
    message = trigger.get_body().decode()  # decode to utf-8 and remove the leading b''

    # The incoming message has to be plain text for base64 decoding, so expect a comma-separated string of team names in fixture-list order.
    team_list = message.split(",")
    # Remove the first element, which tells us whether we're playing normal time, extra time or penalties.
    game_stage = team_list.pop(0)
    query_string = ""
    for team in team_list:
        query_string += "Name eq \'" + team + "\' or "
    query_string = query_string[:-4]  # Remove trailing ' or '

    # Get the team stats from the table
    keyVaultName = os.environ["KEY_VAULT_NAME"]
    keyVault_URI = "https://" + keyVaultName + ".vault.azure.net"
    credential = DefaultAzureCredential()
    client = SecretClient(vault_url=keyVault_URI, credential=credential)
    data_access_key = client.get_secret("thecupstore-key")

    table_service = TableService(account_name='thecupstore',
                                 account_key=data_access_key.value)
    team_stats = table_service.query_entities('Teams', filter=query_string)

    # Set up the queue to write goals and timer intervals to
    account_url = "https://thecupstore.queue.core.windows.net/"
    queue_name = "goalqueue"
    goal_queue = QueueClient(account_url=account_url,
                             queue_name=queue_name,
                             credential=data_access_key.value,
                             message_encode_policy=TextBase64EncodePolicy())

    # Get in fixture list format and create the current round ready to play
    fixtures = create_fixtures(team_list)
    current_round = Round(fixtures, team_stats)
    matches = current_round.get_matches()
    if game_stage == "normal":
        MATCH_LENGTH = 90
        match_time = 1
    elif game_stage == "extra":
        MATCH_LENGTH = 120
        match_time = 91
    else:
        match_time = 120

    if game_stage == "normal" or game_stage == "extra":
        while match_time <= MATCH_LENGTH:
            for match in matches:
                for team in match:
                    if goal_chance(team["goal_chance"]):
                        # goal chance created. Check if saved.
                        if goal_saved(team["keeping"]):
                            pass
                        else:
                            # goal scored
                            goal_queue.send_message(team["name"])

            logging.info('writing timer to queue ' + str(match_time))
            goal_queue.send_message(str(match_time))
            # Check if the goalqueue is clear before continuing. This is to keep the matchengine in sync with the user form. This way they should see a smooth
            # progression of the timer. Without this check matchengine tends to run fast and multiple second jumps are observed.
            while goal_queue.get_queue_properties().approximate_message_count > 0:
                time.sleep(0.05)

            match_time += 1

    elif game_stage == "penalties":
        # each team has 5 penalty kicks
        for penalty_number in range(5):
            for match in matches:
                for team in match:
                    if penalty_goal(75):
                        goal_queue.send_message(team["name"])
        # add a message to inform game that penalties have completed
        goal_queue.send_message("done")

    elif game_stage == "suddendeath":
        # sudden death penalties
        for match in matches:
            for team in match:
                if penalty_goal(75):
                    goal_queue.send_message(team["name"])
        # add a message to inform the game that a round of sudden death penalties has completed
        goal_queue.send_message("done")

    logging.info('matchengine complete')
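
For completeness, a hypothetical kick-off sender matching the message format described above: a plain comma-separated string with the game stage first, then the team names in fixture-list order. The queue name and team names are illustrative, and get_queue is the helper from Example #2:

teams = ["Sarajevo", "Zenica", "Banja Luka", "Pale"]
match_queue = get_queue("matchqueue", create_queue=True, clear_queue=True)
# "normal" selects a 90-minute match; "extra", "penalties" and "suddendeath" are the
# other stages handled by the match engine above.
match_queue.send_message("normal," + ",".join(teams))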