Example #1
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    personstring = msg.get_body().decode('utf-8')
    person = json.loads(personstring)

    graphclient = createclient()

    query = "g.addV('profile').property('id', '" + person[
        'id'] + "').property('name', '" + person[
            'name'] + "').property('city', '" + person['city'] + "')"
    insert_vertices(graphclient, query)

    connections = person.get('connections')

    if connections is not None:
        for connection in connections:
            query = "g.V('" + person['id'] + "').addE('" + connection[
                'relationship'] + "').to(g.V('" + connection[
                    'relatedperson'] + "'))"
            insert_edges(graphclient, query)
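Building the Gremlin query by string concatenation breaks on values containing apostrophes and is open to query injection. A minimal hardening sketch, assuming the same person dict (the gremlin_str helper is illustrative, not part of the original code):

def gremlin_str(value: str) -> str:
    # Escape backslashes and single quotes so a value such as
    # "O'Brien" cannot terminate the Gremlin string literal early.
    return value.replace('\\', '\\\\').replace("'", "\\'")

query = ("g.addV('profile')"
         f".property('id', '{gremlin_str(person['id'])}')"
         f".property('name', '{gremlin_str(person['name'])}')"
         f".property('city', '{gremlin_str(person['city'])}')")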
Example #2
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    blob_name = msg.get_body().decode('utf-8')
    connection_string = os.environ["snibirkedastor_STORAGE"]
    blob_service_client = BlobServiceClient.from_connection_string(
        connection_string)
    start_time = time.perf_counter()
    container_name = "canny"
    try:
        blob = BlobClient.from_connection_string(conn_str=connection_string,
                                                 container_name="grayscale",
                                                 blob_name=blob_name)
        data = blob.download_blob().content_as_bytes(max_concurrency=1)
        # np.fromstring is deprecated for binary data; use np.frombuffer
        dt = np.frombuffer(data, dtype='uint8')
        gray = cv2.imdecode(dt, cv2.IMREAD_UNCHANGED)
        edges = cv2.Canny(gray, 60, 120)
        pil_image = Image.fromarray(edges)
        img_byte_arr = BytesIO()
        pil_image.save(img_byte_arr, format='JPEG')
        img_byte_arr = img_byte_arr.getvalue()
        # upload the Canny result
        container_client = blob_service_client.get_container_client(
            container_name)
        container_client.upload_blob(name=blob_name, data=img_byte_arr)
    except Exception:
        logging.exception("Failed to process blob %s", blob_name)
    end_time = time.perf_counter()
    logging.info(
        f"Container Name: {container_name}, Blob Name: {blob_name}, Time Taken: {end_time - start_time}"
    )
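upload_blob raises if the "canny" container is missing or already holds a blob with that name. A minimal guard, assuming the same blob_service_client (ResourceExistsError comes from azure.core.exceptions):

from azure.core.exceptions import ResourceExistsError

container_client = blob_service_client.get_container_client("canny")
try:
    # Create the container on first use; ignore it if it already exists.
    container_client.create_container()
except ResourceExistsError:
    pass
container_client.upload_blob(name=blob_name, data=img_byte_arr,
                             overwrite=True)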
Example #3
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    decoded_message = msg.get_body().decode('utf-8')

    try:
        export_job_details = json.loads(decoded_message)
        export_job_id = export_job_details.get('exportJobId', '')
        chunk_id = export_job_details.get('chunkId', '')

        if export_job_id == '' or chunk_id == '':
            logging.warning('missing information to process a chunk')
            logging.warning(f'message sent - {decoded_message}')
            logging.warning(
                f'cannot process without export job ID and chunk ID -- found job ID {export_job_id} - chunk ID {chunk_id}'
            )
            logging.warning('Removing from asset poison queue')
            return

        assets_table = ExportsTableStore(connection_string, assets_table_name)
        if assets_table.get(export_job_id, chunk_id) is not None:
            assets_table.merge(export_job_id, str(chunk_id),
                               {'jobStatus': TenableStatus.failed.value})
        return
    except Exception as e:
        logging.warning('Could not process job or chunk')
        logging.warning(f'Raised this exception {e}')
        logging.warning('Removing from asset poison queue')
        return
Example #4
def main(msg: func.QueueMessage, context: func.Context) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    data = json.loads(msg.get_body().decode('utf-8'))
    if data['type'] == 'view_submission':
        view_id = data['view']['id']
        values = data['view']['state']['values']
        process_form(values, view_id, context.function_directory)
Example #5
async def main(msg: func.QueueMessage, starter: str) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    client = df.DurableOrchestrationClient(starter)

    instance_id = await client.start_new(
        orchestration_function_name="Orchestrator",
        instance_id=None,
        client_input=msg.get_body().decode('utf-8')
    )
    logging.info('Started orchestration with ID = %s', instance_id)
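The instance ID returned by start_new can later be used to inspect the orchestration. A minimal sketch of polling its status, assuming the same durable client (get_status is part of azure-functions-durable):

import logging

import azure.durable_functions as df


async def log_orchestration_status(client: df.DurableOrchestrationClient,
                                   instance_id: str) -> None:
    # get_status returns a DurableOrchestrationStatus snapshot.
    status = await client.get_status(instance_id)
    logging.info('Orchestration %s is %s', instance_id, status.runtime_status)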
Example #6
def main(msg: func.QueueMessage) -> None:
    logging.info('Python firstline request made: %s',
                 msg.get_body().decode('utf-8'))
    user = json.loads(msg.get_body().decode('utf-8'))

    # Authenticate with the Microsoft Graph
    authenticate()

    # Add user to Azure AD (avoid shadowing the built-in id)
    user_id = addUser(user)

    # Add employee to AD groups
    addUserToGroup(user_id)
Example #7
def main(msgIn: func.QueueMessage):
    try:
        args = json.loads(msgIn.get_body())
    except Exception:
        args = msgIn.get_json()

    os.environ['__PW_ACTIVATION_ID'] = str(msgIn.id)
    if 'remote_invoker' in args:
        logger.info("Pywren v{} - Starting invoker".format(__version__))
        function_invoker(args)
    else:
        logger.info("Pywren v{} - Starting execution".format(__version__))
        function_handler(args)

    return {"Execution": "Finished"}
Example #8
def main(eeimsg: func.QueueMessage) -> None:

    detection = json.loads(eeimsg.get_body().decode('utf-8'))
    logging.info(f"Queue trigger function processed item: {detection['id']}")

    # Set variables
    base_url = os.environ['baseUrl']
    username = os.environ['eeiUsername']
    password = os.environ['eeiPassword']
    domain = bool(strtobool(os.environ['domainLogin']))
    verify = bool(strtobool(os.environ['verifySsl']))
    workspace_id = os.environ['workspaceId']
    workspace_key = os.environ['workspaceKey']
    log_type = 'ESETEnterpriseInspector'

    # Connect to ESET Enterprise Inspector server
    ei = EnterpriseInspector(
        base_url=base_url,
        username=username,
        password=password,
        domain=domain,
        verify=verify
    )

    # Get detection details
    detection_details = ei.detection_details(detection)

    # Send data via data collector API
    body = json.dumps(detection_details)
    post_data(
        customer_id=workspace_id,
        shared_key=workspace_key,
        body=body,
        log_type=log_type
    )
Example #9
def main(msg: func.QueueMessage, contentStream: func.InputStream, gsheetQueue: func.Out[str], statsQueue: func.Out[str]) -> None:

    conn_string = os.environ["DBConnectionString"]

    # Only process SLO files
    if not msg.get_body().decode("utf-8").endswith("-slo.txt"):
        logging.error("Unrecognized file name. Skipping.")
        return

    try:
        content = contentStream.read()
        data = slo_file_parser.parse_slo_file(content)
        logging.info("Parsed SLO file.")

    except Exception as e:
        logging.error("Failed to parse SLO file.")
        logging.error(e)
        raise

    save_db(data, conn_string)

    statsQueue.set("Database Updated")
    logging.info("Added entry to stats queue.")

    gsheetQueue.set(json.dumps(data))
    logging.info("Added entry to GSheet queue.")
Example #10
def main(msg: func.QueueMessage) -> None:
    try:
        message = OATQueueMessage.parse_obj(msg.get_json())
        clp_id = message.clp_id
        detections = message.detections
        post_data = message.post_data

        token = utils.find_token_by_clp(clp_id, API_TOKENS)

        if not token:
            raise GeneralException(f'Token not found for clp: {clp_id}')

        # get workbench detail
        raw_logs = get_search_data(token, post_data)

        # transform data
        transformed_logs = _transfrom_logs(clp_id, detections, raw_logs)

        # send to log analytics
        log_analytics = LogAnalytics(WORKSPACE_ID, WORKSPACE_KEY, OAT_LOG_TYPE)
        log_analytics.post_data(transformed_logs)
        logging.info(f'Sent OAT data successfully. count: {len(transformed_logs)}.')

    except HTTPError as e:
        logging.exception(f'Failed to get search data! Exception: {e}')
        raise
    except Exception:
        logging.exception('Internal error.')
        raise
Example #11
def main(msg: func.QueueMessage) -> None:

    # we need a connection to arcgis, so start there
    gis = GIS(username=config.arcgis_username, password=config.arcgis_password)
    logging.info(f'Connected to GIS at {gis.url}.')

    # since the input message content is a single feature, extract the reach id from the feature
    feature_json = json.loads(msg.get_body())
    reach_id = feature_json['attributes']['reach_id']
    logging.info(f'Preparing to update reach id {reach_id}.')

    # create a reach object to work with
    reach = Reach.get_from_aw(reach_id)
    logging.info(f'Retrieved reach id {reach_id} from AW.')

    # do the hard work, trace it
    reach.snap_putin_and_takeout_and_trace(gis=gis)
    logging.info(f'Successfully traced {reach_id}.')

    # create layers to be updated
    lyr_centroid = ReachFeatureLayer(config.url_reach_centroid, gis)
    lyr_line = ReachFeatureLayer(config.url_reach_line, gis)
    lyr_points = ReachPointFeatureLayer(config.url_reach_points, gis)

    # update ArcGIS Online
    reach.publish_updates(lyr_line, lyr_centroid, lyr_points)
    logging.info(f'Successfully updated {reach_id} on ArcGIS Online.')
Example #12
def main(msg: func.QueueMessage) -> None:
    """
    Basically the startpoint for the whole application,
    parses the received event and calls the relevant functions.
    """
    configurer = azops.Configurer(options=azops.Options.from_env(),
                                  credentials=DefaultAzureCredential())
    qmsg = msg.get_body().decode("utf-8")
    event = UpdateEvent.parse_raw(qmsg)
    # Make sure there's at least one config
    if event.next_config is None and event.previous_config is None:
        logging.warning("No next or previous configuration")
        return

    if event.next_config:
        configurer.create_or_update_client_config(
            azops.CreateOrUpdateEvent(
                id=event.id,
                previous_config=event.previous_config,
                next_config=event.next_config,
            ))
        return
    if event.previous_config:
        configurer.delete_client_config(
            azops.DeleteEvent(
                id=event.id,
                previous_config=event.previous_config,
            ))
        return
Example #13
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    # get bearer token and authenticate to ADLSgen2 using Managed Identity of Azure Function
    credentials = MSIAuthentication(resource='https://storage.azure.com/')
    blob_service = BlockBlobService("testedlstorgen",
                                    token_credential=credentials)

    # get timestamp
    now = datetime.now()
    nowstr = now.strftime("%Y%m%dT%H%M%S%Z")
    key = round((now - datetime(2019, 1, 1, 0, 0, 0)).total_seconds())
    logging.info("key: " + str(key))

    # Add record to csv file. Notice that AppendBlob is not yet supported on ADLSgen2, see https://docs.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-known-issues
    records = blob_service.get_blob_to_text(
        "raw",
        "testprivcmddataflow/WideWorldImporters-Sales/address/SalesLTAddress.txt"
    ).content
    records += "\n" + str(
        key
    ) + ",8713 Yosemite Ct.,,Bothell,Washington,United States,98011,268af621-76d7-4c78-9441-144fd139821a,2006-07-01 00:00:00.0000000"
    blob_service.create_blob_from_text(
        "raw",
        "testprivcmddataflow/WideWorldImporters-Sales/address/SalesLTAddress.txt",
        records)

    # Create event such that ADFv2 is triggered
    blob_service.create_blob_from_text("adftrigger",
                                       "adftrigger" + nowstr + ".txt", "")
Example #14
def main(msg: func.QueueMessage) -> None:
    body = msg.get_json()
    date = body['date']
    year, month, _ = date.split("T")[0].split("-")
    city = body['city']
    country = body['country']
    temperature = body['temperature']
    # Obtain HOST and MASTER_KEY from: https://portal.azure.com/#@[user_email]/resource/subscriptions/[subscription_id]/resourceGroups/[resource_group_name]/providers/Microsoft.DocumentDb/databaseAccounts/[db_account_name]/keys
    HOST = "//TODO"
    MASTER_KEY = "//TODO"
    # construct your own doc_link and document as needed. This is a sample.
    # Please see the readme for details regarding the code below.
    client = cosmos_client.CosmosClient(HOST, {'masterKey': MASTER_KEY})
    database_link = 'dbs/' + 'weather-data'
    collection_link = database_link + '/colls/' + 'weather-data'
    doc_id = year + "-" + month + "___" + city + "-" + country
    doc_link = collection_link + '/docs/' + doc_id
    try:
        document = {
            'id': doc_id,
            'city': city,
            'country': country,
            'Temperature_List': [str(temperature)],
            'Month': month,
            'Year': year
        }
        client.CreateItem(collection_link, document)
    except Exception:
        response = client.ReadItem(doc_link)
        response["Temperature_List"].append(str(temperature))
        client.UpsertItem(collection_link, response)
    logging.info('Python queue trigger function processed a queue item:')
Example #15
def main(msg: func.QueueMessage) -> None:
    """
    Function triggered by an Azure Storage Queue. It runs when a message
    is added to the queue and can process that message.
    """
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
Example #16
def main(msg: func.QueueMessage, inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream]) -> None:

    blob_source_raw_name = msg.get_body().decode('utf-8')
    logging.info('Python queue trigger function processed a queue item: %s',
                 blob_source_raw_name)

    # thumbnail filename
    local_file_name_thumb = blob_source_raw_name[:-4] + "_thumb.jpg"

    #####
    # Download file from Azure Blob Storage
    #####
    with open(blob_source_raw_name, "w+b") as local_blob:
        local_blob.write(inputblob.read())

    #####
    # Use PIL to create a thumbnail
    #####
    new_size = 200, 200
    im = Image.open(local_blob.name)
    im.thumbnail(new_size)
    im.save(local_file_name_thumb, quality=95)

    # write the stream to the output file in blob storage
    with open(local_file_name_thumb, "rb") as new_thumbfile:
        outputblob.set(new_thumbfile.read())
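The same thumbnail can be produced entirely in memory instead of writing temporary files on the function host. A minimal sketch of that variant, assuming the same blob bindings (make_thumbnail is an illustrative name):

from io import BytesIO

from PIL import Image
import azure.functions as func


def make_thumbnail(inputblob: func.InputStream,
                   outputblob: func.Out[bytes]) -> None:
    # Decode the source image straight from the input stream.
    im = Image.open(BytesIO(inputblob.read()))
    im.thumbnail((200, 200))
    # JPEG has no alpha channel, so normalize to RGB first.
    im = im.convert("RGB")
    # Re-encode to JPEG in memory and hand the bytes to the output binding.
    buf = BytesIO()
    im.save(buf, format='JPEG', quality=95)
    outputblob.set(buf.getvalue())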
Example #17
def main(msg: func.QueueMessage) -> None:
    queue_client = QueueClient.from_connection_string(
        os.environ['AzureWebJobsStorage'],
        'oat-queue',
        message_encode_policy=TextBase64EncodePolicy(),
    )
    queue_client.send_message(msg.get_body().decode(), visibility_timeout=3600)
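TextBase64EncodePolicy matters here because the Functions queue trigger expects Base64-encoded message bodies by default. A minimal sketch of the matching read side, assuming the same queue and connection setting:

import os

from azure.storage.queue import QueueClient, TextBase64DecodePolicy

queue_client = QueueClient.from_connection_string(
    os.environ['AzureWebJobsStorage'],
    'oat-queue',
    message_decode_policy=TextBase64DecodePolicy(),
)
# Messages come back decoded to plain text.
for message in queue_client.receive_messages():
    print(message.content)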
Example #18
def main(msg: func.QueueMessage) -> None:
    """
    Entry point for this Azure Function.
    """

    message_content: str = msg.get_body().decode('utf-8')
    logging.info('Python queue trigger function processed a queue item: %s',
                 message_content)

    try:
        # Extract batch info from queue message
        decoded_batch = jsonpickle.decode(message_content)
        json_value: dict = json.loads(decoded_batch)

        # Validate batch
        blob_service_client = BlobServiceClient.from_connection_string(
            os.getenv('DataStorage'))
        container_client = blob_service_client.get_container_client(
            os.getenv('DataContainer'))
        blob_client: BlobStorageClient = BlobStorageClient(container_client)
        batch_validation: BatchValidation = BatchValidation(
            blob_client, json_value)
        batch_validation.validate()

        logging.info('Done validating batch')

    except Exception as ex:
        logging.exception('EXCEPTION while processing queue item: %s',
                          message_content,
                          exc_info=ex)
Example #19
File: __init__.py Project: sshyran/onefuzz
def main(msg: func.QueueMessage, dashboard: func.Out[str]) -> None:
    body = msg.get_body()
    update = Update.parse_obj(json.loads(body))
    execute_update(update)

    events = get_events()
    if events:
        dashboard.set(events)
Example #20
def main(msg: func.QueueMessage, dashboard: func.Out[str]) -> None:
    body = msg.get_body()
    obj = WebhookMessageQueueObj.parse_obj(json.loads(body))
    WebhookMessageLog.process_from_queue(obj)

    events = get_events()
    if events:
        dashboard.set(events)
Example #21
def main(msg: func.QueueMessage) -> None:
    """
    Main function, triggered by Azure Storage Queue, parsed queue content
    :param msg: func.QueueMessage
    :return: None
    """
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    get_config_values()

    # Get blob file content
    content = json.loads(msg.get_body().decode('utf-8'))
    filepath = content['data']['url']

    container_name, blob_file_path = get_blob_info_from_url(filepath)
    dest_container_name, dest_blob_file_path = get_new_blob_move_file_path(container_name, blob_file_path)
    retry_times = get_blob_retry_times(filepath)
    retry_times += 1

    # Initialize Track Event/Metrics to App insight
    tc = TelemetryClient(APP_INSIGHT_KEY)
    tc.context.application.ver = '1.0'
    tc.context.properties["PROCESS_PROGRAM"] = "XDR_SDL_INGESTION_ERR_HANDLER_V01A"
    tc.context.properties["PROCESS_START"] = time.time()

    # Do retry (move file to retry folder)
    # TODO: Should filter out the non-retry case
    logging.info("Retry the blob ingest to ADX, blob_path: %s", filepath)
    retry_blob_ingest_to_adx(container_name, blob_file_path, dest_container_name, dest_blob_file_path)

    if retry_times > MAX_INGEST_RETRIES_TIMES:
        logging.error("Retry blob ingest to ADX hit the retries limit %s, blob_path: %s",
                      MAX_INGEST_RETRIES_TIMES, filepath)
        tc.track_event(RETRY_END_IN_FAIL_EVENT_NAME,
                       {'FILE_PATH': filepath},
                       {RETRY_END_IN_FAIL_EVENT_NAME + '_COUNT': 1})
        tc.flush()
        return

    tc.track_event(RETRY_EVENT_NAME,
                   {'FILE_PATH': filepath},
                   {RETRY_EVENT_NAME + '_COUNT': 1})
    tc.flush()

    logging.info("ADX error handler execution succeeded, blob path: %s, trial count: %s",
                 filepath, retry_times)
Example #22
def main(msg: func.QueueMessage, inputblob: func.InputStream, outputblob: func.Out[func.InputStream]) -> None:

    blob_source_raw_name = msg.get_body().decode('utf-8')
    logging.info('Python queue trigger function processed a queue item: %s', blob_source_raw_name)  

    content = inputblob.read()
    logging.info('Blob content: %s', content)
    outputblob.set(content)
Example #23
def main(msg: func.QueueMessage, inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream]) -> None:
    logging.info('Queue item id:%s, body:%s, expiration_time:%s', msg.id,
                 msg.get_body().decode('utf-8'), msg.expiration_time)
    #https://github.com/Azure/azure-functions-python-worker/issues/576
    # logging.info(f'Python Queue trigger function processed : {inputblob.name}')
    clear_text = inputblob.read()
    logging.info(f'Clear text: {clear_text}')
    # pass the bytes that were read, not the already-consumed input stream
    outputblob.set(clear_text)
Example #24
def main(msg: func.QueueMessage):

    logging.info('IngestBlobs function processed a request.')

    INGEST_URI = os.environ['KUSTO_INGEST_URI']
    DATABASE = os.environ['KUSTO_DATABASE']
    AAD_TENANT_ID = os.environ['AAD_TENANT_ID']
    APPLICATION_ID = os.environ['APPLICATION_ID']
    APPLICATION_SECRET = os.environ['APPLICATION_SECRET']
    MAPPINGS_FILE = os.environ['MAPPINGS_FILE']
    STORAGE_NAME = os.environ['STORAGE_ACCOUNT_NAME']
    STORAGE_KEY = os.environ['STORAGE_ACCOUNT_KEY']
    CONTAINER = os.environ['DATA_CONTAINER']
    STATUS_TABLE = os.environ['STATUS_TABLE']

    pathMappings = os.path.join(Path.cwd(), MAPPINGS_FILE)
    logging.info("Mappings file path: %s" % pathMappings)

    blobToIngest = None
    try:
        blobToIngest = storage_helpers.createBlobFromMessage(msg.get_body())
        logging.info("Ingesting blob: %s" % str(blobToIngest))
    except Exception as e:
        logging.error("Could not get blobToIngest from queue message: %s" % e)

    ingestKCSB = kusto_helpers.createKustoConnection(INGEST_URI, AAD_TENANT_ID,
                                                     APPLICATION_ID,
                                                     APPLICATION_SECRET)
    kustoClient = None
    if ingestKCSB is not None:
        kustoClient = kusto_helpers.getKustoClient(ingestKCSB)

    tableService = storage_helpers.createTableService(STORAGE_NAME,
                                                      STORAGE_KEY)

    if kustoClient is not None and blobToIngest is not None and tableService is not None:

        # Ingest blob in ADX
        blobToIngest['format'], blobToIngest['ingestionMapping'], blobToIngest[
            'table'] = kusto_helpers.getMappingsBlob(blobToIngest['name'],
                                                     pathMappings)
        logging.info('Queuing blob %s for ingestion to table %s' %
                     (blobToIngest['name'], blobToIngest['table']))
        additionalProperties = {'ignoreFirstRecord': 'true'}
        kusto_helpers.ingestBlob(kustoClient, DATABASE, blobToIngest,
                                 additionalProperties)
        # Update blob status in status table to 'ingested'
        newBlobStatus = {
            'PartitionKey': CONTAINER,
            'RowKey': blobToIngest['name'],
            'status': 'ingested'
        }
        storage_helpers.insertOrMergeEntity(tableService, STATUS_TABLE,
                                            newBlobStatus)

    else:
        logging.warning("Did not ingest blob successfully.")
Example #25
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    #load data from json string
    data = json.loads(msg.get_body().decode('utf-8'))
    if 'provision-zoom' in data['command']:
        group_url = data['text']
        result = create_zoom_account(group_url)

        #notify slack that this was done...
        msgtext = f"Provision Zoom access for {group_url} result: {result}"
        response_url = data['response_url']
        headers = {'Content-type': 'application/json'}
        msgdata = {'text': msgtext, 'response_type': 'ephemeral'}
        requests.post(response_url, data=json.dumps(msgdata), headers=headers)
    else:
        logging.info(f"Request was not a provision-zoom command: {data}")
Example #26
def main(msg: func.QueueMessage) -> None:
    tweets_enabled = os.environ["EnableTweets"]

    message = msg.get_body().decode("utf-8")

    if tweets_enabled == "true":
        tweet(message)
        logging.info('Tweeted!')
    else:
        logging.info(f'Would have tweeted message:\n{message}')
Example #27
def main(msgIn: func.QueueMessage, msgOut: func.Out[func.QueueMessage]):
    try:
        args = json.loads(msgIn.get_body())
    except Exception:
        args = msgIn.get_json()

    os.environ['__LITHOPS_ACTIVATION_ID'] = str(msgIn.id)
    setup_lithops_logger(args['log_level'])

    if 'get_preinstalls' in args:
        logger.info("Lithops v{} - Generating metadata".format(__version__))
        runtime_meta = get_runtime_preinstalls()
        msgOut.set(json.dumps(runtime_meta))
    elif 'remote_invoker' in args:
        logger.info("Lithops v{} - Starting invoker".format(__version__))
        function_invoker(args)
    else:
        logger.info("Lithops v{} - Starting execution".format(__version__))
        function_handler(args)
Example #28
def main(msg: func.QueueMessage, dashboard: func.Out[str]) -> None:
    body = msg.get_body()
    logging.info("heartbeat: %s", body)

    raw = json.loads(body)
    Heartbeat.add(HeartbeatEntry.parse_obj(raw))

    event = get_event()
    if event:
        dashboard.set(event)
Example #29
def main(msg: func.QueueMessage) -> str:
    image_url = msg.get_body().decode('utf-8')
    results = predict_image_from_url(image_url)
    logging.info(f"{results['predictedTagName']} {image_url}")
    return json.dumps({
        'target': 'newResult',
        'arguments': [{
            'predictedTagName': results['predictedTagName'],
            'url': image_url
        }]
    })
Example #30
def main(msg: func.QueueMessage) -> None:
    body = msg.get_body().decode('utf-8')
    body_json = json.loads(body)
    table_service = TableService(connection_string=os.environ["TableStorage"])

    logging.info('Python queue trigger function processed a queue item: %s',
                 body)

    task = Entity()
    task.PartitionKey = body_json["party"]
    task.RowKey = str(uuid.uuid4())
    task.count = body_json["count"]
    task.electoralPlace = body_json["electoralPlace"]
    task.electoralUnit = body_json["electoralUnit"]

    table_service.insert_entity('votes', task)

    # datetime object containing current date and time
    now = datetime.now()
    logging.info(now.strftime("%d/%m/%Y %H:%M:%S") + ' - Processing done')
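TableService and Entity come from the legacy Azure Table SDK; the current azure-data-tables package performs the same insert with a plain dict. A minimal sketch, assuming the same TableStorage connection string, votes table, and body_json payload:

import os
import uuid

from azure.data.tables import TableClient

table_client = TableClient.from_connection_string(
    os.environ["TableStorage"], table_name="votes")

# Entities are plain dictionaries keyed by PartitionKey/RowKey.
table_client.create_entity(entity={
    "PartitionKey": body_json["party"],
    "RowKey": str(uuid.uuid4()),
    "count": body_json["count"],
    "electoralPlace": body_json["electoralPlace"],
    "electoralUnit": body_json["electoralUnit"],
})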
Example #31
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))