def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    personstring = msg.get_body().decode('utf-8')
    person = json.loads(personstring)
    graphclient = createclient()

    # Insert the person as a 'profile' vertex.
    query = ("g.addV('profile')"
             ".property('id', '" + person['id'] + "')"
             ".property('name', '" + person['name'] + "')"
             ".property('city', '" + person['city'] + "')")
    insert_vertices(graphclient, query)

    # Insert one edge per connection, if any connections are present.
    connections = person.get('connections')
    if connections is not None:
        for connection in connections:
            query = ("g.V('" + person['id'] + "')"
                     ".addE('" + connection['relationship'] + "')"
                     ".to(g.V('" + connection['relatedperson'] + "'))")
            insert_edges(graphclient, query)

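# Hedged sketch (not part of the original source): one possible shape for the
# createclient/insert_vertices/insert_edges helpers used above, assuming the
# gremlin_python driver against a Cosmos DB Gremlin endpoint. The environment
# variable names are illustrative assumptions, not settings from the original.
import os
from gremlin_python.driver import client, serializer

def createclient():
    # Endpoint and credentials are assumed to come from app settings.
    return client.Client(
        os.environ['GREMLIN_ENDPOINT'], 'g',
        username=os.environ['GREMLIN_USERNAME'],
        password=os.environ['GREMLIN_PASSWORD'],
        message_serializer=serializer.GraphSONSerializersV2d0())

def insert_vertices(graphclient, query):
    # Submit the traversal and block until the server acknowledges it.
    callback = graphclient.submitAsync(query)
    callback.result().all().result()

# Edge insertion is identical to vertex insertion from the driver's point of view.
insert_edges = insert_vertices
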
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    decoded_message = msg.get_body().decode('utf-8')
    try:
        export_job_details = json.loads(decoded_message)
        export_job_id = export_job_details.get('exportJobId', '')
        chunk_id = export_job_details.get('chunkId', '')
        if export_job_id == '' or chunk_id == '':
            logging.warning('missing information to process a chunk')
            logging.warning(f'message sent - {decoded_message}')
            logging.warning(
                f'cannot process without export job ID and chunk ID -- '
                f'found job ID {export_job_id} - chunk ID {chunk_id}')
            logging.warning('Removing from asset poison queue')
            return

        assets_table = ExportsTableStore(connection_string, assets_table_name)
        if assets_table.get(export_job_id, chunk_id) is not None:
            # Mark the chunk as failed so it is not retried.
            assets_table.merge(export_job_id, str(chunk_id),
                               {'jobStatus': TenableStatus.failed.value})
        return
    except Exception as e:
        logging.warning('Could not process job or chunk')
        logging.warning(f'Raised this exception {e}')
        logging.warning('Removing from asset poison queue')
        return

def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    blob_name = msg.get_body().decode('utf-8')
    connection_string = os.environ["snibirkedastor_STORAGE"]
    blob_service_client = BlobServiceClient.from_connection_string(connection_string)

    start_time = time.perf_counter()
    try:
        # Download the grayscale source image.
        blob = BlobClient.from_connection_string(conn_str=connection_string,
                                                 container_name="grayscale",
                                                 blob_name=blob_name)
        data = blob.download_blob().content_as_bytes(max_concurrency=1)

        # Decode the image and run Canny edge detection.
        dt = np.frombuffer(data, dtype='uint8')
        gray = cv2.imdecode(dt, cv2.IMREAD_UNCHANGED)
        edges = cv2.Canny(gray, 60, 120)

        # Re-encode the result as JPEG.
        pil_image = Image.fromarray(edges)
        img_byte_arr = BytesIO()
        pil_image.save(img_byte_arr, format='JPEG')
        img_byte_arr = img_byte_arr.getvalue()

        # Upload the edge-detected image to the 'canny' container.
        container_name = "canny"
        container_client = blob_service_client.get_container_client(container_name)
        container_client.upload_blob(name=blob.blob_name, data=img_byte_arr)
    except Exception:
        logging.exception("Failed to process blob %s", blob_name)
        return
    end_time = time.perf_counter()
    logging.info(
        f"Container Name: {container_name}, Blob Name: {blob.blob_name}, "
        f"Time Taken: {end_time - start_time}")

def main(msg: func.QueueMessage, context: func.Context) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    data = json.loads(msg.get_body().decode('utf-8'))
    if data['type'] == 'view_submission':
        view_id = data['view']['id']
        values = data['view']['state']['values']
        process_form(values, view_id, context.function_directory)

async def main(msg: func.QueueMessage, starter: str) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))
    client = df.DurableOrchestrationClient(starter)
    instance_id = await client.start_new(
        orchestration_function_name="Orchestrator",
        instance_id=None,
        client_input=msg.get_body().decode('utf-8'),
    )

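# Hedged sketch (not from the original source): a minimal "Orchestrator" durable
# function that the starter above could target, assuming the azure-functions-durable
# package. The "ProcessItem" activity name is an illustrative assumption.
import azure.durable_functions as df

def orchestrator_function(context: df.DurableOrchestrationContext):
    # The queue message body passed as client_input is available as the orchestration input.
    payload = context.get_input()
    result = yield context.call_activity("ProcessItem", payload)
    return result

main = df.Orchestrator.create(orchestrator_function)
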
def main(msg: func.QueueMessage) -> None:
    logging.info('Python firstline request made: %s',
                 msg.get_body().decode('utf-8'))
    user = json.loads(msg.get_body().decode('utf-8'))

    # Authenticate with the Microsoft Graph
    authenticate()

    # Add user to Azure AD
    id = addUser(user)

    # Add employee to AD groups
    addUserToGroup(id)

def main(eeimsg: func.QueueMessage) -> None:
    detection = json.loads(eeimsg.get_body().decode('utf-8'))
    logging.info(f"Queue trigger function processed item: {detection['id']}")

    # Set variables
    base_url = os.environ['baseUrl']
    username = os.environ['eeiUsername']
    password = os.environ['eeiPassword']
    domain = bool(strtobool(os.environ['domainLogin']))
    verify = bool(strtobool(os.environ['verifySsl']))
    workspace_id = os.environ['workspaceId']
    workspace_key = os.environ['workspaceKey']
    log_type = 'ESETEnterpriseInspector'

    # Connect to ESET Enterprise Inspector server
    ei = EnterpriseInspector(
        base_url=base_url,
        username=username,
        password=password,
        domain=domain,
        verify=verify
    )

    # Get detection details
    detection_details = ei.detection_details(detection)

    # Send data via data collector API
    body = json.dumps(detection_details)
    post_data(
        customer_id=workspace_id,
        shared_key=workspace_key,
        body=body,
        log_type=log_type
    )

def main(msg: func.QueueMessage, contentStream: func.InputStream,
         gsheetQueue: func.Out[str], statsQueue: func.Out[str]) -> None:
    conn_string = os.environ["DBConnectionString"]

    # Only process SLO files
    if not msg.get_body().decode("utf-8").endswith("-slo.txt"):
        logging.error("Unrecognized file name. Skipping.")
        return

    try:
        content = contentStream.read()
        data = slo_file_parser.parse_slo_file(content)
        logging.info("Parsed SLO file.")
    except Exception as e:
        logging.error("Failed to parse SLO file.")
        logging.error(e)
        raise

    save_db(data, conn_string)
    statsQueue.set("Database Updated")
    logging.info("Added entry to stats queue.")
    gsheetQueue.set(json.dumps(data))
    logging.info("Added entry to GSheet queue.")

def main(msg: func.QueueMessage) -> None:
    # we need a connection to arcgis, so start there
    gis = GIS(username=config.arcgis_username, password=config.arcgis_password)
    logging.info(f'Connected to GIS at {gis.url}.')

    # since the input message content is a single feature, extract the reach id from the feature
    feature_json = json.loads(msg.get_body())
    reach_id = feature_json['attributes']['reach_id']
    logging.info(f'Preparing to update reach id {reach_id}.')

    # create a reach object to work with
    reach = Reach.get_from_aw(reach_id)
    logging.info(f'Retrieved reach id {reach_id} from AW.')

    # do the hard work, trace it
    reach.snap_putin_and_takeout_and_trace(gis=gis)
    logging.info(f'Successfully traced {reach_id}.')

    # create layers to be updated
    lyr_centroid = ReachFeatureLayer(config.url_reach_centroid, gis)
    lyr_line = ReachFeatureLayer(config.url_reach_line, gis)
    lyr_points = ReachPointFeatureLayer(config.url_reach_points, gis)

    # update ArcGIS Online
    reach.publish_updates(lyr_line, lyr_centroid, lyr_points)
    logging.info(f'Successfully updated {reach_id} on ArcGIS Online.')

def main(msg: func.QueueMessage) -> None: """ Basically the startpoint for the whole application, parses the received event and calls the relevant functions. """ configurer = azops.Configurer(options=azops.Options.from_env(), credentials=DefaultAzureCredential()) qmsg = msg.get_body().decode("utf-8") event = UpdateEvent.parse_raw(qmsg) # Make sure there's at least one config if event.next_config is None and event.previous_config is None: logging.warning("No next or previous configuration") return if event.next_config: configurer.create_or_update_client_config( azops.CreateOrUpdateEvent( id=event.id, previous_config=event.previous_config, next_config=event.next_config, )) return if event.previous_config: configurer.delete_client_config( azops.DeleteEvent( id=event.id, previous_config=event.previous_config, )) return
def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    # Get a bearer token and authenticate to ADLS gen2 using the Managed Identity of the Azure Function
    credentials = MSIAuthentication(resource='https://storage.azure.com/')
    blob_service = BlockBlobService("testedlstorgen", token_credential=credentials)

    # Get timestamp
    now = datetime.now()
    nowstr = datetime.strftime(now, "%Y%m%dT%H%M%S%Z")
    key = round((now - datetime(2019, 1, 1, 0, 0, 0)).total_seconds())
    logging.info("key: " + str(key))

    # Add a record to the csv file. Notice that AppendBlob is not yet supported on ADLS gen2,
    # see https://docs.microsoft.com/en-us/azure/storage/blobs/data-lake-storage-known-issues
    records = blob_service.get_blob_to_text(
        "raw",
        "testprivcmddataflow/WideWorldImporters-Sales/address/SalesLTAddress.txt"
    ).content
    records += "\n" + str(key) + ",8713 Yosemite Ct.,,Bothell,Washington,United States,98011," \
               "268af621-76d7-4c78-9441-144fd139821a,2006-07-01 00:00:00.0000000"
    blob_service.create_blob_from_text(
        "raw",
        "testprivcmddataflow/WideWorldImporters-Sales/address/SalesLTAddress.txt",
        records)

    # Create an event so that ADFv2 is triggered
    blob_service = BlockBlobService("testedlstorgen", token_credential=credentials)
    blob_service.create_blob_from_text("adftrigger", "adftrigger" + nowstr + ".txt", "")

def main(msg: func.QueueMessage) -> None: """ Azure Storage Queue をトリガとする関数。キューにメッセージが追加されたときに 起動され、メッセージを処理できる。 """ logging.info('Python queue trigger function processed a queue item: %s', msg.get_body().decode('utf-8'))
def main(msg: func.QueueMessage) -> None:
    # Re-queue the message body onto the 'oat-queue' queue, hidden for one hour
    # before it becomes visible to consumers.
    queue_client = QueueClient.from_connection_string(
        os.environ['AzureWebJobsStorage'],
        'oat-queue',
        message_encode_policy=TextBase64EncodePolicy(),
    )
    queue_client.send_message(msg.get_body().decode(), visibility_timeout=3600)

def main(msg: func.QueueMessage) -> None: """ Entry point for this Azure Function. """ message_content: str = msg.get_body().decode('utf-8') logging.info('Python queue trigger function processed a queue item: %s', message_content) try: # Extract batch info from queue message dencoded_batch = jsonpickle.decode(message_content) json_value: dict = json.loads(dencoded_batch) # Validate batch blob_service_client = BlobServiceClient.from_connection_string( os.getenv('DataStorage')) container_client = blob_service_client.get_container_client( os.getenv('DataContainer')) blob_client: BlobStorageClient = BlobStorageClient(container_client) batch_validation: BatchValidation = BatchValidation( blob_client, json_value) batch_validation.validate() logging.info('Done validating batch') except Exception as ex: logging.exception('EXCEPTION while processing queue item: %s', message_content, exc_info=ex)
def main(msg: func.QueueMessage, inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream]) -> None:
    blob_source_raw_name = msg.get_body().decode('utf-8')
    logging.info('Python queue trigger function processed a queue item: %s',
                 blob_source_raw_name)

    # Thumbnail filename
    local_file_name_thumb = blob_source_raw_name[:-4] + "_thumb.jpg"

    # Download the file from Azure Blob Storage to a local file
    with open(blob_source_raw_name, "w+b") as local_blob:
        local_blob.write(inputblob.read())

    # Use PIL to create a thumbnail
    new_size = 200, 200
    im = Image.open(local_blob.name)
    im.thumbnail(new_size)
    im.save(local_file_name_thumb, quality=95)

    # Write the thumbnail to the output blob binding
    with open(local_file_name_thumb, "rb") as new_thumbfile:
        outputblob.set(new_thumbfile.read())

def main(msg: func.QueueMessage, dashboard: func.Out[str]) -> None:
    body = msg.get_body()
    obj = WebhookMessageQueueObj.parse_obj(json.loads(body))
    WebhookMessageLog.process_from_queue(obj)
    events = get_events()
    if events:
        dashboard.set(events)

def main(msg: func.QueueMessage) -> None: """ Main function, triggered by Azure Storage Queue, parsed queue content :param msg: func.QueueMessage :return: None """ logging.info('Python queue trigger function processed a queue item: %s', msg.get_body().decode('utf-8')) get_config_values() # Get blob file content content = json.loads(msg.get_body().decode('utf-8')) filepath = content['data']['url'] container_name, blob_file_path = get_blob_info_from_url(filepath) dest_container_name, dest_blob_file_path = get_new_blob_move_file_path(container_name, blob_file_path) retry_times = get_blob_retry_times(filepath) retry_times += 1 # Initialize Track Event/Metrics to App insight tc = TelemetryClient(APP_INSIGHT_KEY) tc.context.application.ver = '1.0' tc.context.properties["PROCESS_PROGRAM"] = "XDR_SDL_INGESTION_ERR_HANDLER_V01A" tc.context.properties["PROCESS_START"] = time.time() # Do retry (move file to retry folder) # TODO: Should filter out the non-retry case logging.info("Retry the blob ingest to ADX, blob_path: %s", filepath) retry_blob_ingest_to_adx(container_name, blob_file_path, dest_container_name, dest_blob_file_path) if retry_times > MAX_INGEST_RETRIES_TIMES: logging.error("Retry blob ingest to ADX hit the retries limit %s, blob_path: %s", MAX_INGEST_RETRIES_TIMES, filepath) tc.track_event(RETRY_END_IN_FAIL_EVENT_NAME, {'FILE_PATH': filepath}, {RETRY_END_IN_FAIL_EVENT_NAME + '_COUNT': 1}) tc.flush() return tc.track_event(RETRY_EVENT_NAME, {'FILE_PATH': filepath}, {RETRY_EVENT_NAME + '_COUNT': 1}) tc.flush() logging.info("ADX error handler execution succeeded, blob path: %s, trial count: %s", filepath, retry_times)
def main(msg: func.QueueMessage, inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream]) -> None:
    blob_source_raw_name = msg.get_body().decode('utf-8')
    logging.info('Python queue trigger function processed a queue item: %s',
                 blob_source_raw_name)

    # Copy the input blob's content straight to the output blob.
    content = inputblob.read()
    logging.info('Blob content: %s', content)
    outputblob.set(content)

def main(msg: func.QueueMessage, dashboard: func.Out[str]) -> None:
    body = msg.get_body()
    update = Update.parse_obj(json.loads(body))
    execute_update(update)
    events = get_events()
    if events:
        dashboard.set(events)

def main(msg: func.QueueMessage, inputblob: func.InputStream,
         outputblob: func.Out[func.InputStream]) -> None:
    logging.info('Queue item id:%s, body:%s, expiration_time:%s',
                 msg.id, msg.get_body().decode('utf-8'), msg.expiration_time)

    # https://github.com/Azure/azure-functions-python-worker/issues/576
    # logging.info(f'Python Queue trigger function processed : {inputblob.name}')

    clear_text = inputblob.read()
    logging.info(f'Clear text: {clear_text}')
    outputblob.set(inputblob)

def main(msg: func.QueueMessage):
    logging.info('IngestBlobs function processed a request.')

    INGEST_URI = os.environ['KUSTO_INGEST_URI']
    DATABASE = os.environ['KUSTO_DATABASE']
    AAD_TENANT_ID = os.environ['AAD_TENANT_ID']
    APPLICATION_ID = os.environ['APPLICATION_ID']
    APPLICATION_SECRET = os.environ['APPLICATION_SECRET']
    MAPPINGS_FILE = os.environ['MAPPINGS_FILE']
    STORAGE_NAME = os.environ['STORAGE_ACCOUNT_NAME']
    STORAGE_KEY = os.environ['STORAGE_ACCOUNT_KEY']
    CONTAINER = os.environ['DATA_CONTAINER']
    STATUS_TABLE = os.environ['STATUS_TABLE']

    pathMappings = os.path.join(Path.cwd(), MAPPINGS_FILE)
    logging.info("Mappings file path: %s" % pathMappings)

    blobToIngest = None
    try:
        blobToIngest = storage_helpers.createBlobFromMessage(msg.get_body())
        logging.info("Ingesting blob: %s" % str(blobToIngest))
    except Exception as e:
        logging.error("Could not get blobToIngest from queue message: %s" % e)

    ingestKCSB = kusto_helpers.createKustoConnection(INGEST_URI, AAD_TENANT_ID,
                                                     APPLICATION_ID, APPLICATION_SECRET)
    kustoClient = None
    if ingestKCSB is not None:
        kustoClient = kusto_helpers.getKustoClient(ingestKCSB)
    tableService = storage_helpers.createTableService(STORAGE_NAME, STORAGE_KEY)

    if kustoClient is not None and blobToIngest is not None and tableService is not None:
        # Ingest blob in ADX
        blobToIngest['format'], blobToIngest['ingestionMapping'], blobToIngest['table'] = \
            kusto_helpers.getMappingsBlob(blobToIngest['name'], pathMappings)
        logging.info('Queuing blob %s for ingestion to table %s'
                     % (blobToIngest['name'], blobToIngest['table']))
        additionalProperties = {'ignoreFirstRecord': 'true'}
        kusto_helpers.ingestBlob(kustoClient, DATABASE, blobToIngest, additionalProperties)

        # Update blob status in status table to 'ingested'
        newBlobStatus = {
            'PartitionKey': CONTAINER,
            'RowKey': blobToIngest['name'],
            'status': 'ingested'
        }
        storage_helpers.insertOrMergeEntity(tableService, STATUS_TABLE, newBlobStatus)
    else:
        logging.warning("Did not ingest blob successfully.")

def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    # Load data from the JSON string
    data = json.loads(msg.get_body().decode('utf-8'))
    if 'provision-zoom' in data['command']:
        group_url = data['text']
        result = create_zoom_account(group_url)

        # Notify Slack that this was done
        msgtext = f"Provision Zoom access for {group_url} result: {result}"
        response_url = data['response_url']
        headers = {'Content-type': 'application/json'}
        msgdata = {'text': msgtext, 'response_type': 'ephemeral'}
        requests.post(response_url, data=json.dumps(msgdata), headers=headers)
    else:
        logging.info(f"Request was not a provision-zoom command: {data}")

def main(msg: func.QueueMessage, dashboard: func.Out[str]) -> None: body = msg.get_body() logging.info("heartbeat: %s", body) raw = json.loads(body) Heartbeat.add(HeartbeatEntry.parse_obj(raw)) event = get_event() if event: dashboard.set(event)
def main(msg: func.QueueMessage) -> None:
    tweets_enabled = os.environ["EnableTweets"]
    message = msg.get_body().decode("utf-8")
    if tweets_enabled == "true":
        tweet(message)
        logging.info('Tweeted!')
    else:
        logging.info(f'Would have tweeted message:\n{message}')

def main(msg: func.QueueMessage) -> None:
    body = msg.get_body().decode('utf-8')
    body_json = json.loads(body)
    table_service = TableService(connection_string=os.environ["TableStorage"])
    logging.info('Python queue trigger function processed a queue item: %s', body)

    # Build the table entity for this vote.
    task = Entity()
    task.PartitionKey = body_json["party"]
    task.RowKey = str(uuid.uuid4())
    task.count = body_json["count"]
    task.electoralPlace = body_json["electoralPlace"]
    task.electoralUnit = body_json["electoralUnit"]
    table_service.insert_entity('votes', task)

    # datetime object containing current date and time
    now = datetime.now()
    logging.info(now.strftime("%d/%m/%Y %H:%M:%S") + ' - Processing done')

def main(msg: func.QueueMessage): logging.info(f"Python queue-triggered function received a message!") message = msg.get_body().decode('utf-8') logging.info(f"Message: {message}") # Publish an event url = f'{dapr_url}/publish/myTopic' content = {"message": message} logging.info(f'POST to {url} with content {json.dumps(content)}') p = requests.post(url, json=content) logging.info(f'Got response code {p.status_code}')
def main(msg: func.QueueMessage) -> None:
    event = json.loads(msg.get_body())
    last_try = msg.dequeue_count == MAX_DEQUEUE_COUNT

    # check type first before calling Azure APIs
    if event["eventType"] != "Microsoft.Storage.BlobCreated":
        return
    if event["topic"] not in corpus_accounts():
        return

    file_added(event, last_try)

def main(msg: func.QueueMessage) -> str:
    image_url = msg.get_body().decode('utf-8')
    results = predict_image_from_url(image_url)
    logging.info(f"{results['predictedTagName']} {image_url}")
    return json.dumps({
        'target': 'newResult',
        'arguments': [{
            'predictedTagName': results['predictedTagName'],
            'url': image_url
        }]
    })

def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))

    # Get raw parameters
    raw = msg.get_body().decode('utf-8')
    logging.info(raw)
    msg_json = json.loads(raw)

    # Parse parameters
    storage_account_source = os.environ["par_storage_account_name_source"]
    storage_account_source_url = "https://" + storage_account_source + ".blob.core.windows.net"
    storage_account_backup = os.environ["par_storage_account_name_backup"]
    storage_account_backup_url = "https://" + storage_account_backup + ".blob.core.windows.net"
    container_source = msg_json['container']
    container_backup = msg_json['container'] + "bak"
    blob_name = msg_json["blob_name"]
    blob_etag = msg_json["etag"]

    # Create blob clients for backup and source
    credential = DefaultAzureCredential()
    client_backup = BlobServiceClient(account_url=storage_account_backup_url, credential=credential)
    blob_backup = client_backup.get_blob_client(container=container_backup, blob=blob_name)
    client_source = BlobServiceClient(account_url=storage_account_source_url, credential=credential)
    blob_source = client_source.get_blob_client(container=container_source, blob=blob_name)
    create_container_backup_if_not_exists(client_backup, container_backup)

    # Stop if the source blob has changed since the message was enqueued
    if source_blob_changed(blob_source, blob_name, container_source, blob_etag):
        return

    # Start copying using ADFv2
    try:
        if not USING_BLOB_LEASE:
            copy_adf_blob_source_backup(blob_source, client_backup, blob_backup, blob_etag)
        else:
            # Copy with a blob lease, which locks the file
            copy_with_lease(blob_source, client_backup, blob_backup, blob_etag)
    except Exception:
        logging.exception("copy failed")

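# Hedged sketch (not from the original source): one possible implementation of the
# copy_with_lease helper used above, assuming azure-storage-blob v12 BlobClient
# objects. It locks the source blob with an infinite lease for the duration of a
# server-side copy, then releases it; the etag check is left as a comment.
def copy_with_lease(blob_source, client_backup, blob_backup, blob_etag):
    # Acquire an infinite lease so the source cannot be modified mid-copy.
    lease = blob_source.acquire_lease(lease_duration=-1)
    try:
        # Server-side copy of the source blob into the backup container.
        # blob_etag could additionally be enforced via the source match
        # conditions of start_copy_from_url.
        blob_backup.start_copy_from_url(blob_source.url)
    finally:
        lease.release()
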
def main(msg: func.QueueMessage) -> None:
    body = msg.get_body()
    logging.info(PROXY_LOG_PREFIX + "heartbeat: %s", body)
    raw = json.loads(body)
    heartbeat = ProxyHeartbeat.parse_obj(raw)
    proxy = Proxy.get(heartbeat.region, heartbeat.proxy_id)
    if proxy is None:
        logging.warning(PROXY_LOG_PREFIX + "received heartbeat for missing proxy: %s", body)
        return
    proxy.heartbeat = heartbeat
    proxy.save()

def main(msg: func.QueueMessage) -> None:
    logging.info('Python queue trigger function processed a queue item: %s',
                 msg.get_body().decode('utf-8'))