def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    data = event.get_json()
    logging.info(f'EventGrid Data: {json.dumps(data)}')

    if event.event_type == "Microsoft.MachineLearningServices.ModelRegistered":
        model = str(data["modelName"])
        if "seer" in model.lower():
            version = data["modelVersion"]
            ghSha = data["modelTags"]["github_ref"]
            ghUri = "https://api.github.com/repos/cloudscaleml/seer/dispatches"
            logging.info('Calling GitHub hook')
            response = seer_registered(GitHubToken, ghUri, model, version, ghSha)
            logging.info(
                f'GitHub Response: {response.status_code}, Text: {response.text}'
            )
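# `seer_registered` is not defined in this snippet. A minimal sketch, assuming it
# fires a GitHub repository_dispatch event carrying the model details; the
# event_type value and client_payload keys below are illustrative assumptions.
import requests

def seer_registered(token, uri, model, version, sha):
    headers = {
        'Authorization': f'token {token}',
        'Accept': 'application/vnd.github.v3+json',
    }
    payload = {
        'event_type': 'model-registered',  # assumed dispatch event name
        'client_payload': {'model': model, 'version': version, 'sha': sha},
    }
    # repository_dispatch returns 204 No Content on success
    return requests.post(uri, headers=headers, json=payload)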
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    blob_service_client = BlobServiceClient.from_connection_string(
        os.environ.get('ORIGIN_STORAGE_CONNECTION_STRING'))

    # Create a retention policy to retain deleted blobs
    delete_retention_policy = RetentionPolicy(enabled=True, days=1)

    # Set the retention policy on the service
    blob_service_client.set_service_properties(
        delete_retention_policy=delete_retention_policy)

    # Blob info to delete (keep only the part of the container name before the
    # first hyphen)
    blob_url = event.get_json().get('url')
    container_name = blob_url.split("/")[-2].split("?")[0].split("-")[0]
    blob_name = blob_url.split("/")[-1].split("?")[0]

    blob_to_delete = blob_service_client.get_blob_client(
        container=container_name, blob=blob_name)
    blob_to_delete.delete_blob()
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    blob_service_client = BlobServiceClient.from_connection_string(
        os.environ.get('ARCHIVE_STORAGE_CONNECTION_STRING'))

    # Get the URL and extract the name of the file and container
    blob_url = event.get_json().get('url')
    logging.info('blob URL: %s', blob_url)
    blob_name = blob_url.split("/")[-1].split("?")[0]
    container_name = blob_url.split("/")[-2].split("?")[0]
    archived_container_name = container_name + '-' + os.environ.get(
        'AZURE_STORAGE_ARCHIVE_CONTAINER')

    blob_service_client_origin = BlobServiceClient.from_connection_string(
        os.environ.get('ORIGIN_STORAGE_CONNECTION_STRING'))
    blob_to_copy = blob_service_client_origin.get_blob_client(
        container=container_name, blob=blob_name)

    # Generate a short-lived, read-only SAS token so the archive account can
    # read the blob from the origin account
    sas_token = generate_blob_sas(
        blob_to_copy.account_name,
        blob_to_copy.container_name,
        blob_to_copy.blob_name,
        account_key=blob_service_client_origin.credential.account_key,
        permission=BlobSasPermissions(read=True),
        start=datetime.utcnow() + timedelta(seconds=1),
        expiry=datetime.utcnow() + timedelta(hours=1))
    logging.info('sas token: %s', sas_token)

    archived_container = blob_service_client.get_container_client(
        archived_container_name)

    # Create new container
    try:
        archived_container.create_container()
    except ResourceExistsError:
        pass

    copied_blob = blob_service_client.get_blob_client(archived_container_name,
                                                      blob_name)
    blob_to_copy_url = blob_url + '?' + sas_token
    logging.info('blob url: ' + blob_to_copy_url)

    # Start copy
    copied_blob.start_copy_from_url(blob_to_copy_url)
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    # Get the service principal from environment variables
    sp_auth = ServicePrincipalAuthentication(
        tenant_id=os.getenv('TENANT_ID', ''),
        service_principal_id=os.getenv('SP_ID', ''),
        service_principal_password=os.getenv('SP_PASSWORD', ''))

    # Parse the Azure subscription ID, resource group name, and ML workspace
    # name from the Event Grid event topic
    sub_tag = "subscriptions"
    rg_tag = "resourceGroups"
    ws_provider_tag = "providers/Microsoft.MachineLearningServices/workspaces"
    subscription_id = event.topic.split("{}/".format(sub_tag),
                                        1)[1].split("/{}".format(rg_tag), 1)[0]
    resource_group_name = event.topic.split("{}/".format(rg_tag), 1)[1].split(
        "/{}".format(ws_provider_tag), 1)[0]
    workspace_name = event.topic.split("{}/".format(ws_provider_tag),
                                       1)[1].split("/", 1)[0]

    # Get the workspace
    ws = Workspace.get(name=workspace_name,
                       auth=sp_auth,
                       subscription_id=subscription_id,
                       resource_group=resource_group_name)
    logging.info(
        'SubscriptionID = %s; ResourceGroup = %s; WorkSpace = %s; Location = %s',
        ws.subscription_id, ws.resource_group, ws.name, ws.location)

    # Get the model from the event data
    event_data = event.get_json()
    model_id = '{}:{}'.format(event_data['modelName'],
                              event_data['modelVersion'])
    model = Model(ws, id=model_id)
    logging.info('Model name = %s', model.name)

    # Perform a no-code deploy in a fire-and-forget way: we don't need to hold
    # the Functions App resource, and we respond to the Event Grid request in
    # time so that Event Grid won't time out and retry.
    service_name = 'acitest-{}-{}'.format(event_data['modelName'],
                                          event_data['modelVersion'])
    service = Model.deploy(ws, service_name, [model])
    logging.info('Start deploying service %s to ACI', service.name)
def main(event: func.EventGridEvent):
    logging.info('version: %s', 'v0.1.0')
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    cfgPath = pathlib.Path(__file__).parent / "config.json"
    geoEventPost = postgeoeventdata.PostGeoEventData(cfgPath)
    geoEventPost.processGeoEventPost(event.get_json()["destinationUrl"])
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('{} Python EventGrid trigger processed an event: {}'.format(
        LOG_MESSAGE_HEADER, result))

    get_config_values()

    regexp = re.compile(EVENT_SUBJECT_FILTER_REGEX)
    if regexp.search(event.subject):  # Check if the file path matches the criteria
        tc = TelemetryClient(APP_INSIGHT_ID)
        tc.context.application.ver = '1.0'
        tc.context.properties["PROCESS_PROGRAM"] = PROCESS_PROGRAM_NAME
        tc.context.properties["PROCESS_START"] = time.time()
        tc.track_trace('{} START RUN EVENTGRID INGEST TELEMETRY JSON DATA from folder {}'.format(
            LOG_MESSAGE_HEADER, result))
        tc.flush()

        configfile_block_blob_service = BlockBlobService(
            account_name=CONFIG_FILE_BLOB_ACCOUNT,
            account_key=CONFIG_FILE_BLOB_KEY)
        cleanfile_block_blob_service = BlockBlobService(
            account_name=CLEAN_FILE_BLOB_ACCOUNT,
            account_key=CLEAN_FILE_BLOB_KEY)

        filepath = get_file_path(event.subject)
        container_name = get_container_name(event.subject)
        config_file_name = get_filename(filepath)
        logging.info('{} filepath: {}'.format(LOG_MESSAGE_HEADER, filepath))
        logging.info('{} container_name: {}'.format(LOG_MESSAGE_HEADER,
                                                    container_name))

        config_file_name_utf8, filesize, vm_uuid, deploy_uuid, config_uuid = generate_UTF8_config_json(
            configfile_block_blob_service, CONFIG_FILE_CONTAINER, filepath,
            cleanfile_block_blob_service, CLEAN_FILE_CONTAINER)

        ingest_to_ADX(config_file_name_utf8, cleanfile_block_blob_service,
                      CLEAN_FILE_CONTAINER, CLEAN_FILE_BLOB_ACCOUNT, filesize,
                      tc, vm_uuid, deploy_uuid, config_uuid)
    else:
        logging.info('{} Subject: {} does not match the regular expression {}. Skipping processing.'.format(
            LOG_MESSAGE_HEADER, event.subject, EVENT_SUBJECT_FILTER_REGEX))
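# The helpers used above (get_file_path, get_container_name, get_filename) are
# not defined in this snippet. Minimal sketches, assuming the standard Blob
# Storage event subject format
# /blobServices/default/containers/<container>/blobs/<path/to/file>:
def get_container_name(subject: str) -> str:
    return subject.split('/containers/')[1].split('/blobs/')[0]

def get_file_path(subject: str) -> str:
    return subject.split('/blobs/')[1]

def get_filename(filepath: str) -> str:
    return filepath.split('/')[-1]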
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })

    result_obj = json.loads(result)
    eventid = result_obj['id']
    base = result_obj['data']['base']
    exp = result_obj['data']['exp']
    total = result_obj['data']['total']
    run_num = result_obj['data']['runId']
    val = base**exp
    result_data = f"Base: {base}, Exp: {exp}, Total: {total}, Val: {val}, Run: {run_num}"
    logging.warning(
        f'Python EventGrid trigger processed an event: {result}, result_data: {result_data}'
    )

    # Derived from application properties
    host = os.environ.get("redishost")
    key = os.environ.get("rediskey")
    r = redis.StrictRedis(host=host, port=6380, db=0, password=key, ssl=True)
    ping_result = r.ping()
    logging.warning("Ping returned : " + str(ping_result))

    record_key = str(run_num) + "-" + str(eventid)
    r.set(record_key, val)
    # get the keys for this run
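# The snippet above breaks off at a commented-out block ("get the keys for this
# run"). A hypothetical sketch of that lookup with redis-py's scan_iter, based
# on the "<run>-<event id>" key format written above:
def keys_for_run(r: redis.StrictRedis, run_num) -> list:
    return [key for key in r.scan_iter(match=f"{run_num}-*")]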
async def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    try:
        result = json.loads(result)
        data = result['data']
        url = urlparse(data['url'])
        p_file = Path(url.path)
        filename = p_file.name
        container = p_file.parent.name
        connection = os.environ['STORAGE_CONNECTION']

        async with BlobClient.from_connection_string(
                connection, container_name=container,
                blob_name=filename) as blob:
            stream = await blob.download_blob()
            data = await stream.content_as_text()
            logging.info(f'Content is {data}')
    except Exception as e:
        logging.exception(f"failed to load EventGrid request: {e}")
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })

    message = event.subject
    registry = message.split(":")[0]
    version = message.split(":")[1]
    slack_message = ":star::star:Image version: {version} pushed to {registry} :star::star:".format(
        version=version, registry=registry)

    data = []
    data.append({
        "type": "section",
        "text": {
            "type": "mrkdwn",
            "text": slack_message
        },
        "accessory": {
            "type": "image",
            "image_url": "http://i.giphy.com/4AC11GmQzFVKg.gif",
            "alt_text": "Deployed image"
        }
    })
    slack_data = json.dumps({'blocks': data})

    logging.info('Python EventGrid trigger processed an event: %s', result)

    url = os.environ["SLACK_IMAGE_PUSH"]
    if event.event_type == "Microsoft.ContainerRegistry.ImagePushed":
        logging.info(message)
        r = requests.post(url, data=slack_data)
        logging.info(r.status_code)
def main(event: func.EventGridEvent) -> str:
    return json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
def main(event: func.EventGridEvent):
    logger = logging.getLogger("logger_name")
    logger.disabled = True

    # Must be here to re-initialize variables every time
    from . import buildmsgs_and_export

    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    fileName = event.subject.split('/')[-1]
    updateImage = False
    if '--update-image' in fileName:
        updateImage = True
    wzID = fileName.replace('path-data', '').replace('.csv', '').replace(
        '--update-image', '')

    csv_path = tempfile.gettempdir() + '/path-data.csv'
    config_path = tempfile.gettempdir() + '/config.json'
    # parameter_path = tempfile.gettempdir() + '/parameters.json'

    blob_service_client = BlobServiceClient.from_connection_string(
        os.environ['neaeraiotstorage_storage'], logger=logger)

    container_name = 'workzoneuploads'
    blob_name = event.subject.split('/')[-1]
    logging.info('CSV: container: ' + container_name + ', blob: ' + blob_name)
    blob_client = blob_service_client.get_blob_client(container=container_name,
                                                      blob=blob_name)
    with open(csv_path, 'wb') as download_file:
        download_file.write(blob_client.download_blob().readall())
    logging.info('Downloaded path-data CSV')

    blob_name = 'config' + wzID + '.json'
    container_name = 'publishedconfigfiles'
    logging.info('Config: container: ' + container_name + ', blob: ' + blob_name)
    blob_client = blob_service_client.get_blob_client(container=container_name,
                                                      blob=blob_name)
    with open(config_path, 'wb') as download_file:
        download_file.write(blob_client.download_blob().readall())
    logging.info('Wrote local files')

    # with open(parameter_path, 'w') as f:
    #     parameters = {}
    #     parameters['id'] = wzID
    #     f.write(json.dumps(parameters))

    buildmsgs_and_export.build_messages_and_export(wzID, csv_path, config_path,
                                                   updateImage)
def main(event: func.EventGridEvent):
    blob_data = event.get_json()
    blob_url = blob_data['url']
    logging.info(f"Python event trigger function processed blob \n"
                 f"Name: {blob_url}")

    payload = dict(blob_url=blob_url)
    requests.post(ARGO_EVENTS_ENDPOINT, json=payload)
    logging.info('Python EventGrid sent blob {payload} to endpoint {endpoint}'.format(
        payload=payload, endpoint=ARGO_EVENTS_ENDPOINT))
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)
def form_data_af_event_grid(
    event: EventGridEvent,
    *args,
):
    """
    Used to extract the payload from an Event Grid event.

    :param event: An Event Grid event.
    :return: A JSON-formatted dictionary; its "DATA" field contains the event payload.
    """
    return {"DATA": event.get_json()}
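# A hypothetical usage sketch: wiring the extractor into an Event Grid-triggered
# function, following the trigger signature used by the other samples here:
def main(event: func.EventGridEvent):
    payload = form_data_af_event_grid(event)
    logging.info('DATA: %s', payload['DATA'])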
async def main(event: func.EventGridEvent, starter: str):
    client = df.DurableOrchestrationClient(starter)
    logging.info("Python EventGrid trigger processed an event: %s",
                 event.get_json())

    blob = Blob(full_url=event.get_json()["url"])
    logging.info(f"Blob: {blob}")

    try:
        status = await client.get_status(blob.order_id)
        instance_id = status.instance_id
        logging.info("Got existing orchestrator, with instance id: %s",
                     instance_id)
    except Exception:
        instance_id = await client.start_new("Orchestrator", blob.order_id, None)
        logging.info("Started new orchestration, with instance id: %s",
                     instance_id)

    if instance_id:
        await client.raise_event(instance_id, blob.file_type, True)
    else:
        logging.error("Could not start orchestrator for instance: %s",
                      blob.order_id)
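# `Blob` is not defined in this snippet. A minimal sketch, assuming the blob URL
# encodes the order id and file type in its path, e.g.
# https://<account>.blob.core.windows.net/orders/<order_id>/<file_type>.json;
# the URL layout is an assumption, so adjust the parsing to the real naming scheme.
from dataclasses import dataclass, field
from pathlib import PurePosixPath
from urllib.parse import urlparse

@dataclass
class Blob:
    full_url: str
    order_id: str = field(init=False)
    file_type: str = field(init=False)

    def __post_init__(self):
        path = PurePosixPath(urlparse(self.full_url).path)
        self.order_id = path.parent.name  # parent folder holds the order id
        self.file_type = path.stem        # file name stem holds the file type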
def main(event: func.EventGridEvent, inputblob: func.InputStream):
    logging.info('Entered the Thumbnail function')
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })

    event_data = event.get_json()
    url = event_data['url']

    if inputblob:
        img_bytes = inputblob.read()
        # np.fromstring is deprecated for binary data; frombuffer is the
        # drop-in replacement
        img_array = np.frombuffer(img_bytes, dtype=np.uint8)
        img = cv.imdecode(img_array, cv.IMREAD_COLOR)

        # Scale the image down so its longest side is 300 px
        h, w, _ = img.shape
        scale = min(300 / w, 300 / h)
        nw, nh = int(scale * w), int(scale * h)
        # Interpolation must be passed as a keyword; the third positional
        # argument of cv.resize is the destination buffer
        thumbnail = cv.resize(img, (nw, nh), interpolation=cv.INTER_CUBIC)

        # imencode expects the file extension (e.g. '.jpg') to pick the format
        buffer = cv.imencode(os.path.splitext(url)[1], thumbnail)[1]

        blob_name = getBlobNameFromUrl(url)
        blob_service_client = BlobServiceClient.from_connection_string(
            conn_str=BLOB_STORAGE_CONNECTION_STRING)
        blob_container_client = blob_service_client.get_container_client(
            'thumbnail')
        try:
            blob_container_client.upload_blob(name=blob_name,
                                              data=buffer.tobytes(),
                                              overwrite=True)
            logging.info('Thumbnail created successfully.')
        except Exception as e:
            logging.error('Failed to upload the thumbnail: %s', e)

    logging.info('Python EventGrid trigger processed an event: %s', result)
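# `getBlobNameFromUrl` is not defined in this snippet. A minimal sketch, assuming
# the blob name is everything in the URL path after the container segment
# (https://<account>.blob.core.windows.net/<container>/<blob name>):
from urllib.parse import unquote, urlparse

def getBlobNameFromUrl(url: str) -> str:
    path = urlparse(url).path.lstrip('/')  # '<container>/<blob name>'
    return unquote(path.split('/', 1)[1])  # drop the container segment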
def main(event: func.EventGridEvent):
    logging.info(sys.version)
    logging.info(event)
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type
    })
    logging.info(result)

    deserialized_event = EventGridEvent.from_dict(
        json.loads(result))  # can only be EventGridEvent
    print("model: {}".format(event.model))
def main(event: func.EventGridEvent): logging.info("env info starts") logging.info(db_connection_str) logging.info(storage_account_name) logging.info(storage_account_key) logging.info("env info ends") result = json.dumps({ 'id': event.id, 'data': event.get_json(), 'topic': event.topic, 'subject': event.subject, 'event_type': event.event_type, }) logging.info("received event:" + result) try: if event.get_json()["eventCount"] == 0: pass dump(event.get_json()["fileUrl"]) except Exception as e: logging.error(e)
def main(event: func.EventGridEvent):
    """Respond to Event Grid events from Key Vault for:
    Microsoft.KeyVault.CertificateNearExpiry,
    Microsoft.KeyVault.CertificateExpired
    """
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger received an event: %s', result)

    blob_url = event.get_json().get("url")
    if blob_url is None:
        logging.info(
            f"{event.id} did not contain a 'url' attribute in its 'data' attribute. Exiting."
        )
        exit(1)

    # Reshape the blob URL into wasbs format (wasbs://CONTAINER@STORAGE_ACCOUNT/FILE_PATH).
    # Input arrives as https://STORAGE_ACCOUNT/CONTAINER/FILE_PATH
    slashes_pos = blob_url.index('://') + 3
    end_storage_acct_pos = blob_url[slashes_pos:].index('/') + slashes_pos
    end_container_pos = blob_url[(end_storage_acct_pos +
                                  1):].index('/') + end_storage_acct_pos + 1

    storage_acct = blob_url[slashes_pos:end_storage_acct_pos]
    container = blob_url[(end_storage_acct_pos + 1):end_container_pos]
    filepath = blob_url[(end_container_pos + 1):]
    wasbs_url = f"wasbs://{container}@{storage_acct}/{filepath}"

    logging.info(f"Running a job for this wasbs file: {wasbs_url}")
    job_results = run_job(wasbs_url)
    job_string = json.dumps(job_results)
    logging.info(f"Job trigger results were: {job_string}")
    logging.info(f"Completed Event Grid Trigger for {event.id}")
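# The index arithmetic above can be written more directly with urllib.parse.
# A sketch of the same https-to-wasbs reshaping (the helper name is hypothetical):
from urllib.parse import urlparse

def to_wasbs(blob_url: str) -> str:
    parsed = urlparse(blob_url)  # https://STORAGE_ACCOUNT/CONTAINER/FILE_PATH
    container, _, filepath = parsed.path.lstrip('/').partition('/')
    return f"wasbs://{container}@{parsed.netloc}/{filepath}"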
def main(event: func.EventGridEvent):
    logging.info('Function triggered to process a message: %s', event.get_body())
    # Note: enqueued_time, sequence_number and offset come from the Event Hub
    # binding; they are not part of func.EventGridEvent
    logging.info('  EnqueuedTimeUtc = %s', event.enqueued_time)
    logging.info('  SequenceNumber = %s', event.sequence_number)
    logging.info('  Offset = %s', event.offset)

    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)
def main(event: func.EventGridEvent, outputBlob: func.Out[func.InputStream]):
    result = json.dumps(
        {
            'id': event.id,
            'data': event.get_json(),
            'topic': event.topic,
            'subject': event.subject,
            'event_type': event.event_type,
        },
        indent=4)

    logging.info(f'Saving payload as blob: {result}')
    outputBlob.set(result)
    logging.info('Python EventGrid trigger processed an event')
def main(event: func.EventGridEvent):
    logging.info('Event received. Parsing it.')
    try:
        now = datetime.now()
        logging.info('Starting at %s', now)

        # Gather data from the event and define some variables
        vault_name = event.get_json().get('vaultName')
        vault_url = f'https://{vault_name}.vault.azure.net'
        resource_group_name = event.topic.split('/')[4]
        subscription_id = event.topic.split('/')[2]
        host_pool_name = event.subject
        expiration_time = now + timedelta(days=7)
        url = f'https://management.azure.com/subscriptions/{subscription_id}/resourceGroups/{resource_group_name}/providers/Microsoft.DesktopVirtualization/hostPools/{host_pool_name}?api-version=2019-12-10-preview'
        head = {}
        payload = {
            'properties': {
                "registrationInfo": {
                    "expirationTime": expiration_time.isoformat(),
                    "registrationTokenOperation": "Update"
                }
            }
        }

        logging.info('Generating registration token for host pool: %s',
                     host_pool_name)

        # Generate a new registration token
        response = requests.patch(url, json=payload, headers=head)
        print(response.text)
        registration_token = ''

        # logging.info('Storing registration token in secret named: %s', host_pool_name)
        # # Acquire credential and client object and set the secret value
        # credential = DefaultAzureCredential()
        # secret_client = SecretClient(vault_url=vault_url, credential=credential)
        # secret_client.set_secret(host_pool_name, registration_token, expires_on=expiration_time)
        # logging.info('A registration token for %s has been created successfully.', host_pool_name)

    except Exception as err:
        logging.error("Error occurred: {0}".format(err))

    logging.info('End of execution.')
    ## End of getting the host pool registration token
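# `registration_token` is left empty above. A hypothetical sketch of pulling it
# out of the PATCH response, assuming the host pool payload exposes it under
# properties.registrationInfo.token (verify against the API version in use):
def extract_registration_token(response) -> str:
    body = response.json()
    return body.get('properties', {}).get('registrationInfo', {}).get('token', '')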
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })

    context = adal.AuthenticationContext(LOGIN_ENDPOINT + '/' + TENANT_ID)
    credentials = AdalAuthentication(
        context.acquire_token_with_client_credentials, RESOURCE, CLIENT, KEY)

    # You can now use this object to perform different operations on your AMS account.
    client = AzureMediaServices(credentials, SUBSCRIPTION_ID)
    logging.info("signed in to ams")
    logging.info('asset list %s',
                 client.assets.list(RESOUCE_GROUP_NAME, ACCOUNT_NAME).get(0))
    logging.info('Python EventGrid trigger processed an event: %s', result)
def main(event: func.EventGridEvent):
    # Trigger function with info coming in from a simulated IoT device
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
        'event_test': 'test string'
    })

    # Parse the JSON string from the simulated device to get only the information needed
    test = json.loads(result)
    test2 = test['data']['body']['MachineID']
    test3 = test['data']['body']
    logging.info(test2)

    # Decide which request (GET/PUT/POST) the info goes to.
    # GetEndpoint holds the GET URL, which is hidden in the cloud;
    # test2 takes the place of {MachineID} in the URL.
    getURL = os.environ["GetEndpoint"]
    response = requests.get(getURL)
    logging.debug(
        "If the response is not successful, check the GET URL function to make sure it's working properly"
    )
    # If the record exists, make an update/PUT request
    if response.status_code in (200, 202):
        logging.info(response.text)
        # PutEndpoint holds the PUT URL; test3 is passed as the JSON body
        putURL = os.environ["PutEndpoint"]
        logging.info(test3)
        response = requests.put(putURL, json=test3)
        logging.info(response.text)
    # Otherwise, create the record with a POST request
    else:
        # PostEndpoint holds the POST URL; test3 is passed as the JSON body
        postURL = os.environ["PostEndpoint"]
        response = requests.post(postURL, json=test3)
        logging.info(response.text)

    logging.info('Python EventGrid trigger processed an event: %s', result)
def main(event: func.EventGridEvent, msg: func.Out[str]):
    subject = (event.subject).split('/')
    options = {
        "fileUrl": event.get_json()['url'],
        "container": subject[4],
        "blob": subject[6]
    }
    logging.info('Python EventGrid trigger processed an event: %s', options)

    fileURL = options['fileUrl']
    logging.info(f"fileURL: {fileURL}")
    containerInput = options['container']
    logging.info(f"containerInput: {containerInput}")

    result = initial_function(fileURL=fileURL, containerInput=containerInput)
    logging.info(f"result: {result}")

    if result == "split":
        logging.info("File too big, so splitting is needed")
        queueMessage = f"{fileURL}__________{containerInput}__________ALL"
        logging.info(f"Message added to queue: {queueMessage}")
        msg.set(queueMessage)
    elif result == "retry":
        # Get the blob name
        fileName = unquote(fileURL.split("/")[-1])
        # Get the first bit before "_"
        prefix = fileName.split("_")[0]
        if "of" not in prefix:
            raise ValueError(
                "We have an error from a non-split file, not sure what to do...."
            )
        else:
            logging.info("Reading in the URL failed, let's try to create it again")
            # Get rid of the "XofY_" bit at the front of the file name
            fileURL2 = fileURL.replace(f"{prefix}_", "")
            queueMessage = f"{fileURL2}__________{containerInput}__________{prefix}"
            logging.info(f"Message added to queue: {queueMessage}")
            msg.set(queueMessage)
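# A hypothetical sketch of how a queue consumer could unpack the message format
# produced above: three fields joined by ten underscores, where the last field
# is either "ALL" or an "XofY" part prefix:
def parse_queue_message(message: str):
    fileURL, containerInput, part = message.split("__________")
    return fileURL, containerInput, part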
def main(event: func.EventGridEvent):
    logging.info(os.environ)
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })

    BATCH_ACCOUNT_NAME = os.getenv('BATCH_ACCOUNT_NAME')
    BATCH_ACCOUNT_KEY = os.getenv('BATCH_ACCOUNT_KEY')
    BATCH_ACCOUNT_URL = os.getenv('BATCH_ACCOUNT_URL')
    BATCH_POOL_ID = os.getenv('BATCH_POOL_ID')
    BATCH_DOCKER_IMAGE = os.getenv('BATCH_DOCKER_IMAGE')
    STORAGE_ACCOUNT = os.getenv('STORAGE_ACCOUNT_NAME')
    STORAGE_KEY = os.getenv('STORAGE_KEY')
    OUTPUT_CONTAINER = os.getenv('OUTPUT_CONTAINER')

    credentials = batch_auth.SharedKeyCredentials(BATCH_ACCOUNT_NAME,
                                                  BATCH_ACCOUNT_KEY)
    batch_client = batch.BatchServiceClient(credentials,
                                            batch_url=BATCH_ACCOUNT_URL)

    taskid = 'task-' + str(uuid.uuid4())[:8]
    storage_path = str(event.subject)
    storage_path_split = storage_path.split("/")
    container_name = storage_path_split[4]
    file_name = storage_path_split[6]

    add_tasks(batch_client, BATCH_POOL_ID, taskid, BATCH_DOCKER_IMAGE,
              STORAGE_ACCOUNT, STORAGE_KEY, container_name, file_name,
              OUTPUT_CONTAINER)
    logging.info('Task Submitted as task: %s' % taskid)
    logging.info(result)
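# `add_tasks` is not defined in this snippet. A minimal sketch, assuming a Batch
# job with the same id as the pool already exists and the container image reads
# its work from environment variables; every detail here (command line, env
# names) is an assumption, not the original implementation:
import azure.batch.models as batchmodels

def add_tasks(batch_client, pool_id, task_id, docker_image, storage_account,
              storage_key, input_container, file_name, output_container):
    task = batchmodels.TaskAddParameter(
        id=task_id,
        command_line="/bin/sh -c 'run-processing'",  # hypothetical entry point
        container_settings=batchmodels.TaskContainerSettings(
            image_name=docker_image),
        environment_settings=[
            batchmodels.EnvironmentSetting(name='STORAGE_ACCOUNT',
                                           value=storage_account),
            batchmodels.EnvironmentSetting(name='STORAGE_KEY',
                                           value=storage_key),
            batchmodels.EnvironmentSetting(name='INPUT_CONTAINER',
                                           value=input_container),
            batchmodels.EnvironmentSetting(name='INPUT_FILE', value=file_name),
            batchmodels.EnvironmentSetting(name='OUTPUT_CONTAINER',
                                           value=output_container),
        ])
    batch_client.task.add(job_id=pool_id, task=task)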
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })
    logging.info('Python EventGrid trigger processed an event: %s', result)

    try:
        config_json = readjson_from_file("/home/site/wwwroot/config.json")
        nice_ml_url = config_json['NICE_ML_APP_SERVICE_URL']
    except KeyError:
        logging.info(
            'could not load the config file properly; key not found. Exiting function.'
        )
        exit(0)

    post_to_create_reference_data(nice_ml_url)
    logging.info('done with all processing; exiting function')
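# `readjson_from_file` is not defined in this snippet. A minimal sketch, assuming
# it simply loads and parses a JSON file from the given path:
import json

def readjson_from_file(path: str) -> dict:
    with open(path) as f:
        return json.load(f)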
def main(event: func.EventGridEvent):
    result = json.dumps({
        'id': event.id,
        'data': event.get_json(),
        'topic': event.topic,
        'subject': event.subject,
        'event_type': event.event_type,
    })

    connect_str = os.getenv('STORAGE_CONNECTION')
    input_container = os.getenv('INPUT_CONTAINER')
    output_container = os.getenv('OUTPUT_CONTAINER')

    url_of_blob = None
    blob_name = None
    try:
        data_of_event = event.get_json()
        url_of_blob = data_of_event.get('url', None)
        split_url = url_of_blob.split('/')
        # https://storageaccount.blob.core.windows.net/container/blobname/test.xlsx
        blob_name = '/'.join(split_url[4:])
        container_name = split_url[3]
    except Exception as e:
        logging.info('Python EventGrid failed to process this event: %s', result)
        logging.info('%s', e)
        return None

    if url_of_blob is None:
        logging.info('There was no blob url in this event: %s', result)
        return None

    if container_name != input_container:
        logging.info(
            "Triggered for an irrelevant event on the {} container".format(
                container_name))
        return None

    # Download the blob
    blob_service_client = BlobServiceClient.from_connection_string(connect_str)
    input_excel = blob_service_client.get_blob_client(container=container_name,
                                                      blob=blob_name)

    original_file_name, _ = os.path.splitext(blob_name)
    new_file_name = "{}.csv".format(original_file_name)
    # Define a temporary location to store the csv
    upload_file_path = "./{}".format(new_file_name)

    # Read the Excel file into a dataframe and save it as a csv
    # (omitting the auto-generated index)
    df = pd.read_excel(input_excel.download_blob().readall())
    df.to_csv(upload_file_path, index=False)

    output_csv = blob_service_client.get_blob_client(
        container=output_container, blob=new_file_name)

    # Upload the created file
    with open(upload_file_path, "rb") as data:
        output_csv.upload_blob(data)

    logging.info("Success!")
    logging.info('Python EventGrid trigger processed an event: %s', result)
def main(event: func.EventGridEvent):
    process_event(event.get_json())