def main(event: func.EventHubEvent):
    try:
        logging.info('TicketValidation function processed an event: %s',
                     event.get_body().decode('utf-8'))

        # Processing ticket validation requirements.
        # Simulate a call to 3rd-party services by adding a random wait.
        sleep(randint(1, 10))

        msgObjList = json.loads(event.get_body().decode('utf-8'))
        msgObj = msgObjList[0]

        # Randomly return 0 or 1 and enrich the message with added data
        # fields. (randint(0, 1) is inclusive on both ends.)
        rand = randint(0, 1)
        if rand == 0:
            msgObj["ticketAvailable"] = 0
        else:
            msgObj["ticketAvailable"] = 1
        msgObj["timeProcessed"] = datetime.utcnow()

        # Add code based on architecture decision.
    except Exception as e:
        logging.error(e)
def main(event: func.EventHubEvent):
    # Each message needs a %s placeholder; passing extra arguments without one
    # raises a logging error at runtime.
    logging.info('Function triggered to process a message: %s', event.get_body())
    logging.info('  EnqueuedTimeUtc = %s', event.enqueued_time)
    logging.info('  SequenceNumber = %s', event.sequence_number)
    logging.info('  Offset = %s', event.offset)
    logging.info('Python EventHub trigger processed an event: %s',
                 event.get_body().decode('utf-8'))
def main(event: func.EventHubEvent, doc: func.Out[func.Document]):
    logging.info('Function triggered to process a message: %s', event.get_body())
    logging.info('  EnqueuedTimeUtc = %s', event.enqueued_time)
    logging.info('  SequenceNumber = %s', event.sequence_number)
    logging.info('  Offset = %s', event.offset)

    request_body = event.get_body()
    doc.set(func.Document.from_json(request_body))
def main(event: func.EventHubEvent) -> str:
    logging.info('WOPS: %s', event.get_body())

    pack = vic.vicpack()                        # instantiate a vicpack class parser
    pack.add(event.get_body().decode('utf-8'))  # add measurement
    pack.detail = True   # when self.__str__ is invoked, print all packet contents
    pack.prefix = False  # do not invoke SI-prefix parser

    print(pack)
    print(pack.export())
    return "{}".format(pack.export())
def main(event: func.EventHubEvent):
    logger.info('Processing message from cloud_scales event hub and pushing to event_hub')
    logger.debug('Python EventHub trigger processed an event: %s',
                 event.get_body().decode('utf-8'))

    message = loads(event.get_body().decode('utf-8'))
    logger.debug(f'received message: {message}')

    message['datetime'] = dateutil_parse(message['published_at']).timestamp()
    logger.debug(f'composed message {dumps(message)}')
    return dumps(message)
def main(event: func.EventHubEvent, outputblob: func.Out[func.InputStream]):
    logging.info('### Message received!')
    logging.info('DeviceId: ' + str(event.iothub_metadata['connection-device-id']))
    logging.info('Time: ' + str(event.iothub_metadata['enqueuedtime']))
    logging.info('SequenceNumber: ' + str(event.sequence_number))

    logging.info('=== START MESSAGE BODY ===')
    logging.info(event.get_body())
    logging.info('=== END MESSAGE BODY ===')

    # Send to blob storage: dump the unmodified JSON payload.
    outputblob.set(event.get_body())
def main(event: func.EventHubEvent):
    handler = LoggingHandler(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    logging.basicConfig(handlers=[handler],
                        format='%(levelname)s: %(message)s',
                        level=logging.DEBUG)
    tc = TelemetryClient(os.environ['APPINSIGHTS_INSTRUMENTATIONKEY'])
    tc.track_event("Incoming event")
    tc.flush()

    logging.info('Function triggered to process a message: %s', event)
    logging.info('  body: %s', event.get_body())
    logging.info('  EnqueuedTimeUtc: %s', event.enqueued_time)
    logging.info('  SequenceNumber: %s', event.sequence_number)
    logging.info('  Offset: %s', event.offset)
    logging.info('  Partition: %s', event.partition_key)
    logging.info('  Metadata: %s', event.iothub_metadata)

    table_service = TableService(connection_string=os.environ['AzureTableConnectionString'])

    for datapoint in json.loads(event.get_body()):
        # Expected data format:
        # {"timestamp": 1564598054, "deviceid": "Node1", "scale": 2, "temperature": 1.1, "weight": 10000}
        if datapoint is not None and 'deviceid' in datapoint and \
           'timestamp' in datapoint and 'scale' in datapoint and \
           'weight' in datapoint:
            logging.debug('  datapoint: %s', datapoint)

            # deviceid is used as PartitionKey; {timestamp}-{scale} is used as
            # RowKey. timestamp and scale are duplicated as int columns to keep
            # them searchable. The rest of the datapoint elements are added as
            # columns as well.
            history = {}
            history['PartitionKey'] = datapoint.pop('deviceid')
            history['RowKey'] = str(datapoint['timestamp']) + '-' + str(datapoint['scale'])
            history.update(datapoint.items())
            logging.debug('history: %s', history)

            table_service.insert_entity(TABLE_NAME_HISTORICAL_DATA, history)
            logging.info('Added historical table data: %s', history)

            # Touch/create the row in the config table for each reported scale
            # with the latest weight.
            configupdate = {}
            configupdate['PartitionKey'] = history['PartitionKey']
            configupdate['RowKey'] = str(history['scale'])
            configupdate['weight'] = history['weight']
            if 'temperature' in history:
                configupdate['temperature'] = history['temperature']
            logging.info('config update: %s', configupdate)
            logging.info('Writing to table: %s', TABLE_NAME_CONFIGURATION)

            table_service.insert_or_merge_entity(TABLE_NAME_CONFIGURATION, configupdate)
            logging.info('Updated configuration table entry: %s', configupdate)
        else:
            logging.info('  Invalid datapoint: %s', datapoint)
def main(event: func.EventHubEvent, fromeventhub: func.Out[func.Document]):
    try:
        # One placeholder per argument; the original single-%s call with five
        # arguments would raise a TypeError inside logging.
        logging.info(
            'Function triggered to process a message: %s\n'
            ' EnqueuedTimeUtc = %s\n'
            ' SequenceNumber = %s\n'
            ' Offset = %s',
            event.get_body(), event.enqueued_time,
            event.sequence_number, event.offset)
    except Exception as e:
        print(e)

    event_body = event.get_body()
    fromeventhub.set(func.Document.from_json(event_body))
def main(event: func.EventHubEvent):
    logging.info('Python EventHub trigger processed an event: %s',
                 event.get_body().decode('utf-8'))

    log_list = json.loads(event.get_body().decode('utf-8'))
    single_log = log_list[0]

    try:
        # If the payload is an edge event, 'data' is a dict with a 'type' field.
        if single_log['data']["type"] == "edge":
            insert_row(single_log['data']['ismoving'], False)
    except (TypeError, KeyError):
        # Otherwise 'data' is a list of [current, last] accelerometer readings.
        current = single_log['data'][0]
        last = single_log['data'][1]
        insert_row(is_moving(last, current), True,
                   current["x"], current["y"], current["z"])
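# Hypothetical sketch of the is_moving helper used above: treat the device as
# moving when any accelerometer axis changed by more than a threshold between
# the last and current readings (the 0.1 threshold is a placeholder assumption):
def is_moving(last, current, threshold=0.1):
    return any(abs(current[axis] - last[axis]) > threshold
               for axis in ('x', 'y', 'z'))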
def main(event: func.EventHubEvent):
    w = Widget.from_json(event.get_body().decode('utf-8'))

    c = Widget_Classification()
    c.classified_time = datetime.utcnow()
    c.mean = random.randrange(1, 100)
    c.std = random.randrange(1, 2)
    c.std_dist = random.randrange(1, 3)
    c.threshold = random.randrange(1, 100)
    w.classification = c

    (result, good) = c.is_good()
    assert result.success

    rowId = uuid.uuid4().hex
    if not good:
        sqlDao = WidgetSqlDAO(connectODBC)
        sqlDao.persistWidget(w, rowId)
        sqlDao.disconnect()

    # Create a sample entity to insert into the table.
    tableDao = WidgetTableDAO(connectTable(), "Predictions")
    tableDao.persistWidget(w, rowId)

    result = w.to_json()
    logging.info('Python EventHub trigger processed an event: %s', result)
    return result
def main(event: func.EventHubEvent):
    stateUpdates = {}
    messages = json.loads(event.get_body().decode('utf-8'))

    for msg in messages:
        environment = {}
        parseTelemetry(msg, environment, partitionKey)

        calibrationData = getCalibrationData(calibrationDictionary, msg['deviceId'])
        if calibrationData is not None:
            environment["Celsius"] = calibrate(
                environment["Celsius"],
                calibrationData["TemperatureSlope"],
                calibrationData["TemperatureYIntercept"])
            environment["Humidity"] = calibrate(
                environment["Humidity"],
                calibrationData["HumiditySlope"],
                calibrationData["HumidityYIntercept"])
            environment["hPa"] = calibrate(
                environment["hPa"],
                calibrationData["PressureSlope"],
                calibrationData["PressureYIntercept"])

        stateUpdates[msg['deviceId']] = environment

    # Persist and broadcast each device's latest state. (The original loop
    # iterated over the keys but always wrote the last parsed 'environment'.)
    for environment in stateUpdates.values():
        table_service.insert_or_replace_entity(deviceStateTable, environment)
        notifyClients(signalrUrl, environment)
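# Hypothetical sketch of the calibrate helper used above; the *Slope and
# *YIntercept field names suggest a simple linear correction (y = mx + b):
def calibrate(value, slope, y_intercept):
    return value * slope + y_intercept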
def main(event: func.EventHubEvent):
    '''
    Entrypoint for Function 'process' of Azure Function App 'iothub-to-mongodb'.
    Read messages from IoT Hub and insert them into MongoDB.

    By setting 'cardinality' to 'many' in the function.json, the body of the
    event object contains a list of messages instead of a single message
    (batch reads).
    '''
    # Use .get() so the missing-variable case reaches the check below instead
    # of raising a KeyError.
    MONGO_URI = os.environ.get("MONGO_URI")
    if MONGO_URI is None:
        raise ValueError('No MongoDB cluster provided. Will exit.')

    # Read messages from the event and group them by action.
    messages = json.loads(event.get_body().decode('utf-8'))
    grouped_messages = defaultdict(list)
    for msg in messages:
        action = msg.pop('action', 'none')
        grouped_messages[action].append(msg)

    # Messages with action == 'fullRefresh' are split into status and station messages.
    split_full_refresh_messages(grouped_messages)

    mongo_client = MongoClient(MONGO_URI)
    db = mongo_client.citibike

    # Do bulk inserts for both types of messages.
    refresh_stations(db, grouped_messages.get('refreshStation', []))
    refresh_status(db, grouped_messages.get('refreshStatus', []))
    return
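# The docstring above depends on batch reads; a minimal function.json binding
# sketch that enables them (the eventHubName and connection values are
# placeholders, not taken from the source):
#
# {
#   "scriptFile": "__init__.py",
#   "bindings": [
#     {
#       "type": "eventHubTrigger",
#       "direction": "in",
#       "name": "event",
#       "eventHubName": "<your-event-hub>",
#       "connection": "EventHubConnectionString",
#       "cardinality": "many"
#     }
#   ]
# }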
def main(event: func.EventHubEvent):
    message_body = event.get_body().decode()
    partition = event.metadata['PartitionContext']['PartitionId']

    logging.info(f'EH: Function triggered to process a message: {message_body}')
    logging.info(f'EH: EnqueuedTimeUtc = {event.enqueued_time}')
    logging.info(f'EH: PartitionId = {partition}')
    logging.info(f'EH: SequenceNumber = {event.sequence_number}')
    logging.info(f'EH: Offset = {event.offset}')

    # Metadata
    for key in event.metadata:
        logging.info(
            f'EH: Metadata: {key} of type {type(event.metadata[key])} = {event.metadata[key]}')

    blobContent = {
        "enqueuedAt": f'{event.enqueued_time}',
        "partitionId": partition,
        "eventContent": message_body
    }
    blobName = (f'{blobContent["partitionId"]}/{event.enqueued_time.year}/'
                f'{event.enqueued_time.month}/{event.enqueued_time.day}/'
                f'{event.enqueued_time.hour}_{event.sequence_number}_{event.offset}')

    sa_cs = os.environ["StorageAccountConnectionString"]
    sa_container = os.environ["StorageAccountContainerName"]
    blob = BlobClient.from_connection_string(conn_str=sa_cs,
                                             container_name=sa_container,
                                             blob_name=blobName)
    blob.upload_blob(json.dumps(blobContent))
def main(event: func.EventHubEvent):
    cosmosdb_order_masterKey = os.environ.get('cosmosdb_order_masterKey')
    cosmosdb_order_host = os.environ.get('cosmosdb_order_host')
    cosmosdb_order_databaseId = os.environ.get('cosmosdb_order_databaseId')
    cosmosdb_order_collectionId = os.environ.get('cosmosdb_order_collectionId')

    event_body = event.get_body().decode('utf-8')
    logging.info(event_body)

    combineOrderContent = os.environ.get('combineOrderContent')
    response = requests.post(combineOrderContent, data=event_body)
    doc_json = response.json()

    client = document_client.DocumentClient(
        cosmosdb_order_host, {'masterKey': cosmosdb_order_masterKey})

    for sale in doc_json:
        dbLink = 'dbs/' + cosmosdb_order_databaseId
        collLink = dbLink + '/colls/' + cosmosdb_order_collectionId
        sale['salesNumber'] = sale['headers']['salesNumber']
        client.CreateDocument(collLink, sale)
def main(event: func.EventHubEvent):
    messages = json.loads(event.get_body().decode('utf-8'))

    for msg in messages:
        # Keep only the prediction with the highest probability.
        sortResponse = sorted(msg, key=lambda k: k['probability'], reverse=True)[0]
        sortResponse['PartitionKey'] = partitionKey
        sortResponse['RowKey'] = sortResponse['tagName']

        # Note: the Count read and update is not transactional.
        try:
            entity = table_service.get_entity(imageScannerTable, partitionKey,
                                              sortResponse['tagName'])
            if 'Count' not in entity:
                count = 0
            else:
                count = entity['Count']
        except Exception:
            # The entity does not exist yet.
            count = 0

        sortResponse['Count'] = count + 1
        table_service.insert_or_replace_entity(imageScannerTable, sortResponse)
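# Hypothetical module-level setup assumed by the function above; the
# connection-string setting name mirrors the TableService usage elsewhere in
# this collection, and the table/partition-key values are placeholders:
import os
from azure.cosmosdb.table.tableservice import TableService

table_service = TableService(connection_string=os.environ['AzureTableConnectionString'])
imageScannerTable = 'ImageScanner'
partitionKey = 'ImageScanner'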
def main(event: func.EventHubEvent, cosmoDocument: func.Out[func.Document]):
    logging.info('Python EventHub2Cosmo trigger processed an event: %s',
                 event.get_body().decode('utf-8'))
    try:
        # Get blog feeds; remove the extra bracket characters added by Event Hub.
        outdata = event.get_body().decode('utf-8').strip('[').strip(']')
        logging.info('outdata: %s', outdata)  # for debugging

        # Store output data using the Cosmos DB output binding.
        cosmoDocument.set(func.Document.from_json(outdata))
    except Exception as e:
        logging.error('Error:')
        logging.error(e)
def main(event: func.EventHubEvent, msg: func.Out[func.QueueMessage]):
    body = event.get_body().decode('utf-8')
    if body != '':
        msg.set(body)
        logging.info('Python EventHub trigger processed an event: %s', body)
    else:
        logging.info('Python EventHub trigger received an empty event body')
def main(event: func.EventHubEvent):
    event_json = event.get_body().decode('utf-8')
    event_item = json.loads(event_json)
    return json.dumps({
        'target': 'newDeviceMessage',
        'arguments': [event_item]
    })
def main(event: func.EventHubEvent):
    device_id = event.iothub_metadata['connection-device-id']
    # Parse the body once instead of once per field.
    payload = json.loads(event.get_body())
    message_event = payload["messageEvent"]
    logging.info(device_id + " " + message_event)

    if message_event == "FARM_DATA_LOAD":
        # Send farm data back to the device.
        get_farm_data(device_id)
    elif message_event == "RUN_FINISHED":
        # Update run_id on Cosmos DB.
        run_id = payload["data"]["runId"]
        farm_id = update_run_id(device_id, run_id)
        logging.info("Updated run: " + str(run_id) + " " + farm_id)

        # Invoke ML prediction.
        response = invoke_pred(farm_id, run_id)
        logging.info(response)
def _get_event_data(event: func.EventHubEvent) -> List[Any]:
    if isinstance(event, Iterable):
        logger.debug('iterable')
        return [loads(e.get_body().decode('utf-8')) for e in event]
    else:
        logger.debug('not iterable')
        return loads(event.get_body().decode('utf-8'))
def main(event: func.EventHubEvent):
    data = event.get_body().decode('utf-8')
    telemetry = json.loads(data)

    for item in telemetry:
        temperature = item.get("temperature")
        if temperature is not None and isinstance(temperature, float) \
                and 31 < temperature < 40:
            print(temperature)
            sendTwitterMsg()
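# Hypothetical sketch of the sendTwitterMsg helper used above, assuming the
# tweepy library with credentials in app settings (all setting names and the
# tweet text are placeholders, not taken from the source):
import os
import tweepy

def sendTwitterMsg():
    auth = tweepy.OAuthHandler(os.environ['TWITTER_API_KEY'],
                               os.environ['TWITTER_API_SECRET'])
    auth.set_access_token(os.environ['TWITTER_ACCESS_TOKEN'],
                          os.environ['TWITTER_ACCESS_SECRET'])
    # Post a simple alert; the triggering range (31-40) comes from the check above.
    tweepy.API(auth).update_status('Temperature alert: reading between 31 and 40')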
def main(event: func.EventHubEvent):
    thingspeak_dict = loads(environ.get("thingspeak_keys_dict"))
    thingspeak_api = environ.get("thingspeak_api_endpoint")

    message = loads(event.get_body().decode('utf-8'))
    logger.info('Processing eventhub message to send to thingspeak')
    logger.debug(f'received message: {message}')

    send_message_to_thingspeak(message, thingspeak_dict, thingspeak_api)
    logger.debug('Sent to thingspeak')
def main(event: func.EventHubEvent):
    logging.info('Python EventHub trigger processed an event: %s',
                 event.get_body().decode('utf-8'))

    # The decoded body is always a str, so parse it directly as JSON; the
    # original str/file-object branch was dead code.
    alert_contents = event.get_body().decode('utf-8')
    alerts = json.loads(alert_contents)
    logging.info('testing alerts %s', alert_contents)

    # https://docs.microsoft.com/en-us/azure/azure-monitor/platform/alerts-common-schema-definitions
    for alert in alerts:
        for alertTargetID in alert['data']['essentials']['alertTargetIDs']:
            logging.info(f"Found alert Target ID: {alertTargetID}")
async def main(event: func.EventHubEvent) -> str:
    # json.dumps returns str, so the return annotation is str, not bytes.
    event_dict: typing.Mapping[str, typing.Any] = {
        'body': event.get_body().decode('utf-8'),
        # Uncomment this when EnqueuedTimeUtc is fixed in azure-functions:
        # 'enqueued_time': event.enqueued_time.isoformat(),
        'partition_key': event.partition_key,
        'sequence_number': event.sequence_number,
        'offset': event.offset,
        'metadata': event.metadata
    }
    return json.dumps(event_dict)
def main(event: func.EventHubEvent):
    eventString = event.get_body().decode('utf-8')
    sensorId = event.metadata.get("SystemProperties", {}).get(
        "iothub-connection-device-id", None)
    props = event.metadata.get("Properties", {})
    testDevice = (props.get("testDevice", "false") == "true")
    logging.info(f"Event properties: {props}, testDevice={testDevice}")

    # 'Error' is not a builtin; raise ValueError instead.
    if sensorId is None:
        raise ValueError("Missing device id")

    blob.storeData(sensorId, eventString, testDevice)
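# Hypothetical sketch of the blob.storeData helper used above: persist the raw
# event per device, segregating test devices (the container name, path layout,
# and connection-string setting are placeholder assumptions):
import os
from datetime import datetime
from azure.storage.blob import BlobClient

def storeData(sensor_id: str, payload: str, test_device: bool):
    prefix = 'test' if test_device else 'prod'
    blob_client = BlobClient.from_connection_string(
        conn_str=os.environ['StorageAccountConnectionString'],
        container_name='sensor-data',
        blob_name=f'{prefix}/{sensor_id}/{datetime.utcnow().isoformat()}.json')
    blob_client.upload_blob(payload)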
def main(event: func.EventHubEvent):
    storage_orders_conn_str = os.environ.get('storage_orders_conn_str')
    storage_orders_container = os.environ.get('storage_orders_container')
    eventhub_ns_sap_sl = os.environ.get('eventhub_ns_sap_sl')
    eventhub_order_combine_files = os.environ.get('eventhub_order_combine_files')

    container = ContainerClient.from_connection_string(
        conn_str=storage_orders_conn_str,
        container_name=storage_orders_container)

    event_body = event.get_body().decode('utf-8')
    logging.info("OrderFileSeeker-event_body" + event_body)
    event_json = json.loads(event_body)

    for e in event_json:
        url = e["data"]["url"]
        if 'orders' not in url:
            return

        url_dirname = os.path.dirname(url)
        url_basename = os.path.basename(url)
        order_id = re.findall(r'\d+', url_basename)[0]

        blobs = list(container.list_blobs(name_starts_with=order_id))
        blob_ts = {b['name']: (b['last_modified'], b['etag']) for b in blobs}
        blob_ts_max = max(blob_ts.values())

        doc = dict()
        # Only proceed once all three order files exist and the blob that
        # triggered this event is the most recently modified one.
        if len(blobs) == 3 and blob_ts[url_basename] == blob_ts_max:
            for b in blobs:
                blob_name = b['name']
                file_type = order_file_type.get(
                    re.findall(r'(?<=-)\w+(?=\.)', blob_name)[0].lower())
                doc[file_type] = f'{url_dirname}/{blob_name}'
            doc_json = json.dumps(doc)

            producer = EventHubProducerClient.from_connection_string(
                conn_str=eventhub_ns_sap_sl,
                eventhub_name=eventhub_order_combine_files)
            try:
                event_data_batch = producer.create_batch()
                event_data_batch.add(EventData(doc_json))
                producer.send_batch(event_data_batch)
            finally:
                producer.close()
def main(event: func.EventHubEvent):
    logging.info('Python EventHub trigger processed an event: %s',
                 event.get_body().decode('utf-8'))
    eventBody = event.get_body().decode('utf-8')
    apiKey = os.environ['SENDGRID_API_KEY']

    if "test" in eventBody:
        print("custom event found")
        message = Mail(
            from_email='*****@*****.**',
            to_emails='*****@*****.**',
            subject='Sending with Twilio SendGrid is Fun',
            html_content='<strong>and easy to do anywhere, even with Python</strong>'
                         '<br>custom event found<br>' + eventBody)
        try:
            sg = SendGridAPIClient(apiKey)
            response = sg.send(message)
            print(response.status_code)
            print(response.body)
            print(response.headers)
        except Exception as e:
            print(str(e))
def main(event: func.EventHubEvent, cosmosDbDoc: func.Out[func.DocumentList]):
    logging.info('Python EventHub trigger function "ProcessSalesEvents" processed an event.')

    # Decode the Event Hub binary payload to JSON.
    message = event.get_body().decode('utf-8')
    sale_events = json.loads(message)
    logging.info(f'Event Count: {len(sale_events)}')

    # Collect the sales events and load them into Azure Cosmos DB in one call;
    # calling set() once per event would overwrite all but the last document.
    docs = func.DocumentList()
    for sale_event in sale_events:
        newdoc = sale_event
        newdoc['id'] = str(uuid.uuid4())
        docs.append(func.Document.from_dict(newdoc))
    cosmosDbDoc.set(docs)
def main(event: func.EventHubEvent):
    # For some reason, when executing in Azure we don't always get an
    # iterable, even with cardinality=many, so test for it.
    if isinstance(event, Iterable):
        logger.debug('iterable')
        event_data = [loads(e.get_body().decode('utf-8')) for e in event]
    else:
        logger.debug('not iterable')
        event_data = loads(event.get_body().decode('utf-8'))

    logger.info(f'Received trigger for {len(event_data)} items')
    logger.debug(f'payload: {dumps(event_data)}')
    return _write_to_database(event_data)
def main(event: func.EventHubEvent) -> str:
    text = ""
    try:
        tweet = json.loads(event.get_body().decode('utf-8'))
        text = tweet[0]["text"]
        logging.info('Python EventHub trigger processed a tweet: %s', text)
    except KeyError:
        logging.error('Error parsing tweet.')
    else:
        # Tokenize the tweet and output it (p is presumably the
        # tweet-preprocessor module, imported as p).
        tokenized = p.tokenize(text)
        logging.info('Tweet tokenized into: %s', tokenized)
        return tokenized