def publish_async(topic: str, message: bytes, qos: QOS) -> concurrent.futures.Future:
    """Fire-and-forget publish to AWS IoT Core over Greengrass IPC.

    Builds the publish request, activates it on the shared ``ipc_client``,
    and returns the response future WITHOUT waiting on it, so the caller
    decides when (or whether) to block.

    :param topic: IoT Core topic name to publish on.
    :param message: raw payload bytes.
    :param qos: delivery quality-of-service for the publish.
    :return: future resolving to the publish response.
    """
    publish_request = PublishToIoTCoreRequest(
        topic_name=topic,
        payload=message,
        qos=qos,
    )
    op = ipc_client.new_publish_to_iot_core()
    op.activate(publish_request)
    return op.get_response()
def publish_results_to_iot_core(message):
    """Publish the highest-confidence inference result to AWS IoT Core.

    Serializes ``message`` as JSON, publishes it at-least-once on the
    module-level ``inference_result_topic``, and blocks up to 10 seconds
    for the acknowledgement.

    :param message: JSON-serializable object with the inference result.
    """
    global ipc_client
    req = PublishToIoTCoreRequest(
        topic_name=inference_result_topic,
        payload=json.dumps(message).encode("utf-8"),
        qos=QOS.AT_LEAST_ONCE,
    )
    op = ipc_client.new_publish_to_iot_core()
    op.activate(req)
    op.get_response().result(10)
def publishMessage_mqtt(mqtt_topic, payload):
    """Publish ``payload`` (JSON-serialized) to ``mqtt_topic`` via IoT Core.

    Uses at-least-once QoS and blocks up to ``TIMEOUT`` seconds for the
    broker acknowledgement. Failures are logged, never raised.

    :param mqtt_topic: MQTT topic string to publish on.
    :param payload: JSON-serializable object to send.
    """
    message = None  # defined up front so the except block can log it safely
    try:
        message = json.dumps(payload)
        request = PublishToIoTCoreRequest()
        request.topic_name = mqtt_topic
        request.payload = bytes(message, "utf-8")
        request.qos = QOS.AT_LEAST_ONCE
        operation = ipc_client.new_publish_to_iot_core()
        operation.activate(request)
        future = operation.get_response()
        future.result(TIMEOUT)
    except Exception as e:
        # BUG FIX: `message` was referenced before assignment whenever
        # json.dumps itself raised, turning the real error into a NameError;
        # fall back to the raw payload. Also log at error, not info.
        logging.error(
            "Publish MQTT Message Error : {0}, topic : {1}, payload : {2}".
            format(str(e), mqtt_topic, message if message is not None else payload))
def publish_results_to_cloud(self, PAYLOAD):
    r"""
    Ipc client creates a request and activates the operation to publish
    messages to the IoT core with a qos type over a topic.

    Blocks (up to ``config_utils.TIMEOUT``) for both the activation and
    the publish response.

    :param PAYLOAD: An dictionary object with inference results.
    """
    try:
        request = PublishToIoTCoreRequest(
            topic_name=config_utils.TOPIC,
            qos=config_utils.QOS_TYPE,
            payload=dumps(PAYLOAD).encode(),
        )
        operation = ipc_client.new_publish_to_iot_core()
        operation.activate(request).result(config_utils.TIMEOUT)
        config_utils.logger.info("Publishing results to the IoT core...")
        operation.get_response().result(config_utils.TIMEOUT)
    except Exception as e:
        # BUG FIX: Python 3 exceptions have no `.message` attribute, so the
        # old handler raised AttributeError and hid the original failure;
        # format the exception object itself instead.
        config_utils.logger.error("Exception occured during publish: {}".format(e))
def predict(image_data):
    r"""
    Predict image with DLR.

    Runs the DLC-optimized model, optionally publishes the top result to
    ``demo/topic`` (when ``enableSendMessages`` is set), and appends the
    top-N predictions above ``score_threshold`` to a per-model log file.

    :param image_data: numpy array of the image to run inference on.
    """
    try:
        # Run DLR to perform inference with DLC optimized model
        model_output = dlr_model.run(image_data)
        max_score_id = np.argmax(model_output)
        max_score = np.max(model_output)
        print("max score id:", max_score_id)
        print("class:", labels[max_score_id])
        print("max score", str(max_score))
        probabilities = model_output[0][0]
        sort_classes_by_probability = np.argsort(probabilities)[::-1]
        results_file = "{}/{}.log".format(
            results_directory, os.path.basename(os.path.realpath(model_path)))
        # Hand-built JSON string kept as-is for backward compatibility with
        # downstream consumers of this exact message shape.
        message = '{"class":"' + labels[
            max_score_id] + '"' + ',"confidence":"' + str(max_score) + '"}'
        payload = {
            "message": message,
            "timestamp": datetime.now().strftime('%Y-%m-%dT%H:%M:%S')
        }
        topic = "demo/topic"
        if enableSendMessages:
            # NOTE(review): qos is passed as the raw string '0' (at-most-once);
            # the explicit enum member would be clearer — confirm intent.
            ipc_client.new_publish_to_iot_core().activate(
                request=PublishToIoTCoreRequest(topic_name=topic, qos='0',
                                                payload=json.dumps(
                                                    payload).encode()))
        with open(results_file, 'a') as f:
            print("{}: Top {} predictions with score {} or above ".format(
                str(datetime.now()), max_no_of_results, score_threshold),
                  file=f)
            for i in sort_classes_by_probability[:max_no_of_results]:
                if probabilities[i] >= score_threshold:
                    print("[ Class: {}, Score: {} ]".format(
                        labels[i], probabilities[i]), file=f)
    except Exception as e:
        # BUG FIX: print() does not do %-interpolation; the old call printed
        # the literal format string and the exception as two separate args.
        print("Exception occurred during prediction: {}".format(e))
# OPC-UA -> IoT Core / SiteWise bridge: polls node ns=2;i=2 every loop,
# publishes the value to MQTT, and appends a SiteWise entry to a stream.
# NOTE(review): this snippet is TRUNCATED mid-statement — the
# `streamClient.append_message(stream_name,` call is cut off, so the code
# as captured here is not syntactically complete; left byte-identical.
connection = ipc_utils.connect() ipc_client = client.GreengrassCoreIPCClient(connection) opcUAclient = Client("opc.tcp://localhost:4840/freeopcua/server/") try: opcUAclient.connect() val = opcUAclient.get_node("ns=2;i=2") while True: dataValue = val.get_data_value().Value value = float(dataValue.Value) topic = "OPCUAServer1/test/myVariable" qos = QOS.AT_LEAST_ONCE request = PublishToIoTCoreRequest() request.topic_name = topic request.payload = bytes('{"value": "' + str(value) + '"}', "utf-8") request.qos = qos operation = ipc_client.new_publish_to_iot_core() operation.activate(request) future = operation.get_response() future.result(TIMEOUT) variant = Variant(double_value=value) siteWiseTopic = "/testvariable/opcua" try: print("Appending IoTSiteWiseEntry to stream") streamClient.append_message( stream_name,
# Endless demo loop: classify a random MNIST-shaped test sample and publish
# the verdict to IoT Core every INTERVAL seconds.
cnt = 0
while True:
    # Pick a random test sample.
    idx = np.random.randint(0, test_X.shape[0])
    img_array = test_X[idx:idx + 1, :, :, :]
    # BUG FIX: inference previously always ran on test_X[0] even though a
    # random index was drawn (img_array was computed and then ignored).
    pred_y = np.argmax(
        classifier_neo.run(img_array.reshape(1, 1, 28, 28))[0])
    result = 'anomaly' if pred_y % 2 == 0 else 'normal'
    cnt = cnt + 1
    message = {
        "timestamp": str(datetime.datetime.now()),
        "message": result,
        "counter": str(cnt),
        "component_version": "1.0.1",
        "thing_name": THING_NAME
    }
    request = PublishToIoTCoreRequest(topic_name=topic,
                                      qos=QOS.AT_LEAST_ONCE,
                                      payload=bytes(json.dumps(message),
                                                    "utf-8"))
    operation = ipc_client.new_publish_to_iot_core()
    operation.activate(request)
    future = operation.get_response()
    # Block for the broker ack before sleeping until the next round.
    future.result(TIMEOUT)
    logger.info("publish")
    sleep(INTERVAL)
def run():
    """Main inference loop: load the model, then predict and publish forever.

    Every 30 seconds a random sample image is preprocessed, run through the
    Edge Manager client, and the highest-confidence result is published to
    AWS IoT Core; tensors are optionally captured to Amazon S3.
    """
    global edge_manager_client
    ipc_client = awsiot.greengrasscoreipc.connect()
    try:
        response = edge_manager_client.LoadModel(
            LoadModelRequest(url=model_url, name=model_name))
    except Exception as e:
        # NOTE(review): load failure is reported but the loop still starts,
        # matching the original behavior — confirm whether it should abort.
        print('Model failed to load.')
        print(e)
    while True:
        time.sleep(30)
        print('New prediction')
        # Generalized: pick uniformly from however many URLs are configured
        # (previously hard-coded to indices 0..3).
        image_url = image_urls[random.randint(0, len(image_urls) - 1)]
        print('Picked ' + image_url + ' to perform inference on')

        # Scale image / preprocess
        img = cv2.imread(image_url)
        frame = resize_short_within(img, short=SIZE, max_size=SIZE * 2)
        nn_input_size = SIZE
        nn_input = cv2.resize(frame,
                              (nn_input_size, int(nn_input_size / 4 * 3)))
        nn_input = cv2.copyMakeBorder(nn_input, int(nn_input_size / 8),
                                      int(nn_input_size / 8), 0, 0,
                                      cv2.BORDER_CONSTANT, value=(0, 0, 0))
        copy_frame = nn_input[:]
        nn_input = nn_input.astype('float32')
        nn_input = nn_input.reshape((nn_input_size * nn_input_size, 3))
        scaled_frame = np.transpose(nn_input)

        # Call prediction
        request = PredictRequest(name=model_name,
                                 tensors=[
                                     Tensor(tensor_metadata=TensorMetadata(
                                         name=tensor_name, data_type=5,
                                         shape=tensor_shape),
                                         byte_data=scaled_frame.tobytes())
                                 ])
        try:
            response = edge_manager_client.Predict(request)
        except Exception as e:
            print('Prediction failed')
            print(e)
            # BUG FIX: on failure the old code fell through and read
            # `response`, which is undefined on the first iteration (NameError)
            # and stale afterwards; skip this round instead.
            continue

        # read output tensors and append them to matrix
        detections = []
        for t in response.tensors:
            deserialized_bytes = np.frombuffer(t.byte_data, dtype=np.float32)
            detections.append(np.asarray(deserialized_bytes))

        # Get the highest confidence inference result
        index = np.argmax(detections[0])
        result = class_labels[index]
        confidence = detections[0][index]

        # Print results in local log
        print('Result is ', result)
        print('Confidence is ', confidence)

        # Publish highest confidence result to AWS IoT Core
        print('Got inference results, publishing to AWS IoT Core')
        message = {"result": result, "confidence": str(confidence)}
        request = PublishToIoTCoreRequest()
        request.topic_name = inference_result_topic
        request.payload = bytes(json.dumps(message), "utf-8")
        request.qos = QOS.AT_LEAST_ONCE
        operation = ipc_client.new_publish_to_iot_core()
        operation.activate(request)
        future = operation.get_response()
        future.result(10)

        # capture inference results in S3 if enabled
        if capture_inference:
            print('Capturing inference data in Amazon S3')
            now = time.time()
            seconds = int(now)
            nanos = int((now - seconds) * 10**9)
            timestamp = Timestamp(seconds=seconds, nanos=nanos)
            request = CaptureDataRequest(
                model_name=model_name,
                capture_id=str(uuid.uuid4()),
                inference_timestamp=timestamp,
                input_tensors=[
                    Tensor(tensor_metadata=TensorMetadata(
                        name="input", data_type=5, shape=tensor_shape),
                        byte_data=scaled_frame.tobytes())
                ],
                output_tensors=[
                    Tensor(tensor_metadata=TensorMetadata(
                        name="output", data_type=5, shape=[1, 257]),
                        byte_data=detections[0].tobytes())
                ])
            try:
                response = edge_manager_client.CaptureData(request)
            except Exception as e:
                print('CaptureData request failed')
                print(e)