Example #1
    def test_get_stop_true(self):
        """Test that the stop value can be returned if the tracker has been
        stopped -- positive test case"""

        tt = TemperatureTracker()
        tt.stop()
        self.assertIsNotNone(tt.get_stop())
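
The docstring marks this as the positive case. A companion negative check might look like the sketch below, assuming get_stop() simply returns None before stop() has been called; the import path and test-class name are placeholders so the snippet is self-contained.

import unittest

from temperature_tracker import TemperatureTracker  # assumed module path


class TemperatureTrackerTest(unittest.TestCase):

    def test_get_stop_false(self):
        """The stop value should not be set before the tracker has been
        stopped -- negative test case (assumes get_stop() returns None)"""

        tt = TemperatureTracker()
        self.assertIsNone(tt.get_stop())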
Example #2
import time

import edgeiq
import numpy

# TemperatureTracker is assumed to live in a local temperature_tracker module
from temperature_tracker import TemperatureTracker


def main():

    # if you would like to test an additional model, add one to the list below:
    models = ["alwaysai/mobilenet_ssd",
              "alwaysai/ssd_inception_v2_coco_2018_01_28"]

    # if you've added a model, add a corresponding color as a list of tuples in BGR format
    # to make visualization easier (e.g. [(B, G, R)]).
    colors = [[(66, 68, 179)], [(50, 227, 62)]]

    detectors = []

    # initialize a list to hold temperature data
    TEMP_DATA = []
    temperature_tracker = TemperatureTracker(TEMP_DATA)

    # load all the models (creates a new object detector for each model)
    for model in models:

        # instantiate an object detector for the current model
        obj_detect = edgeiq.ObjectDetection(model)
        obj_detect.load(engine=edgeiq.Engine.DNN)

        # keep each object detector in the detectors list so all of them run below
        detectors.append(obj_detect)

        # print the details of each model to the console
        print("Model:\n{}\n".format(obj_detect.model_id))
        print("Engine: {}".format(obj_detect.engine))
        print("Accelerator: {}\n".format(obj_detect.accelerator))
        print("Labels:\n{}\n".format(obj_detect.labels))

    fps = edgeiq.FPS()

    try:
        with edgeiq.WebcamVideoStream(cam=0) as video_stream, \
                edgeiq.Streamer() as streamer:

            # Allow Webcam to warm up
            time.sleep(2.0)
            fps.start()

            # start the temperature tracker
            temperature_tracker.start()

            # run the detection loop
            while True:
                frame = video_stream.read()

                text = [""]

                # gather data from all the detectors
                for i in range(0, len(detectors)):
                    results = detectors[i].detect_objects(
                        frame, confidence_level=.5)
                    object_frame = edgeiq.markup_image(
                        frame, results.predictions, show_labels=False, colors=colors[i])

                    # for the first detector, its marked-up frame becomes the
                    # display frame
                    if i == 0:
                        display_frame = object_frame
                    else:

                        # otherwise, stack the new marked-up frame on top of
                        # the previous one (vertical concatenation along axis 0)
                        display_frame = numpy.concatenate(
                            (object_frame, display_frame))

                    # append each prediction to the on-screen text
                    for prediction in results.predictions:
                        text.append(
                            "Model {} detects {}: {:2.2f}% "
                            "(inference time: {:1.2f} s)".format(
                                detectors[i].model_id, prediction.label,
                                prediction.confidence * 100, results.duration))

                # record a reading of the CPU temperature
                temperature_tracker.update()

                # send the image frame and the predictions for both
                # prediction models to the output stream
                streamer.send_data(display_frame, text)

                fps.update()

                if streamer.check_exit():
                    break

    finally:
        fps.stop()
        temperature_tracker.stop()
        print("elapsed time: {:.2f}".format(fps.get_elapsed_seconds()))
        print("approx. FPS: {:.2f}".format(fps.compute_fps()))
        summary = temperature_tracker.summary()
        print(summary)

        print(*TEMP_DATA)

        print("Program Ending")
Example #3
import time

import edgeiq

# TemperatureTracker is assumed to live in a local temperature_tracker module
from temperature_tracker import TemperatureTracker


def main():

    obj_detect = edgeiq.ObjectDetection("alwaysai/mobilenet_ssd")
    obj_detect.load(engine=edgeiq.Engine.DNN)

    print("Loaded model:\n{}\n".format(obj_detect.model_id))
    print("Engine: {}".format(obj_detect.engine))
    print("Accelerator: {}\n".format(obj_detect.accelerator))
    print("Labels:\n{}\n".format(obj_detect.labels))

    temperature_tracker = TemperatureTracker()

    fps = edgeiq.FPS()

    try:
        with edgeiq.WebcamVideoStream(cam=0) as video_stream, \
                edgeiq.Streamer() as streamer:

            # Allow Webcam to warm up
            time.sleep(2.0)
            fps.start()

            # start the temperature tracker
            temperature_tracker.start()

            # run the detection loop
            while True:
                frame = video_stream.read()
                results = obj_detect.detect_objects(frame, confidence_level=.5)
                frame = edgeiq.markup_image(frame,
                                            results.predictions,
                                            colors=obj_detect.colors)

                # Generate text to display on streamer
                text = ["Model: {}".format(obj_detect.model_id)]
                text.append("Inference time: {:1.3f} s".format(
                    results.duration))
                text.append("Objects:")

                for prediction in results.predictions:
                    text.append("{}: {:2.2f}%".format(
                        prediction.label, prediction.confidence * 100))

                # record a reading of the CPU temperature
                temperature_tracker.update()

                # gather the most recent temperature reading and its timestamp
                now = temperature_tracker.now()

                # log the current temperature with its timestamp
                print(
                    str(now[0]) + " " +
                    time.strftime('%Y-%m-%d %H:%M:%S', now[1]))

                # report whether the temperature is safe for a Raspberry Pi 4
                if now[0] < temperature_tracker.MAX_TEMP_RASP4:
                    print("Temperature is safe")
                else:
                    print("You should shut down")

                streamer.send_data(frame, text)

                fps.update()

                # exit program if maximum safe temp has been reached
                if now[0] >= temperature_tracker.MAX_TEMP_RASP4:
                    print("Maximum safe temperature reached, stopping program")
                    break

                if streamer.check_exit():
                    break

    finally:
        fps.stop()

        # stop the temperature tracker
        temperature_tracker.stop()

        # print summary details for inference time
        print("elapsed time: {:.2f}".format(fps.get_elapsed_seconds()))
        print("approx. FPS: {:.2f}".format(fps.compute_fps()))

        # print summary details for the temperature tracker
        summary = temperature_tracker.summary()
        print(summary)

        print("Program Ending")