def get_and_print_total_face_count(self):
     """Periodically log local, peer, and combined face counts.

     Runs until ``self.perform_human_detection`` is cleared (see
     ``clean_up``), sampling every 5 seconds. Logs a warning line when the
     combined count reaches ``MAX_OCCUPANCY``.

     :return: None (loop exits when detection is stopped).
     """
     while self.perform_human_detection:
         time.sleep(5)
         # Fetch the messenger and the total once per iteration so the
         # logged total and the MAX_OCCUPANCY check below see the same
         # value (the original re-fetched on every call, which could
         # disagree mid-iteration).
         # NOTE(review): assumes SendReceiveMessages() is a cheap/singleton
         # accessor, as the original called it repeatedly — confirm.
         messages = SendReceiveMessages()
         total_count = messages.get_total_face_detected_count()
         Logger.logger().info("[INFO D 1]: {}".format(total_count))
         Logger.logger().info("[INFO L 2]: {}".format(messages.get_face_detected_count_locally()))
         Logger.logger().info("[INFO P 3]: {}".format(messages.get_face_detected_by_peer()))
         Logger.logger().info(
             "method_for_comparing_local_face_detected_and_global_face_detected: Compute total faces "
             "detected by both cameras: {}".format(total_count))
         if total_count >= MAX_OCCUPANCY:
             Logger.logger().info("Please wait because the occupancy is greater than {}".format(MAX_OCCUPANCY))
# Example #2
 def test_occupancy_10(self):
     """
     Validate that the occupancy detector correctly counts and classifies
     every person entering or exiting the premises on a typical temple day.
     :return:
     """
     detector = HumanDetector(
         find_humans_from_video_file_name=
         'videos/occupancy_test_videos/occupancy10.mp4',
         use_pi_camera=False,
         open_display=True)
     self.assertEqual(detector.perform_job(), None)
     centroids = detector.get_human_centroid_dict()
     # Expected direction per tracked human, in centroid-id order.
     expected_directions = [
         Direction.EXIT, Direction.ENTER, Direction.EXIT, Direction.EXIT,
         Direction.ENTER, Direction.ENTER, Direction.EXIT, Direction.EXIT,
         Direction.ENTER, Direction.ENTER, Direction.ENTER, Direction.ENTER,
     ]
     self.assertEqual(len(centroids), 12)
     for index, expected in enumerate(expected_directions):
         self.assertEqual(centroids[index].direction, expected)
     # Net occupancy: 7 enters - 5 exits = 2.
     self.assertEqual(
         SendReceiveMessages().get_face_detected_count_locally(), 2)
     detector.clean_up()
     self.__cleanup()
# Example #3
 def test_one_person_entering(self):
     """
     Validate that the occupancy detector detects a single person entering
     the premises and reports a local count of one.
     :return:
     """
     detector = HumanDetector(
         find_humans_from_video_file_name='videos/one_person_entering.mp4',
         use_pi_camera=False,
         open_display=False)
     self.assertEqual(detector.perform_job(), None)
     centroids = detector.get_human_centroid_dict()
     # Exactly one tracked human, moving inward.
     self.assertEqual(len(centroids), 1)
     self.assertEqual(centroids[0].direction, Direction.ENTER)
     self.assertEqual(
         SendReceiveMessages().get_face_detected_count_locally(), 1)
     detector.clean_up()
     self.__cleanup()
# Example #4
 def test_one_by_one_crossAbhiSrini(self):
     """
     Validate that the occupancy detector detects one person entering while
     another exits, yielding a net local count of zero.
     :return:
     """
     detector = HumanDetector(
         find_humans_from_video_file_name=
         'videos/TempleVideos/one_by_one_cross_AbhiSrini.mp4',
         use_pi_camera=False,
         open_display=True)
     self.assertEqual(detector.perform_job(), None)
     centroids = detector.get_human_centroid_dict()
     # One enter and one exit, in centroid-id order.
     expected_directions = [Direction.ENTER, Direction.EXIT]
     self.assertEqual(len(centroids), 2)
     for index, expected in enumerate(expected_directions):
         self.assertEqual(centroids[index].direction, expected)
     # The enter and exit cancel out.
     self.assertEqual(
         SendReceiveMessages().get_face_detected_count_locally(), 0)
     detector.clean_up()
     self.__cleanup()
# Example #5
 def test_grouped_exit(self):
     """
     Validate that the occupancy detector detects four persons grouped
     together exiting the premises.
     :return:
     """
     detector = HumanDetector(
         find_humans_from_video_file_name=
         'videos/TempleVideos/grouped_Exit.mp4',
         use_pi_camera=False,
         open_display=False)
     self.assertEqual(detector.perform_job(), None)
     centroids = detector.get_human_centroid_dict()
     # All four tracked humans move outward.
     self.assertEqual(len(centroids), 4)
     for index in range(4):
         self.assertEqual(centroids[index].direction, Direction.EXIT)
     # Four exits with no enters drives the local count to -4.
     self.assertEqual(
         SendReceiveMessages().get_face_detected_count_locally(), -4)
     detector.clean_up()
     self.__cleanup()
    def clean_up(self):
        """Stop human detection and release every resource held by this detector.

        Teardown order matters: the detection flag is cleared first so any
        loop polling ``self.perform_human_detection`` (e.g.
        ``get_and_print_total_face_count``) can exit, then peer messaging,
        FPS timing, log file, display windows, and finally the video stream
        are shut down.
        """
        # Signal all detection/reporting loops to stop.
        self.perform_human_detection = False
        SendReceiveMessages().cleanup()
        # stop the timer and display FPS information
        self.fps.stop()
        Logger.logger().debug("elapsed time: {:.2f}".format(self.fps.elapsed()))
        Logger.logger().debug("approx. FPS: {:.2f}".format(self.fps.fps()))

        # Close the log file.
        HumanValidator.close_log_file()

        # close any open windows
        cv2.destroyAllWindows()

        # clean up
        Logger.logger().debug("cleaning up...")
        # File-backed streams use cv2's release(); camera streams use the
        # threaded stream's stop() (see initialize_camera in __init__).
        if self.find_humans_from_video_file_name:
            self.video_stream.release()
        else:
            self.video_stream.stop()
        # Give background threads a moment to wind down before returning.
        time.sleep(2)
    def __init__(self, find_humans_from_video_file_name=None,
                 use_pi_camera=USE_PI_CAMERA, open_display=OPEN_DISPLAY):
        """Initialize the human detector.

        :param find_humans_from_video_file_name: optional path to a video
            file; when None the live camera is used instead (see clean_up's
            release()/stop() split).
        :param use_pi_camera: whether to capture from the Raspberry Pi camera.
        :param open_display: whether to show the detection output on screen.
        """
        # initialize the frame dimensions (we'll set them as soon as we read
        # the first frame from the video)
        self.H = None
        self.W = None
        self.video_stream = None
        self.net = None
        self.current_time_stamp = None
        self.frame = None
        self.rgb = None
        self.meter_per_pixel = None
        self.args = None
        # NOTE(review): parsing sys.argv inside a constructor means every
        # instantiation (including from tests) consumes the process's command
        # line — confirm this is intended.
        parser = argparse.ArgumentParser()
        parser.add_argument("-i", "--peer_ip_address", type=str,
                            help="Provide the IP address of the remote raspberry PI.")
        parser.add_argument("-p", "--peer_port", type=int, help="Provide the server port of the remote raspberry PI.",
                            default=SERVER_PORT)
        parser.add_argument('-d', '--debug', type=bool, help='Enable debug logging.', default=False)
        self.args = parser.parse_args()
        if self.args.debug:
            Logger.set_log_level(logging.DEBUG)
        self.find_humans_from_video_file_name = find_humans_from_video_file_name
        self.use_pi_camera = use_pi_camera
        self.open_display = open_display
        # Cleared by clean_up() to stop the detection/reporting loops.
        self.perform_human_detection = True

        # Start peer-to-peer messaging with the other camera node.
        SendReceiveMessages().perform_job(peer_ip_address=self.args.peer_ip_address)

        # Load Model
        self.load_model()
        # Initialize the camera.
        self.initialize_camera()

        # start the frames per second throughput estimator
        self.fps = FPS().start()
        self.centroid_object_creator = CentroidObjectCreator()