def reset_count_variables(self):
    Logger.logger().info(
        "Resetting __total_faces_detected_locally, __total_faces_detected_by_peer, "
        "total_faces_detected counts to 0.")
    self.__total_faces_detected_locally = 0
    self.__total_faces_detected_by_peer = 0
    self.total_faces_detected = 0
def __send_face_detected_count_via_socket(self, sock, peer_server_address):
    """
    This method sends the face detected count via the connected socket.
    :param sock: connected socket object.
    :param peer_server_address: tuple of (ip, port) of the peer server.
    :return:
    """
    curr_count = 0
    while SendReceiveMessages.run_program:
        try:
            if self.__total_faces_detected_locally != curr_count:
                # Send the updated count to the peer.
                Logger.logger().debug(
                    "Client method_for_transmitting_face_detected_locally: Sending total_"
                    "faces_detected_locally={} to peer ip={}, "
                    "port={}.".format(self.__total_faces_detected_locally, *peer_server_address))
                sock.sendall(str(self.__total_faces_detected_locally).encode())
                curr_count = self.__total_faces_detected_locally
            time.sleep(1)
        except Exception:
            Logger.logger().info(
                'Client method_for_transmitting_face_detected_locally: Exception: '
                'closing client socket')
            sock.close()
            break
def method_for_transmitting_face_detected_locally(self, peer_ip_address, peer_port=SERVER_PORT):
    """
    This method is used for transmitting the __total_faces_detected_locally count to the peer.
    :param peer_ip_address: str
    :param peer_port: int
    :return:
    """
    if not peer_ip_address:
        return
    # Create a TCP/IP socket and connect it to the port where the peer server is listening.
    peer_server_address = (peer_ip_address, peer_port)
    successfully_connected_to_peer = False
    while SendReceiveMessages.run_program and not successfully_connected_to_peer:
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                Logger.logger().info("Connecting to peer {}...".format(peer_server_address))
                s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                s.connect(peer_server_address)
                successfully_connected_to_peer = True
                self.__send_face_detected_count_via_socket(s, peer_server_address)
        except Exception as e:
            Logger.logger().error(type(e).__name__ + ': ' + str(e))
            print("Exception in user code:")
            print("-" * 60)
            traceback.print_exc(file=sys.stdout)
            print("-" * 60)
            time.sleep(1)
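# A minimal wiring sketch (not part of the original class) showing how the transmit
# and receive methods above could be run as peer-to-peer threads. The threading
# targets and the "10.0.0.2" peer address are illustrative assumptions; the original
# code appears to start these threads from SendReceiveMessages().perform_job() instead.
#
# import threading
#
# messenger = SendReceiveMessages()
# receiver_thread = threading.Thread(
#     target=messenger.method_for_receiving_face_detected_by_peer, daemon=True)
# sender_thread = threading.Thread(
#     target=messenger.method_for_transmitting_face_detected_locally,
#     args=("10.0.0.2",), kwargs={"peer_port": SERVER_PORT}, daemon=True)
# receiver_thread.start()
# sender_thread.start()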
def log_total_num(self, Num_of_people):
    if not self.csv_file:
        self.initialize_log_file()
    Logger.logger().info(Num_of_people)
    info = "{}, {}\n".format(self.todays_date.hour, Num_of_people)
    self.csv_file.write(info)
    self.csv_file.flush()
    self.close_log_file()
def decrement_face_detected_locally(self):
    """
    The caller SHALL invoke this API to decrement the face count detected locally.
    :return:
    """
    self.__total_faces_detected_locally -= 1
    Logger.logger().info(
        "Decrementing the total faces detected locally to {}.".format(
            self.__total_faces_detected_locally))
def thread_for_face_tracker(self):
    return_value = True
    while self.perform_human_detection:
        try:
            self.loop_over_streams()
        except Exception as e:
            Logger.logger().error(
                "Caught an exception while looping over streams {}, rebooting....".format(
                    type(e).__name__ + ': ' + str(e)))
            return_value = False
    return return_value
def get_and_print_total_face_count(self):
    while self.perform_human_detection:
        time.sleep(5)
        Logger.logger().info("[INFO D 1]: {}".format(
            SendReceiveMessages().get_total_face_detected_count()))
        Logger.logger().info("[INFO L 2]: {}".format(
            SendReceiveMessages().get_face_detected_count_locally()))
        Logger.logger().info("[INFO P 3]: {}".format(
            SendReceiveMessages().get_face_detected_by_peer()))
        Logger.logger().info(
            "method_for_comparing_local_face_detected_and_global_face_detected: Compute total faces "
            "detected by both cameras: {}".format(SendReceiveMessages().get_total_face_detected_count()))
        if SendReceiveMessages().get_total_face_detected_count() >= MAX_OCCUPANCY:
            Logger.logger().info(
                "Please wait because the occupancy has reached the maximum of {}.".format(MAX_OCCUPANCY))
def method_for_comparing_local_face_detected_and_global_face_detected(self):
    """
    This method is used to compare the face count detected locally vs the count detected
    by the peer and take the corresponding action.
    :return:
    """
    Logger.logger().info(
        "Running thread method_for_comparing_local_face_detected_and_global_face_detected...")
    while SendReceiveMessages.run_program:
        self.total_faces_detected = (self.__total_faces_detected_locally +
                                     self.__total_faces_detected_by_peer)
        if self.total_faces_detected >= MAX_OCCUPANCY:
            PlayAudio.play_audio_file()
        time.sleep(5)
def yield_a_human_tracker_object(cls, objects):
    for (objectID, centroid) in objects.items():
        # Check to see if a trackable object exists for the current object ID.
        human_tracker_object = cls.human_tracking_dict.get(objectID, None)
        if not human_tracker_object:
            # If there is no existing trackable object, create one.
            Logger.logger().debug(
                "Creating a new human tracker object with object id = {}.".format(objectID))
            human_tracker_object = HumanTracker(objectID, centroid)
            cls.human_tracking_dict[objectID] = human_tracker_object
        else:
            human_tracker_object.centroids.append(centroid)
            human_tracker_object.timestamp_list.append(datetime.now())
        yield human_tracker_object, objectID, centroid
def loop_over_detections_fetch_tracker_update_trackers_list(self, detections):
    """
    Loop over the detections. If the confidence of a detection is greater than the
    minimum confidence level:
    1. Extract the index of the class label from the detections list.
    2. Make sure that the class label is a person.
    3. Compute the (x, y) coordinates of the bounding box.
    4. Draw the bounding box on the frame.
    5. Append the bounding box to self.rects.
    """
    # Loop over the detections.
    for i in np.arange(0, detections.shape[2]):
        # Extract the confidence (i.e., probability) associated with the prediction.
        confidence = detections[0, 0, i, 2]
        # Filter out weak detections by ensuring the `confidence`
        # is greater than the minimum confidence.
        if confidence > MIN_CONFIDENCE:
            # Extract the index of the class label from the detections list.
            idx = int(detections[0, 0, i, 1])
            # If the class label is not a person, ignore it.
            if CLASSES[idx] != "person":
                Logger.logger().debug(
                    "class label {} is not a person.".format(CLASSES[idx]))
                continue
            self.total_frames += 1
            # Compute the (x, y)-coordinates of the bounding box for the object.
            box = detections[0, 0, i, 3:7] * np.array(
                [self.W, self.H, self.W, self.H])
            (startX, startY, endX, endY) = box.astype("int")
            cv2.rectangle(self.frame, (startX, startY), (endX, endY),
                          (0, 255, 0), 2)
            # Update the bounding box rectangles list.
            self.rects.append(box.astype("int"))
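# A minimal sketch (an assumption, not part of the original class) of how the
# `detections` tensor consumed above is typically produced from self.frame with the
# MobileNet-SSD Caffe model loaded in load_model(). The method name, the 300x300
# input size and the 0.007843 scale factor are the usual values for that model,
# assumed here rather than taken from the source.
#
# def run_inference_on_current_frame(self):
#     # Convert the current frame into a blob and run a forward pass through the network.
#     blob = cv2.dnn.blobFromImage(self.frame, 0.007843, (300, 300), 127.5)
#     self.net.setInput(blob)
#     return self.net.forward()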
def handle_the_case_where_grace_time_for_tracking_is_over(
        cls, now, human_tracker_object, send_recv_msg_instance, keep_dict_items):
    """
    This method handles the case where the grace time (TIMEOUT_FOR_TRACKER) for the
    tracker object is over.
    :param now: timestamp
    :param human_tracker_object: Instance of type HumanTracker.
    :param send_recv_msg_instance: Instance of type SendReceiveMessages.
    :param keep_dict_items: Preserve dictionary items (for debug purposes).
    :return:
    """
    if human_tracker_object.estimated and human_tracker_object.logged and not keep_dict_items:
        # Delete this object from the human tracking dict.
        Logger.logger().debug(
            "Deleting objectId {} from the human_tracking_dict.".format(
                human_tracker_object.objectID))
        cls.clear_object_from_speed_tracking_dict(human_tracker_object.objectID)
    else:
        Logger.logger().debug(
            "Computing direction for objectId {} because there are no recorded"
            " movements for this object in human_tracking_dict.".format(
                human_tracker_object.objectID))
        cls.compute_direction(human_tracker_object)
        human_tracker_object.estimated = True
        # Finally, log it.
        Logger.logger().debug(
            "Perform logging for objectId {} found in the human_tracking_dict.".format(
                human_tracker_object.objectID))
        HumanValidator.validate_column_movement(
            human_tracker_object, now, None,
            human_tracker_object.objectID, send_recv_msg_instance)
def __init__(self, find_humans_from_video_file_name=None, use_pi_camera=USE_PI_CAMERA,
             open_display=OPEN_DISPLAY):
    # Initialize the frame dimensions (we'll set them as soon as we read
    # the first frame from the video).
    self.H = None
    self.W = None
    self.video_stream = None
    self.net = None
    self.current_time_stamp = None
    self.frame = None
    self.rgb = None
    self.meter_per_pixel = None
    self.args = None
    parser = argparse.ArgumentParser()
    parser.add_argument("-i", "--peer_ip_address", type=str,
                        help="Provide the IP address of the remote Raspberry Pi.")
    parser.add_argument("-p", "--peer_port", type=int,
                        help="Provide the server port of the remote Raspberry Pi.",
                        default=SERVER_PORT)
    # store_true avoids the argparse type=bool pitfall where any value parses as True.
    parser.add_argument('-d', '--debug', action='store_true',
                        help='Enable debug logging.')
    self.args = parser.parse_args()
    if self.args.debug:
        Logger.set_log_level(logging.DEBUG)
    self.find_humans_from_video_file_name = find_humans_from_video_file_name
    self.use_pi_camera = use_pi_camera
    self.open_display = open_display
    self.perform_human_detection = True
    SendReceiveMessages().perform_job(peer_ip_address=self.args.peer_ip_address)
    # Load the model.
    self.load_model()
    # Initialize the camera.
    self.initialize_camera()
    # Start the frames per second throughput estimator.
    self.fps = FPS().start()
    self.centroid_object_creator = CentroidObjectCreator()
def grab_next_frame(self):
    """
    1. Grab the next frame from the stream.
    2. Store the current timestamp and the new date.
    """
    if self.find_humans_from_video_file_name:
        if self.video_stream.isOpened():
            ret, self.frame = self.video_stream.read()
        else:
            Logger.logger().info("Unable to open video stream...")
            raise ValueError
    else:
        self.frame = self.video_stream.read()
    if self.frame is None:
        return
    self.current_time_stamp = datetime.now()
    # Resize the frame.
    self.frame = imutils.resize(self.frame, width=FRAME_WIDTH_IN_PIXELS)
    # width = FRAME_WIDTH_IN_PIXELS
    # height = self.frame.shape[0]  # keep original height
    # dim = (width, height)
    # self.frame = cv2.resize(self.frame, dim, interpolation=cv2.INTER_AREA)
    self.rgb = cv2.cvtColor(self.frame, cv2.COLOR_BGR2RGB)
def initialize_camera(self):
    """
    Initialize the video stream and allow the camera sensor to warm up.
    """
    if self.find_humans_from_video_file_name:
        self.find_humans_from_video_file_name = os.path.join(
            os.path.dirname(__file__), self.find_humans_from_video_file_name)
        Logger.logger().info("Reading the input video file {}.".format(
            self.find_humans_from_video_file_name))
        self.video_stream = cv2.VideoCapture(self.find_humans_from_video_file_name)
        if not self.video_stream.isOpened():
            Logger.logger().error("cv2.VideoCapture() could not open {}.".format(
                self.find_humans_from_video_file_name))
            raise ValueError
        # self.video_stream.set(cv2.CAP_PROP_FPS, int(10))
    elif self.use_pi_camera:
        Logger.logger().info("Warming up the Raspberry Pi camera connected via the PCB slot.")
        self.video_stream = VideoStream(usePiCamera=True).start()
    else:
        Logger.logger().debug("Setting video capture device to {}.".format(VIDEO_DEV_ID))
        self.video_stream = VideoStream(src=VIDEO_DEV_ID).start()
    time.sleep(2.0)
def load_model(self):
    """
    Load our serialized model from disk.
    """
    Logger.logger().info("Loading model name:{}, proto_text:{}.".format(MODEL_NAME, PROTO_TEXT_FILE))
    self.net = cv2.dnn.readNetFromCaffe(
        os.path.join(os.path.dirname(os.path.realpath(__file__)), PROTO_TEXT_FILE),
        os.path.join(os.path.dirname(os.path.realpath(__file__)), MODEL_NAME))
    if self.use_pi_camera:
        # Set the target to the MOVIDIUS NCS stick connected via USB.
        # Prerequisite: https://docs.openvinotoolkit.org/latest/_docs_install_guides_installing_openvino_raspbian.html
        Logger.logger().info("Setting MOVIDIUS NCS stick connected via USB as the target to run the model.")
        self.net.setPreferableTarget(cv2.dnn.DNN_TARGET_MYRIAD)
    else:
        Logger.logger().info("Setting target to CPU.")
        self.net.setPreferableTarget(cv2.dnn.DNN_TARGET_CPU)
def clean_up(self):
    self.perform_human_detection = False
    SendReceiveMessages().cleanup()
    # Stop the timer and display FPS information.
    self.fps.stop()
    Logger.logger().debug("elapsed time: {:.2f}".format(self.fps.elapsed()))
    Logger.logger().debug("approx. FPS: {:.2f}".format(self.fps.fps()))
    # Close the log file.
    HumanValidator.close_log_file()
    # Close any open windows.
    cv2.destroyAllWindows()
    # Clean up the video stream.
    Logger.logger().debug("cleaning up...")
    if self.find_humans_from_video_file_name:
        self.video_stream.release()
    else:
        self.video_stream.stop()
    time.sleep(2)
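# A minimal end-to-end usage sketch (an assumption, not part of the original module).
# The class name FindHumans below is hypothetical -- the real detector class that owns
# __init__, thread_for_face_tracker() and clean_up() is not shown in this excerpt.
#
# if __name__ == "__main__":
#     detector = FindHumans()
#     try:
#         detector.thread_for_face_tracker()
#     except KeyboardInterrupt:
#         pass
#     finally:
#         detector.clean_up()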
def format_and_send_email(cls, local_enter_csv_sheet=ENTER_LOG_FILE_NAME,
                          peer_enter_csv_sheet=PEER_ENTER_LOG_FILE_NAME,
                          local_exit_csv_sheet=EXIT_LOG_FILE_NAME,
                          peer_exit_csv_sheet=PEER_EXIT_LOG_FILE_NAME,
                          weekly_enter_csv=WEEKLY_LOG_FILE_NAME,
                          monthly_enter_csv=MONTHLY_LOG_FILE_NAME,
                          merged_enter_csv=MERGED_ENTER_CSV,
                          merged_exit_csv=MERGED_EXIT_CSV):
    """
    This method combines the local and peer enter/exit csv files into one merged pair of
    enter and exit csv files. It appends the daily enter csv to the weekly csv, and the
    weekly csv to the monthly csv, depending on what day and date it is.
    :param merged_exit_csv:
    :param merged_enter_csv:
    :param local_enter_csv_sheet:
    :param peer_enter_csv_sheet:
    :param local_exit_csv_sheet:
    :param peer_exit_csv_sheet:
    :param weekly_enter_csv:
    :param monthly_enter_csv:
    :return:
    """
    if MERGE_FILES:
        Logger.logger().info("[INFO] Merging Files...")
        cls.merge_files(file1=local_enter_csv_sheet, file2=peer_enter_csv_sheet,
                        file3=merged_enter_csv)
        cls.merge_files(file1=local_exit_csv_sheet, file2=peer_exit_csv_sheet,
                        file3=merged_exit_csv)
    else:
        Logger.logger().info("[INFO] NOT Merging Files...")
    day = datetime.datetime.now().strftime("%A")
    lines = []
    # Append today's enter log to the weekly csv (skipping the header line).
    with open(local_enter_csv_sheet, "r") as dailyfile:
        for line in dailyfile:
            lines.append(line)
    with open(weekly_enter_csv, "a") as weeklyfile:
        try:
            lines.pop(0)
        except Exception as e:
            Logger.logger().info(type(e).__name__ + ': ' + str(e))
        for line in lines:
            weeklyfile.write(line)
    lines.clear()
    if day == DAY:
        # Once a week, roll the weekly csv into the monthly csv (skipping the header line).
        with open(weekly_enter_csv, "r") as weeklyfile:
            for line in weeklyfile:
                lines.append(line)
        with open(monthly_enter_csv, "a") as monthlyfile:
            try:
                lines.pop(0)
            except Exception as e:
                Logger.logger().info(type(e).__name__ + ': ' + str(e))
            for line in lines:
                monthlyfile.write(line)
        lines.clear()
    Logger.logger().info("[INFO] Sending Email...")
    cls.email_send()
    Logger.logger().info("[INFO] Email Sent...")
    if CLEAR_FILES:
        Logger.logger().info("[INFO] Clearing file(s)...")
        cls.clear_all_files()
def method_for_receiving_face_detected_by_peer(self, local_ip_address='0.0.0.0',
                                               local_port=SERVER_PORT):
    """
    This method is used for receiving the face count detected by the peer.
    :return:
    """
    while SendReceiveMessages.run_program:
        Logger.logger().info("Running method_for_receiving_face_detected_by_peer...")
        # Initialize a TCP server socket using SOCK_STREAM and bind it to the port.
        server_address = (local_ip_address, local_port)
        try:
            Logger.logger().info(
                'Server method_for_receiving_face_detected_by_peer: starting up on {} port {}'
                .format(*server_address))
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
                s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
                s.bind(server_address)
                s.listen(1)
                s.setblocking(True)
                s.settimeout(5.0)
                Logger.logger().info(
                    'Server {} method_for_receiving_face_detected_by_peer: Waiting for a '
                    'connection'.format(server_address))
                conn, addr = s.accept()
                with conn:
                    Logger.logger().info(
                        'Server {}: received connection from peer {}.'.format(
                            server_address, addr))
                    while SendReceiveMessages.run_program:
                        Logger.logger().info("Run program is set to True.")
                        data = conn.recv(MAX_NUMBER_OF_RCV_BYTES)
                        if data:
                            Logger.logger().debug(
                                'Server {}: received {} from peer {}.'.format(
                                    server_address, data, addr))
                            data = data.decode('utf-8')
                            self.__total_faces_detected_by_peer = int(data)
                            Logger.logger().debug(
                                "Server {}: total_faces_detected_by_peer = {}".format(
                                    server_address, self.__total_faces_detected_by_peer))
                        else:
                            # An empty read means the peer closed the connection;
                            # break out and wait for a new connection.
                            Logger.logger().debug(
                                "server method_for_receiving_face_detected_by_peer: data is Null")
                            break
        except Exception as e:
            Logger.logger().error(type(e).__name__ + ': ' + str(e))
            time.sleep(1)
def email_send(cls, enter_csv_sheet=ENTER_LOG_FILE_NAME, exit_csv_sheet=EXIT_LOG_FILE_NAME,
               weekly_enter_csv=WEEKLY_LOG_FILE_NAME, monthly_enter_csv=MONTHLY_LOG_FILE_NAME,
               peer_enter_log_file=PEER_ENTER_LOG_FILE_NAME,
               peer_exit_log_file=PEER_EXIT_LOG_FILE_NAME,
               merged_enter_csv=MERGED_ENTER_CSV, merged_exit_csv=MERGED_EXIT_CSV,
               hourly_log_file=HOURLY_CSV):
    """
    This method sends an email with the provided credentials.
    :param hourly_log_file: str
    :param peer_exit_log_file: str
    :param peer_enter_log_file: str
    :param monthly_enter_csv: str
    :param weekly_enter_csv: str
    :param enter_csv_sheet: str
    :param exit_csv_sheet: str
    :param merged_enter_csv: str
    :param merged_exit_csv: str
    :return: True if the email was sent successfully, False otherwise.
    """
    email_sent_status = False
    day = datetime.datetime.now().strftime("%A")
    date = datetime.date.today().day
    Logger.logger().debug("Running send_email function")
    enter_csv_sheet = os.path.join(os.path.dirname(__file__), enter_csv_sheet)
    exit_csv_sheet = os.path.join(os.path.dirname(__file__), exit_csv_sheet)
    weekly_enter_csv = os.path.join(os.path.dirname(__file__), weekly_enter_csv)
    monthly_enter_csv = os.path.join(os.path.dirname(__file__), monthly_enter_csv)
    peer_enter_log_file = os.path.join(os.path.dirname(__file__), peer_enter_log_file)
    peer_exit_log_file = os.path.join(os.path.dirname(__file__), peer_exit_log_file)
    merged_enter_csv = os.path.join(os.path.dirname(__file__), merged_enter_csv)
    merged_exit_csv = os.path.join(os.path.dirname(__file__), merged_exit_csv)
    hourly_log_file = os.path.join(os.path.dirname(__file__), hourly_log_file)
    Logger.logger().debug(enter_csv_sheet)
    Logger.logger().debug(exit_csv_sheet)
    Logger.logger().debug(weekly_enter_csv)
    Logger.logger().debug(monthly_enter_csv)
    Logger.logger().debug(peer_enter_log_file)
    Logger.logger().debug(peer_exit_log_file)
    Logger.logger().debug(merged_enter_csv)
    Logger.logger().debug(merged_exit_csv)
    Logger.logger().debug(hourly_log_file)

    msg = MIMEMultipart()
    sender_email = "*****@*****.**"
    receiver_email = ("[email protected], [email protected], [email protected], "
                      "[email protected], [email protected]")
    password = "******"  # keyring.get_password("gmail", "*****@*****.**")
    msg['From'] = '*****@*****.**'
    msg['To'] = ("[email protected], [email protected], [email protected], "
                 "[email protected], [email protected]")
    msg['Date'] = formatdate(localtime=True)
    msg['Subject'] = 'Here is the Occupancy List for {}'.format(datetime.date.today())

    total_count_of_people_entered = cls.get_count_file(merged_enter_csv)
    total_count_of_people_exited = cls.get_count_file(merged_exit_csv)
    body = 'Dear Board Members,\n'

    attachmentsList = [enter_csv_sheet, exit_csv_sheet, peer_enter_log_file,
                       peer_exit_log_file, merged_enter_csv, merged_exit_csv,
                       hourly_log_file]
    if day == DAY:
        attachmentsList.append(weekly_enter_csv)
    if date == DATE:
        attachmentsList.append(monthly_enter_csv)
    for each_file_path in attachmentsList:
        file_name = each_file_path.split("/")[-1]
        part = MIMEBase('application', "octet-stream")
        with open(each_file_path, "rb") as attachment_file:
            part.set_payload(attachment_file.read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition', 'attachment', filename=file_name)
        msg.attach(part)

    if len(attachmentsList) == 2:
        body = body + 'Please find the attached daily occupancy tracker sheet for your reference.\n'
    elif len(attachmentsList) == 3:
        body = body + 'Please find the attached daily and weekly occupancy tracker sheet for your reference.\n'
    elif len(attachmentsList) == 4:
        body = body + 'Please find the attached daily, weekly and monthly occupancy tracker sheet for your reference.\n'

    if total_count_of_people_entered >= total_count_of_people_exited:
        body = body + 'Total People that visited the temple today: {}\n'.format(
            total_count_of_people_entered)
    else:
        body = body + 'Total People that visited the temple today: {}\n'.format(
            total_count_of_people_exited)
    number_of_unmonitored_people = total_count_of_people_entered - total_count_of_people_exited
    if number_of_unmonitored_people > 0:
        body = body + 'Approximately {} people exited through unmonitored doors today.\n'.format(
            number_of_unmonitored_people)
    elif number_of_unmonitored_people < 0:
        body = body + 'Approximately {} people entered through unmonitored doors today.\n'.format(
            abs(number_of_unmonitored_people))
    body = body + ("*Note: The results from the occupancy tracker are 98% accurate as it usually "
                   "does not count little infants/toddlers.*\n")
    body = body + "\nThanks and regards,\nPI_Defense"
    msg.attach(MIMEText(body, "plain"))

    context = ssl.create_default_context()
    try:
        with smtplib.SMTP_SSL("smtp.gmail.com", 465, context=context) as server:
            server.login(sender_email, password)
            server.sendmail(sender_email, receiver_email.split(","), msg.as_string())
    except Exception as e:
        print(type(e).__name__ + ': ' + str(e))
    else:
        email_sent_status = True
    return email_sent_status
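# A minimal scheduling sketch (an assumption, not part of the original module) showing
# how format_and_send_email() above might be invoked once per day. The class name
# EmailSender and the 24-hour sleep interval are hypothetical.
#
# import threading
#
# def daily_email_job():
#     while True:
#         EmailSender.format_and_send_email()
#         time.sleep(24 * 60 * 60)  # send one occupancy report per day
#
# threading.Thread(target=daily_email_job, daemon=True).start()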