# Standard-library and third-party imports required by this class.
import base64
import time
from datetime import datetime
from threading import Thread

import cv2

# Helpers and TassAI are project classes; their import paths depend on the
# repository layout and are therefore not shown here.


class CamRead(Thread):
    """ CamRead Class

    Processes frames from the local camera, identifies known users and
    intruders, and streams the annotated frames to the socket server.
    """

    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("CamRead")
        super(CamRead, self).__init__()

        self.Helpers.logger.info("CamRead Class initialization complete.")

    def run(self):
        """ Runs the module. """

        fps = ""
        framecount = 0
        time1 = 0
        time2 = 0
        mesg = ""

        self.font = cv2.FONT_HERSHEY_SIMPLEX
        self.color = (0, 0, 0)

        # Starts the TassAI module
        self.TassAI = TassAI()
        # Connects to the camera
        self.TassAI.connect()
        # Loads the required models
        self.TassAI.load_models()
        # Loads known images
        self.TassAI.load_known()

        # One publish timestamp slot per known user, plus slot 0 for intruders
        self.publishes = [None] * (len(self.TassAI.faces_database) + 1)

        # Connects to the socket server
        # NOTE: self.Sockets and self.iot are not created in this class; they
        # are expected to be provided by the wider application.
        soc = self.Sockets.connect(self.Helpers.confs["Socket"]["host"],
                                   self.Helpers.confs["Socket"]["port"])

        while True:
            try:
                t1 = time.perf_counter()

                # Reads the current frame
                frame = self.TassAI.camera.get(0.65)
                width = frame.shape[1]

                # Processes the frame
                detections = self.TassAI.process(frame)

                # Writes header to frame
                cv2.putText(frame, "TassAI Camera", (10, 30), self.font,
                            0.5, self.color, 1, cv2.LINE_AA)

                # Writes date to frame
                cv2.putText(frame, str(datetime.now()), (10, 50), self.font,
                            0.3, self.color, 1, cv2.LINE_AA)

                if len(detections):
                    for roi, landmarks, identity in zip(*detections):
                        frame, label = self.TassAI.draw_detection_roi(
                            frame, roi, identity)
                        # frame = self.TassAI.draw_detection_keypoints(frame, roi, landmarks)

                        if label == "Unknown":
                            label = 0
                            mesg = "TassAI identified intruder"
                        else:
                            mesg = "TassAI identified User #" + str(label)

                        # Publishes only if the last publish for this user is
                        # older than the 120 second cool-down window
                        if (self.publishes[int(label)] is None or
                                (self.publishes[int(label)] + (1 * 120)) < time.time()):
                            # Updates publish time for user
                            self.publishes[int(label)] = time.time()

                            # Sends iotJumpWay notification to the Sensors channel
                            self.iot.channelPub("Sensors", {
                                "Type": "TassAI",
                                "Sensor": "USB Camera",
                                "Value": label,
                                "Message": mesg
                            })

                            # Sends iotJumpWay notification to the Cameras channel
                            self.iot.channelPub("Cameras", {
                                "Type": "TassAI",
                                "Sensor": "USB Camera",
                                "Value": label,
                                "Message": mesg
                            })

                # Writes the FPS to frame
                cv2.putText(frame, fps, (width - 120, 26), cv2.FONT_HERSHEY_SIMPLEX,
                            0.3, self.color, 1, cv2.LINE_AA)

                # Streams the modified frame to the socket server
                encoded, buffer = cv2.imencode('.jpg', frame)
                soc.send(base64.b64encode(buffer))

                # FPS calculation
                framecount += 1
                if framecount >= 15:
                    fps = "Stream: {:.1f} FPS".format(time1 / 15)
                    framecount = 0
                    time1 = 0
                    time2 = 0

                t2 = time.perf_counter()
                elapsedTime = t2 - t1
                time1 += 1 / elapsedTime
                time2 += elapsedTime

            except KeyboardInterrupt:
                self.TassAI.lcv.release()
                break
# Standard-library and third-party imports required by this class.
import base64
import time
from datetime import datetime
from threading import Thread

import cv2
from imutils import face_utils

# Helpers and TassAI are project classes imported from the repository's own
# modules.


class CamRead(Thread):
    """ CamRead Class

    Reads frames from a USB camera, identifies known users and intruders,
    and streams the annotated frames to the socket server.
    """

    def __init__(self):
        """ Initializes the class. """

        self.Helpers = Helpers("CamRead")
        super(CamRead, self).__init__()

        self.Helpers.logger.info("CamRead class initialized.")

    def run(self):
        """ Runs the module. """

        self.identified = 0

        # Starts the TassAI module
        self.TassAI = TassAI()
        self.TassAI.cv()
        self.TassAI.connect()
        self.TassAI.ncs()

        # Connects to the socket server
        # NOTE: self.Sockets and self.iot are not created in this class; they
        # are expected to be provided by the wider application.
        soc = self.Sockets.connect(self.Helpers.confs["Socket"]["host"],
                                   self.Helpers.confs["Socket"]["port"])

        fps = ""
        framecount = 0
        count = 0
        time1 = 0
        time2 = 0

        # One publish timestamp slot per known user, plus slot 0 for intruders
        self.publishes = [None] * (len(self.TassAI.NCS1.encoded) + 1)

        while True:
            time.sleep(0.05)
            try:
                t1 = time.perf_counter()

                # Reads the current frame
                frame = self.TassAI.camera.get(0.65)

                # Processes the frame
                raw, frame = self.TassAI.NCS1.prepareImg(frame)
                width = frame.shape[1]

                # Gets faces and coordinates
                faces, coords = self.TassAI.NCS1.faces(frame)

                # Writes header to frame
                cv2.putText(frame, "TassAI", (10, 30), self.TassAI.font,
                            0.7, self.TassAI.color, 2, cv2.LINE_AA)

                # Writes date to frame
                cv2.putText(frame, str(datetime.now()), (10, 50), self.TassAI.font,
                            0.5, self.TassAI.color, 2, cv2.LINE_AA)

                if len(coords):
                    i = 0
                    mesg = ""

                    # Loops through the detected faces
                    for (i, face) in enumerate(coords):
                        # Gets facial landmark coordinates
                        coordsi = face_utils.shape_to_np(face)

                        # Looks for matches/intruders
                        known, distance = self.TassAI.NCS1.match(raw, faces[i])

                        if known:
                            mesg = "TassAI identified User #" + str(known)
                        else:
                            mesg = "TassAI identified intruder"

                        # Publishes only if the last publish for this user is
                        # older than the 20 second cool-down window
                        if (self.publishes[int(known)] is None or
                                (self.publishes[int(known)] + (1 * 20)) < time.time()):
                            # Updates publish time for user
                            self.publishes[int(known)] = time.time()

                            # Sends iotJumpWay notification to the Sensors channel
                            self.iot.channelPub("Sensors", {
                                "Type": "TassAI",
                                "Sensor": "USB Camera",
                                "Value": known,
                                "Message": mesg
                            })

                            # Sends iotJumpWay notification to the Cameras channel
                            self.iot.channelPub("Cameras", {
                                "Type": "TassAI",
                                "Sensor": "USB Camera",
                                "Value": known,
                                "Message": mesg
                            })

                        # Draws the facial landmarks
                        for (x, y) in coordsi:
                            cv2.circle(frame, (x, y), 2, (0, 255, 0), -1)

                        # Moves on to the next detected face
                        i += 1

                # Writes the FPS to frame
                cv2.putText(frame, fps, (width - 170, 30), cv2.FONT_HERSHEY_SIMPLEX,
                            0.5, self.TassAI.color, 1, cv2.LINE_AA)

                # Streams the modified frame to the socket server
                encoded, buffer = cv2.imencode('.jpg', frame)
                soc.send(base64.b64encode(buffer))

                # FPS calculation
                framecount += 1
                if framecount >= 15:
                    fps = "Stream: {:.1f} FPS".format(time1 / 15)
                    framecount = 0
                    time1 = 0
                    time2 = 0

                t2 = time.perf_counter()
                elapsedTime = t2 - t1
                time1 += 1 / elapsedTime
                time2 += elapsedTime

                time.sleep(0.05)

            except KeyboardInterrupt:
                self.TassAI.camera.release()
                break
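# ---------------------------------------------------------------------------
# A minimal, standalone sketch of the publish-throttling logic used in run()
# above: a notification for a given user/label is only re-sent once the
# previous publish is older than the cool-down window (120 seconds in the
# first class, 20 seconds in the second). The names "cooldown", "publishes"
# and "should_publish" are local to this sketch and are not part of the
# repository.
# ---------------------------------------------------------------------------
import time

cooldown = 120
publishes = [None] * 3  # one slot per known user, plus slot 0 for intruders

def should_publish(label):
    """ Returns True and records the publish time if the cool-down for this
    label has expired, otherwise returns False. """
    if publishes[label] is None or publishes[label] + cooldown < time.time():
        publishes[label] = time.time()
        return True
    return False

print(should_publish(1))  # True  - first sighting of user 1
print(should_publish(1))  # False - still inside the cool-down window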