    def _detect(self, frame, gray):
        """Detects faces, compares them registered faces, and detects hands for gesture
            recognition if a match is found.
            :param frame: a BGR color image for display
            :param gray: a grayscale copy of the passed BGR frame
            :returns: (out_frame, username, gesture) the processed frame
                for display on webpage, the detected user, the detected gesture
        """
        username = ""
        gesture = "0"
        num_fingers = 0

        if self.gesture_tracker is None:  # not currently tracking hands
            faces = self._findFaces(frame)

            for (startX, startY, endX, endY) in faces:
                # draw the name label just above the face box, or just below
                # its top edge when the box sits too close to the frame's top
                y = startY - 10 if startY - 10 > 10 else startY + 10

                # gray may be a plain ndarray rather than a UMat here (it is
                # downloaded to an ndarray below once a face is processed),
                # in which case UMat slicing raises; fall back to ndarray
                # slicing
                try:
                    gray_face = cv2.UMat(gray, [startY, endY], [startX, endX])
                except Exception:
                    gray_face = gray[startY:endY, startX:endX]

                # optional resize for slightly improved performance
                gray_face = cv2.resize(gray_face, (100, 100))
                user_id, confidence = self.recognizer.predict(gray_face)
                # download gray to an ndarray once so the mask below can be
                # applied with slice assignment (UMat does not support it);
                # the guard keeps later loop iterations from failing
                if isinstance(gray, cv2.UMat):
                    gray = gray.get()

                # mask detected face region with solid black to avoid false
                # positives in hand detection
                gray[startY:endY, startX:endX] = self.black_mask[startY:endY,
                                                                 startX:endX]

                # for LBPH recognizer, lower confidence scores indicate better results
                if confidence <= 80:  # user is recognized
                    db = DBHelper()
                    username = db.getUsernameById(user_id)
                    cv2.putText(frame, username, (startX, y), self.font, .6,
                                (225, 105, 65), 2)
                else:
                    # face belongs to unknown user
                    cv2.putText(frame, "unknown", (startX, y), self.font, .6,
                                (0, 0, 255), 2)

            # a user was recognized, so hand detection begins
            if username != "" and len(faces) > 0:
                hands = self.hand_classifier.detectMultiScale(gray, 1.3, 5)

                # detected hand region is resized to allow for tracking an open hand
                for (x, y, w, h) in hands:
                    x_mid = (w // 2)
                    y = int(y - h * 1.3)
                    x = int(x - x_mid * 1.5)
                    w = int(w + 3 * x_mid)
                    h = int(h * 2 + h * 0.7)
                    cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 0, 255),
                                  2)

                    # only attempt to recognize hand gesture if background model is finished calibrating
                    if self.bg_model.calibrated:
                        self.gesture_tracker = GestureTracker(
                            frame, (x, y, w, h))

            # if no faces are in the frame, assume the frame is background
            # and feed it to the running-average background model
            if not self.bg_model.calibrated and len(faces) == 0:
                self.bg_model.runAverage(frame)

        else:  # hand has been detected and is being tracked by gesture_tracker
            timed_out, (x, y, w, h) = self.gesture_tracker.update(frame)
            if timed_out:
                self.gesture_tracker = None
            try:
                if isinstance(gray, cv2.UMat):
                    gray = gray.get()
                # segment the hand: difference the tracked ROI against the
                # calibrated background, then binarize at a fixed threshold
                difference = cv2.absdiff(
                    self.bg_model.background.astype("uint8")[y:y + h, x:x + w],
                    gray[y:y + h, x:x + w])
                foreground = cv2.threshold(difference, 25, 255,
                                           cv2.THRESH_BINARY)[1]
                gest, frame[y:y + h, x:x + w] = \
                    self.gesture_recognizer.recognize(foreground)
                self.last_gest = str(gest)
                gesture = self.last_gest
            except Exception:
                # recognition can fail on a degenerate ROI, e.g. when the
                # tracker's box falls partly outside the frame; skip it
                pass

        return (frame, username, gesture)
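

# ----------------------------------------------------------------------
# The bg_model used by _detect is not shown in this snippet. Below is a
# minimal sketch of what a running-average background model exposing the
# attributes _detect relies on (calibrated, background, runAverage) could
# look like, built on cv2.accumulateWeighted. The class name, frame count,
# and alpha are assumptions for illustration, not the project's actual
# implementation.
import cv2


class RunningAverageBackground:
    """Hypothetical stand-in for bg_model; assumed, not from the project."""

    def __init__(self, num_frames=30, alpha=0.5):
        self.background = None        # float32 grayscale running average
        self.calibrated = False
        self._remaining = num_frames  # calibration frames still needed
        self._alpha = alpha           # weight given to the newest frame

    def runAverage(self, frame):
        # accumulate a BGR frame into the grayscale running average
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        if self.background is None:
            self.background = gray.astype("float32")
        else:
            # background = alpha * gray + (1 - alpha) * background
            cv2.accumulateWeighted(gray, self.background, self._alpha)
        self._remaining -= 1
        if self._remaining <= 0:
            self.calibrated = True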
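

# ----------------------------------------------------------------------
# A minimal sketch of how _detect might be driven from a capture loop for
# local testing. The Detector class name and its zero-argument constructor
# are assumptions; only _detect's signature and (frame, username, gesture)
# return value come from the method above, whose output frame is meant for
# display on the webpage.
detector = Detector()  # assumed: wires up recognizer, classifiers, bg_model
cap = cv2.VideoCapture(0)
while True:
    ok, frame = cap.read()
    if not ok:
        break
    # _detect expects the BGR frame plus a grayscale copy of it
    gray = cv2.UMat(cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY))
    out_frame, username, gesture = detector._detect(frame, gray)
    cv2.imshow("detect", out_frame)
    if cv2.waitKey(1) & 0xFF == ord("q"):
        break
cap.release()
cv2.destroyAllWindows()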