Example #1
# `face_detection` and `emotionrecognition` are project modules, and `args`
# is the parsed command-line namespace holding the `recording_emotion` path.
import os
import time

import scipy.misc

import face_detection
import emotionrecognition
def saveFaceImage(capture, frequency, display, drawFaces):
    img_count = 0

    # Create the directory in which we record the training examples.
    if not os.path.exists(args.recording_emotion):
        os.makedirs(args.recording_emotion)

    while True:
        flag, frame = capture.read()

        if flag:
            faceCoordinates = face_detection.getFaceCoordinates(frame)
        else:
            # Guard against a NameError when the capture yields no frame.
            faceCoordinates = None

        if faceCoordinates:
            image = emotionrecognition.preprocess(frame, faceCoordinates)
            # Save the image that will later be used for training.
            scipy.misc.imsave(
                os.path.join(args.recording_emotion,
                             args.recording_emotion + str(img_count) + '.png'),
                image)

            if display:
                showFrame(frame, faceCoordinates, None, drawFaces)
            img_count += 1

        time.sleep(frequency)
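
A minimal driver sketch for the function above, assuming the capture comes from OpenCV's cv2.VideoCapture and that the recording_emotion path is supplied on the command line; the argument name is taken from the example, while the webcam index, frequency, and flags below are illustrative.

# Hedged usage sketch: wiring saveFaceImage to a webcam.
# Only the argparse/cv2 calls are standard; the frequency, webcam index
# and display flags are illustrative assumptions.
import argparse
import cv2

parser = argparse.ArgumentParser()
parser.add_argument('--recording_emotion', required=True,
                    help='Directory (and file prefix) for the recorded face crops.')
args = parser.parse_args()

capture = cv2.VideoCapture(0)  # default webcam
try:
    # Save one preprocessed face crop roughly every 0.5 seconds.
    saveFaceImage(capture, frequency=0.5, display=True, drawFaces=True)
except KeyboardInterrupt:
    pass
finally:
    capture.release()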
Example #2
# `face_detection` is a project module; `recogintionWork` and `showFrame`
# are helpers defined elsewhere in the same project, and `net` is the
# trained emotion-recognition network.
import face_detection
def detectedAndDisplayFaces(capture, net, display=False, drawFaces=False):
    recognition = True
    # flag tells us whether the frame was captured successfully;
    # frame is the webcam frame (a numpy image).
    flag, frame = capture.read()
    # Not sure whether we should lock the screen if the cam reports an error.
    if flag:
        faceCoordinates = face_detection.getFaceCoordinates(frame)
        if faceCoordinates and recognition:
            emotion = recogintionWork(frame, faceCoordinates, net)
        else:
            emotion = None
        if display:
            showFrame(frame, faceCoordinates, emotion, drawFaces)
        if faceCoordinates:
            return True
    else:
        return True
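
A hedged sketch of a polling loop around this function, assuming net is the already-loaded emotion-recognition network used elsewhere in the project; loadNetwork is a hypothetical placeholder for however the project loads its model, and the polling interval and webcam index are illustrative.

# Hedged polling sketch: keep checking frames until a face is detected
# (the function also reports a failed capture as True).
import time
import cv2

net = loadNetwork()            # hypothetical model-loading step
capture = cv2.VideoCapture(0)  # default webcam
try:
    while not detectedAndDisplayFaces(capture, net, display=True, drawFaces=True):
        time.sleep(0.2)        # illustrative polling interval
finally:
    capture.release()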