Exemple #1
0
    def __init__(self):
        """Set up the camera, the analysis pipeline, and keyboard bindings."""
        # The imaging device has to be a locally attached camera; IP cameras
        # and mjpeg streams are not supported.
        self.camera = Camera(camera=0)  # default to the first device

        self.w, self.h = 0, 0
        self.pressed = 0

        # The processor (an openMDAO assembly) bundles every image/signal
        # analysis step: face detection, forehead isolation, time-series
        # collection, heart-beat estimation, and so on -- everything except
        # camera communication and the GUI itself.
        self.processor = findFaceGetPulse(
            bpm_limits=[50, 160],
            data_spike_limit=2500.,
            face_detector_smoothness=10.,
        )

        # State for the cardiac data plot.
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        # Keyboard bindings (a GUI window must have focus for these to fire).
        self.key_controls = {"s": self.toggle_search,
                             "d": self.toggle_display_plot,
                             "f": self.write_csv}
 def __init__(self):
     """Initialize the camera, the analysis assembly, and key bindings."""
     # Must be a directly connected camera (no ip camera / mjpeg stream).
     self.camera = Camera(camera=0)  # first camera by default
     self.w, self.h = 0, 0

     # openMDAO assembly handling all image & signal analysis: face
     # detection, forehead isolation, time-series collection, heart-beat
     # detection -- everything that is not camera I/O or the GUI.
     self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                       data_spike_limit=13.,
                                       face_detector_smoothness=10.)

     # Cardiac data plot state.
     self.bpm_plot = False
     self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

     # Keystroke -> handler map (a GUI window must have focus).
     self.key_controls = {
         "s": self.toggle_search,
         "d": self.toggle_display_plot,
     }
Exemple #3
0
    def __init__(self, args):
        """Parse CLI args, open serial/UDP outputs, and set up the cameras."""
        # Assumes the connected camera uses cam0 by default.
        serial = args.serial
        baud = args.baud
        self.send_serial = False
        self.send_udp = False
        if serial:
            self.send_serial = True
            baud = int(baud) if baud else 9600
            # Serial output is currently disabled in this variant:
            # self.serial = Serial(port=serial, baudrate=baud)

        udp = args.udp
        if udp:
            self.send_udp = True
            if ":" in udp:
                ip, port = udp.split(":")
                port = int(port)
            else:
                ip, port = udp, 5005
            self.udp = (ip, port)
            # Internet / UDP datagram socket.
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        # Probe up to three devices, always keeping at least one handle even
        # if the first device does not report valid.
        self.cameras = []
        self.selected_cam = 0
        for idx in range(3):
            cam = Camera(camera=idx)
            if cam.valid or not len(self.cameras):
                self.cameras.append(cam)
            else:
                break
        self.w, self.h = 0, 0
        self.pressed = 0

        # Containerized analysis of received frames (an openMDAO assembly):
        # face detection, forehead isolation, time-series collection,
        # heart-beat detection -- everything but camera I/O and the GUI.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Cardiac data plot parameters.
        self.bpm_plot = False
        self.plot_title = "Data display - raw signal (top) and PSD (bottom)"

        # Keystroke -> method map (a GUI window must have focus).
        # The "s" (toggle_search) binding is intentionally disabled here.
        self.key_controls = {"d": self.toggle_display_plot,
                             "c": self.toggle_cam,
                             "f": self.write_csv}
Exemple #4
0
    def __init__(self):
        """Open capture/recording devices, load detectors, wire key bindings."""
        self.cap = cv2.VideoCapture(0)
        self.fourcc = cv2.cv.CV_FOURCC(*'XVID')
        # NOTE(review): relies on module-level `subject`, `video`, `width`,
        # and `height` being defined before instantiation -- confirm.
        self.out = cv2.VideoWriter('data/' + subject + '/' + video + '/output.avi', -1, 9, (width, height))
        self.detector = dlib.get_frontal_face_detector()
        self.predictor = dlib.shape_predictor("shape_predictor_68_face_landmarks.dat")

        # Imaging device - must be a locally connected camera (not an ip
        # camera or an mjpeg stream).
        self.camera = Camera(camera=0)  # first camera by default
        self.w, self.h = 0, 0
        self.pressed = 0

        # openMDAO assembly running all image & signal analysis: face
        # detection, forehead isolation, time-series collection, heart-beat
        # detection -- everything that is not camera I/O or the GUI.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Cardiac plot state.
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        # Keystroke -> handler map (a GUI window must have focus).
        self.key_controls = {
            "s": self.toggle_search,
            "d": self.toggle_display_plot,
            "f": self.write_csv,
        }
Exemple #5
0
    def __init__(self, args):
        """Parse CLI options, open serial/UDP outputs, and probe cameras."""
        # Imaging device - must be a connected camera (not an ip camera or
        # mjpeg stream).
        serial = args.serial
        baud = args.baud
        self.send_serial = False
        self.send_udp = False
        if serial:
            self.send_serial = True
            baud = int(baud) if baud else 9600
            self.serial = Serial(port=serial, baudrate=baud)

        udp = args.udp
        if udp:
            self.send_udp = True
            if ":" in udp:
                ip, port = udp.split(":")
                port = int(port)
            else:
                ip, port = udp, 5005
            self.udp = (ip, port)
            # Internet / UDP datagram socket.
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        # Probe up to three devices, always keeping at least one handle.
        self.cameras = []
        self.selected_cam = 0
        for idx in range(3):
            cam = Camera(camera=idx)  # first camera by default
            if cam.valid or not len(self.cameras):
                self.cameras.append(cam)
            else:
                break
        self.w, self.h = 0, 0
        self.pressed = 0

        # Assembly performing face detection and pulse estimation.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Init parameters for the cardiac data plot.
        self.bpm_plot = False
        self.plot_title = "Data display - raw signal (top) and PSD (bottom)"

        # Keystroke -> handler map (a GUI window must have focus).
        self.key_controls = {"s": self.toggle_search,
                             "d": self.toggle_display_plot,
                             "c": self.toggle_cam,
                             "f": self.write_csv}
Exemple #6
0
 def __init__(self):
     """Set up the camera and centroid-selection state."""
     # Imaging device must be a directly connected camera (no ip camera
     # or mjpeg stream).
     self.camera = Camera(camera=0)  # first camera by default
     self.w, self.h = 0, 0
     self.pressed = 0

     # Most recently computed centroid, as [x, y].
     self.current_centroid = [0.0, 0.0]

     # Two user-selectable reference centroids and their active flags.
     self.centroid_1 = [0.0, 0.0]
     self.centroid_2 = [0.0, 0.0]
     self.centroid_1_active = False
     self.centroid_2_active = False

     # Keystroke -> handler map (a GUI window must have focus).
     self.key_controls = {
         "1": self.set_centroid_1,
         "2": self.set_centroid_2,
     }
Exemple #7
0
    def __init__(self, args):
        """Parse CLI options, open serial/UDP outputs, and probe cameras.

        (Tried to use droidcam from the play store; it will not work like
        that -- a directly connected camera is required.)
        """
        serial = args.serial
        baud = args.baud
        self.send_serial = False
        self.send_udp = False
        if serial:
            self.send_serial = True
            baud = int(baud) if baud else 9600
            self.serial = Serial(port=serial, baudrate=baud)

        udp = args.udp
        if udp:
            self.send_udp = True
            if ":" in udp:
                ip, port = udp.split(":")
                port = int(port)
            else:
                ip, port = udp, 5005
            self.udp = (ip, port)
            # Internet / UDP datagram socket.
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)

        # Probe up to three devices, always keeping at least one handle.
        self.cameras = []
        self.selected_cam = 0
        for idx in range(3):
            cam = Camera(camera=idx)  # first camera by default
            if cam.valid or not len(self.cameras):
                self.cameras.append(cam)
            else:
                break
        self.w, self.h = 0, 0
        self.pressed = 0

        # Assembly performing face detection and pulse estimation.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Cardiac plot state.
        self.bpm_plot = False
        self.plot_title = "Data display - raw signal (top) and PSD (bottom)"

        # Keystroke -> handler map (a GUI window must have focus).
        self.key_controls = {
            "s": self.toggle_search,
            "d": self.toggle_display_plot,
            "c": self.toggle_cam,
            "f": self.write_csv,
        }
Exemple #8
0
    def __init__(self, args):
        """Probe cameras and create the pulse-analysis processor."""
        self.COUNTER = 0
        # Probe up to three devices; always keep at least one handle so the
        # app has something to read from even if no camera reports valid.
        self.cameras = []
        self.selected_cam = 0
        for idx in range(3):
            cam = Camera(camera=idx)
            if cam.valid or not len(self.cameras):
                self.cameras.append(cam)
            else:
                break

        # Assembly performing face detection and pulse estimation.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        self.plot_title = "Real-Time Heart Rate"
    def __init__(self, args):
        """Open an optional serial output, probe cameras, build the processor."""
        # Imaging device - must be a connected camera (not an ip camera or
        # mjpeg stream).
        serial = args.serial
        baud = args.baud
        self.send_serial = False
        self.send_udp = False
        if serial:
            self.send_serial = True
            baud = int(baud) if baud else 9600
            self.serial = Serial(port=serial, baudrate=baud)

        # Probe up to three devices, always keeping at least one handle.
        self.cameras = []
        self.selected_cam = 0
        for idx in range(3):
            cam = Camera(camera=idx)  # first camera by default
            if cam.valid or not len(self.cameras):
                self.cameras.append(cam)
            else:
                break
        self.w, self.h = 0, 0
        self.pressed = 0

        # Containerized analysis of received frames (an openMDAO assembly):
        # face detection, forehead isolation, time-series collection,
        # heart-beat detection -- everything but camera I/O and the GUI.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Init parameters for the cardiac data plot.
        self.bpm_plot = False
        self.plot_title = "Data display - raw signal (top) and PSD (bottom)"

        # Keystroke -> handler map (a GUI window must have focus).
        self.key_controls = {"s": self.toggle_search,
                             "d": self.toggle_display_plot}
Exemple #10
0
    def __init__(self, args):
        """Open an optional serial output, probe cameras, build the processor."""
        # Imaging device - must be a connected camera (not an ip camera or
        # mjpeg stream).
        serial = args.serial
        baud = args.baud
        self.send_serial = False
        if serial:
            self.send_serial = True
            baud = int(baud) if baud else 9600
            self.serial = Serial(port=serial, baudrate=baud)

        # Probe up to three devices, always keeping at least one handle.
        self.cameras = []
        self.selected_cam = 0
        for idx in range(3):
            cam = Camera(camera=idx)  # first camera by default
            if cam.valid or not len(self.cameras):
                self.cameras.append(cam)
            else:
                break
        self.w, self.h = 0, 0
        self.pressed = 0

        # Containerized analysis of received frames (an openMDAO assembly):
        # face detection, forehead isolation, time-series collection,
        # heart-beat detection -- everything but camera I/O and the GUI.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # No keyboard bindings are configured in this variant.
        self.key_controls = {}
Exemple #11
0
class getPulseApp(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.

    Then the average green-light intensity in the forehead region is gathered 
    over time, and the detected person's pulse is estimated.
    """
    eyesArea = []
    perclos = []
    eyesCalssfier = CascadeClassifier("cascades/haarcascade_eye.xml")
    def __init__(self):
        #Imaging device - must be a connected camera (not an ip camera or mjpeg
        #stream)
        self.camera = Camera(camera=0) #first camera by default
        self.w,self.h = 0,0
        self.pressed = 0
        #Containerized analysis of recieved image frames (an openMDAO assembly)
        #is defined next.

        #This assembly is designed to handle all image & signal analysis,
        #such as face detection, forehead isolation, time series collection,
        #heart-beat detection, etc. 

        #Basically, everything that isn't communication
        #to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits = [50,160],
                                          data_spike_limit = 2500.,
                                          face_detector_smoothness = 10.)  

        #Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        #Maps keystrokes to specified methods
        #(A GUI window must have focus for these to work)
        self.key_controls = {"s" : self.toggle_search,
                             "d" : self.toggle_display_plot,
                             "f" : self.write_csv}
        
    def write_csv(self):
        """
        Writes current data to a csv file
        """
        bpm = " " + str(int(self.processor.measure_heart.bpm))
        fn = str(datetime.datetime.now()).split(".")[0] + bpm + " BPM.csv"
        
        data = np.array([self.processor.fft.times, 
                         self.processor.fft.samples]).T
        np.savetxt(fn, data, delimiter=',')
        


    def toggle_search(self):
        """
        Toggles a motion lock on the processor's face detection component.

        Locking the forehead location in place significantly improves
        data quality, once a forehead has been sucessfully isolated. 
        """
        state = self.processor.find_faces.toggle()
        if not state:
        	self.processor.fft.reset()
        print "face detection lock =",not state

    def toggle_display_plot(self):
        """
        Toggles the data display.
        """
        if self.bpm_plot:
            print "bpm plot disabled"
            self.bpm_plot = False
            destroyWindow(self.plot_title)
        else:
            print "bpm plot enabled"
            self.bpm_plot = True
            self.make_bpm_plot()
            moveWindow(self.plot_title, self.w,0)

    def make_bpm_plot(self):
        """
        Creates and/or updates the data display
        """
        plotXY([[self.processor.fft.times, 
                 self.processor.fft.samples],
                [self.processor.fft.even_times[4:-4], 
                 self.processor.measure_heart.filtered[4:-4]],
                [self.processor.measure_heart.freqs, 
                 self.processor.measure_heart.fft]], 
               labels = [False, False, True],
               showmax = [False,False, "bpm"], 
               label_ndigits = [0,0,0],
               showmax_digits = [0,0,1],
               skip = [3,3,4],
               name = self.plot_title, 
               bg = self.processor.grab_faces.slices[0])

    def key_handler(self):    
        """
        Handle keystrokes, as set at the bottom of __init__()

        A plotting or camera frame window must have focus for keypresses to be
        detected.
        """

        self.pressed = waitKey(10) & 255 #wait for keypress for 10 ms
        if self.pressed == 27: #exit program on 'esc'
            print "exiting..."
            self.camera.cam.release()
            exit()

        for key in self.key_controls.keys():
            if chr(self.pressed) == key:
                self.key_controls[key]()
    
    def get_perclos(self):
#         eyesCalssfier = "cascades/haarcascade_eye.xml"
        eye = self.getEyesArea(self.processor.grab_faces.slices[0])
        if eye!=None:
            self.eyesArea.append(eye)
        if len(self.eyesArea)>=100:
            tempPerclos = self.cal_perclos(self.eyesArea, len(self.eyesArea))
            self.perclos.append(tempPerclos)
            self.eyesArea[:] = self.eyesArea[0:75]
            if len(self.perclos)>=10:
                self.perclos[:] = self.perclos[1:10]
            
            
    def getEyesArea(self,face,classfier=eyesCalssfier):
        eyes = []
        eyesRects = classfier.detectMultiScale(face)
        # 如果检测到人眼,返回,否则返回空
        if len(eyesRects)>0:
            for (ex,ey,ew,eh) in eyesRects:
                eyes.append(ew*eh)
            aveEyeArea = float(sum(eyes))/len(eyes)
            return aveEyeArea
        else:
            return None
    def handleEyesArea(self,areas):
        max_areas=max(areas)
        min_areas=min(areas)
        #眼球极值
        D=max_areas-min_areas

        #最大值阈值
        threshold_max=min_areas+D*0.8
        #大于阈值的都与取做最大值的平均
        j=0
        vj=0
        for area in areas:
            if area>=threshold_max:
                vj=vj+area
                j=j+1
        #相对最大值
        relative_max=1.0*vj/j
        return min_areas,relative_max
    #计算PERCLOS值
    def cal_perclos(self,areas,count):
        min_areas,relative_max=self.handleEyesArea(areas)
        pi=[]
        eyeclose=0 #眼睛闭上的个数
        for mi in areas:
            temp=abs(1.0*(mi-min_areas)/(relative_max-min_areas))
#             print "temp=",temp
            if temp>0.2:
                pi.append(0)            
            else:            
                pi.append(1)
                eyeclose+=1
        sigma_pi=0
        sum = 1.0
        for pii in pi:
            if pii==1:
                sigma_pi=sigma_pi+1
        perclos=1.0*sigma_pi/count
        return perclos        
    def main_loop(self):
        """
        Single iteration of the application's main loop.
        """
        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h,self.w,_c = frame.shape
        

        #display unaltered frame
        # imshow("Original",frame)

        #set current image frame to the processor's input
        self.processor.frame_in = frame
        #process the image frame to perform all needed analysis
        self.processor.run()
        #collect the output frame for display
        output_frame = self.processor.frame_out

        #show the processed/annotated output frame
        # imshow("Processed",output_frame)

        #create and/or update the raw data display if needed
        if self.bpm_plot:
            self.make_bpm_plot()

        #handle any key presses
        self.key_handler()
        self.get_perclos()
Exemple #12
0
class AnalyseEmotion(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.

    Then the average green-light intensity in the forehead region is gathered 
    over time, and the detected person's pulse is estimated.
    """
    # NOTE(review): this class reads module-level globals defined elsewhere in
    # the file: `f` (an open output file), `line2`, `capture`, `faceCascade`,
    # `DetectRedEyes`, and the `cv` module -- confirm they exist before use.
    def __init__(self):
        # Set up the camera, the analysis pipeline, the plot state, and the
        # keyboard bindings.
        #Imaging device - must be a connected camera (not an ip camera or mjpeg
        #stream)
        self.camera = Camera(camera=0)  #first camera by default

        self.w, self.h = 0, 0
        self.pressed = 0
        #Containerized analysis of received image frames (an openMDAO assembly)
        #is defined next.

        #This assembly is designed to handle all image & signal analysis,
        #such as face detection, forehead isolation, time series collection,
        #heart-beat detection, etc.

        #Basically, everything that isn't communication
        #to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        #Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        #Maps keystrokes to specified methods
        #(A GUI window must have focus for these to work)
        self.key_controls = {
            "s": self.toggle_search,
            "d": self.toggle_display_plot,
            "f": self.write_csv
        }

    def write_csv(self):
        """
        Writes current data to a csv file
        """
        # The filename embeds the capture timestamp and current BPM estimate.
        bpm = " " + str(int(self.processor.measure_heart.bpm))
        fn = str(datetime.datetime.now()).split(".")[0] + bpm + " BPM.csv"

        data = np.array([self.processor.fft.times,
                         self.processor.fft.samples]).T
        np.savetxt(fn, data, delimiter=',')

    def toggle_search(self):
        """
        Toggles a motion lock on the processor's face detection component.

        Locking the forehead location in place significantly improves
        data quality, once a forehead has been successfully isolated.
        """
        state = self.processor.find_faces.toggle()
        if not state:
            # Just locked: restart the signal buffer to drop stale samples.
            self.processor.fft.reset()
        print "face detection lock =", not state

    def toggle_display_plot(self):
        """
        Toggles the data display.
        """
        if self.bpm_plot:
            print "bpm plot disabled"
            self.bpm_plot = False
            destroyWindow(self.plot_title)
        else:
            print "bpm plot enabled"
            self.bpm_plot = True
            self.make_bpm_plot()
            moveWindow(self.plot_title, self.w, 0)

    def key_handler(self):
        """
        Handle keystrokes, as set at the bottom of __init__()

        A plotting or camera frame window must have focus for keypresses to be
        detected.

        On 'esc' (only once the FFT buffer is ready) the final BPM estimate
        and the global `line2` are appended to the open file `f`, then the
        program exits.
        """
        global line2
        self.pressed = waitKey(10) & 255  #wait for keypress for 10 ms
        if self.pressed == 27 and self.processor.fft.ready == True:  #exit program on 'esc'
            print "exiting..."
            self.camera.cam.release()
            #a = sum(self.processor.fft.samples)/float(len(self.processor.fft.samples))
            print >> f, str(self.processor.show_bpm_text.bpm)
            print >> f, str(line2)
            #f.write(str(a) + "\n")
            #f.write(str(line2) + "\n")
            exit()

        for key in self.key_controls.keys():
            if chr(self.pressed) == key:
                self.key_controls[key]()

    def main_loop(self):
        """
        Single iteration of the application's main loop.
        """
        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h, self.w, _c = frame.shape

        #display unaltered frame
        #imshow("Original",frame)

        #set current image frame to the processor's input
        self.processor.frame_in = frame
        #process the image frame to perform all needed analysis
        self.processor.run()
        #collect the output frame for display
        output_frame = self.processor.frame_out

        #show the processed/annotated output frame
        imshow("Processed", output_frame)

        #create and/or update the raw data display if needed
        #if self.bpm_plot:
        #self.make_bpm_plot()
        #print(self.processor.fft.samples)
        if (self.processor.fft.ready):
            print "Ready to hit escape"
        else:
            print ".",

        # NOTE(review): `capture`, `faceCascade`, `DetectRedEyes` and `cv`
        # are module-level names defined elsewhere in this file.
        img = cv.QueryFrame(capture)
        if img:
            image = DetectRedEyes(img, faceCascade)
            cv.ShowImage("camera", image)
        #handle any key presses
        self.key_handler()
class getPulseApp(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.
    
    Then the average green-light intensity in the forehead region is gathered 
    over time, and the detected person's pulse is estimated.
    """
    def __init__(self):
        # Set up the camera, the analysis assembly, the plot state, and the
        # keyboard bindings.
        #Imaging device - must be a connected camera (not an ip camera or mjpeg
        #stream)
        self.camera = Camera(camera=0) #first camera by default
        self.w,self.h = 0,0
        
        #Containerized analysis of received image frames (an openMDAO assembly)
        #is defined next.
        
        #This assembly is designed to handle all image & signal analysis,
        #such as face detection, forehead isolation, time series collection,
        #heart-beat detection, etc. 
        
        #Basically, everything that isn't communication
        #to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits = [50,160],
                                          data_spike_limit = 13.,
                                          face_detector_smoothness = 10.)
        
        #Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"
        
        #Maps keystrokes to specified methods
        #(A GUI window must have focus for these to work)
        self.key_controls = {"s" : self.toggle_search,
                        "d" : self.toggle_display_plot}
        
    
    def toggle_search(self):
        """
        Toggles a motion lock on the processor's face detection component.
        
        Locking the forehead location in place significantly improves
        data quality, once a forehead has been successfully isolated. 
        """
        state = self.processor.find_faces.toggle()
        print "face detection lock =",not state
    
    def toggle_display_plot(self):
        """
        Toggles the data display.
        """
        if self.bpm_plot:
            print "bpm plot disabled"
            self.bpm_plot = False
            destroyWindow(self.plot_title)
        else:
            print "bpm plot enabled"
            self.bpm_plot = True
            self.make_bpm_plot()
            moveWindow(self.plot_title, self.w,0)
    
    def make_bpm_plot(self):
        """
        Creates and/or updates the data display
        """
        # NOTE(review): this variant reads `self.processor.heart` (not
        # `measure_heart` as in other variants) -- confirm the processor's
        # attribute name before merging code between variants.
        plotXY([[self.processor.fft.times, 
                 self.processor.fft.samples],
            [self.processor.fft.even_times[4:-4], 
             self.processor.heart.filtered[4:-4]],
                [self.processor.heart.freqs, 
                 self.processor.heart.fft]], 
               labels = [False, False, True],
               showmax = [False,False, "bpm"], 
               name = self.plot_title, 
               bg = self.processor.grab_faces.slices[0])
    
    def key_handler(self):    
        """
        Handle keystrokes, as set at the bottom of __init__()
        
        A plotting or camera frame window must have focus for keypresses to be
        detected.
        """
        pressed = waitKey(10) & 255 #wait for keypress for 10 ms
        if pressed == 27: #exit program on 'esc'
            # NOTE(review): exits via quit() without releasing the camera,
            # unlike other variants that call self.camera.cam.release().
            quit()
        for key in self.key_controls.keys():
            if chr(pressed) == key:
                self.key_controls[key]()
                
    def main_loop(self):
        """
        Single iteration of the application's main loop.
        """
        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h,self.w,_c = frame.shape
        
        #display unaltered frame
        #imshow("Original",frame)
        
        #set current image frame to the processor's input
        self.processor.frame_in = frame
        #process the image frame to perform all needed analysis
        self.processor.run()
        #collect the output frame for display
        output_frame = self.processor.frame_out
        
        #show the processed/annotated output frame
        imshow("Processed",output_frame)
        
        #create and/or update the raw data display if needed
        if self.bpm_plot:
            self.make_bpm_plot()
        
        #handle any key presses
        self.key_handler()
class getPulseApp(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.
    
    Then the average green-light intensity in the forehead region is gathered 
    over time, and the detected person's pulse is estimated.
    """
    def __init__(self):
        #Imaging device - must be a connected camera (not an ip camera or mjpeg
        #stream)
        self.camera = Camera(camera=0)  #first camera by default
        self.w, self.h = 0, 0

        #Containerized analysis of recieved image frames (an openMDAO assembly)
        #is defined next.

        #This assembly is designed to handle all image & signal analysis,
        #such as face detection, forehead isolation, time series collection,
        #heart-beat detection, etc.

        #Basically, everything that isn't communication
        #to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=13.,
                                          face_detector_smoothness=10.)

        #Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        #Maps keystrokes to specified methods
        #(A GUI window must have focus for these to work)
        self.key_controls = {
            "s": self.toggle_search,
            "d": self.toggle_display_plot
        }

    def toggle_search(self):
        """
        Toggles a motion lock on the processor's face detection component.
        
        Locking the forehead location in place significantly improves
        data quality, once a forehead has been sucessfully isolated. 
        """
        state = self.processor.find_faces.toggle()
        print "face detection lock =", not state

    def toggle_display_plot(self):
        """
        Toggles the data display.
        """
        if self.bpm_plot:
            print "bpm plot disabled"
            self.bpm_plot = False
            destroyWindow(self.plot_title)
        else:
            print "bpm plot enabled"
            self.bpm_plot = True
            self.make_bpm_plot()
            moveWindow(self.plot_title, self.w, 0)

    def make_bpm_plot(self):
        """
        Creates and/or updates the data display
        """
        plotXY([[self.processor.fft.times, self.processor.fft.samples],
                [
                    self.processor.fft.even_times[4:-4],
                    self.processor.heart.filtered[4:-4]
                ], [self.processor.heart.freqs, self.processor.heart.fft]],
               labels=[False, False, True],
               showmax=[False, False, "bpm"],
               name=self.plot_title,
               bg=self.processor.grab_faces.slices[0])

    def key_handler(self):
        """
        Handle keystrokes, as set at the bottom of __init__()
        
        A plotting or camera frame window must have focus for keypresses to be
        detected.
        """
        pressed = waitKey(10) & 255  #wait for keypress for 10 ms
        if pressed == 27:  #exit program on 'esc'
            quit()
        for key in self.key_controls.keys():
            if chr(pressed) == key:
                self.key_controls[key]()

    def main_loop(self):
        """
        Single iteration of the application's main loop.
        """
        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h, self.w, _c = frame.shape

        #display unaltered frame
        #imshow("Original",frame)

        #set current image frame to the processor's input
        self.processor.frame_in = frame
        #process the image frame to perform all needed analysis
        self.processor.run()
        #collect the output frame for display
        output_frame = self.processor.frame_out

        #show the processed/annotated output frame
        imshow("Processed", output_frame)

        #create and/or update the raw data display if needed
        if self.bpm_plot:
            self.make_bpm_plot()

        #handle any key presses
        self.key_handler()
class AnalyseEmotion(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.

    Then the average green-light intensity in the forehead region is gathered
    over time, and the detected person's pulse is estimated.

    NOTE(review): main_loop() reads/writes module-level globals
    (smileneighbour, mqLoop, smilecount, eyetot) and several open handles
    (sm, e, hr, f, pulse_estimation_log) plus md, cv, capture, dhr that are
    defined elsewhere in this file -- presumably smile/eye counters, log
    files, and a heart-rate threshold; confirm against the module setup
    code.
    """
    def __init__(self):
        #Imaging device - must be a connected camera (not an ip camera or mjpeg
        #stream)
        self.camera = Camera(camera=0) #first camera by default
        
        self.w,self.h = 0,0
        self.pressed = 0
        #Containerized analysis of recieved image frames (an openMDAO assembly)
        #is defined next.

        #This assembly is designed to handle all image & signal analysis,
        #such as face detection, forehead isolation, time series collection,
        #heart-beat detection, etc. 

        #Basically, everything that isn't communication
        #to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits = [50,160],
                                          data_spike_limit = 2500.,
                                          face_detector_smoothness = 10.)  

        #Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        #Maps keystrokes to specified methods
        #(A GUI window must have focus for these to work)
        self.key_controls = {"s" : self.toggle_search,
                             "d" : self.toggle_display_plot,
                             "f" : self.write_csv}
        
    def write_csv(self):
        """
        Writes current data to a csv file.

        The file name embeds the current timestamp and BPM estimate; each
        row holds one (sample time, raw signal value) pair.
        """
        bpm = " " + str(int(self.processor.measure_heart.bpm))
        fn = str(datetime.datetime.now()).split(".")[0] + bpm + " BPM.csv"
        
        data = np.array([self.processor.fft.times, 
                         self.processor.fft.samples]).T
        np.savetxt(fn, data, delimiter=',')
        


    def toggle_search(self):
        """
        Toggles a motion lock on the processor's face detection component.

        Locking the forehead location in place significantly improves
        data quality, once a forehead has been successfully isolated. 
        """
        state = self.processor.find_faces_toggle()
        if not state:
            # NOTE(review): this replaces the fft component object with a
            # plain list, which would break make_bpm_plot()/write_csv()
            # afterwards (they expect .times/.samples attributes).  The
            # previously commented-out self.processor.fft.reset() looks
            # like the intent -- confirm.
            self.processor.fft = []
        print "face detection lock =",not state

    def toggle_display_plot(self):
        """
        Toggles the cardiac data display window on or off.
        """
        if self.bpm_plot:
            print "bpm plot disabled"
            self.bpm_plot = False
            destroyWindow(self.plot_title)
        else:
            print "bpm plot enabled"
            self.bpm_plot = True
            self.make_bpm_plot()
            moveWindow(self.plot_title, self.w,0)

    def make_bpm_plot(self):
        """
        Creates and/or updates the data display: raw signal, filtered
        signal, and power spectral density (with the BPM highlighted).
        """
        plotXY([[self.processor.fft.times, 
                 self.processor.fft.samples],
                [self.processor.fft.even_times[4:-4], 
                 self.processor.measure_heart.filtered[4:-4]],
                [self.processor.measure_heart.freqs, 
                 self.processor.measure_heart.fft]], 
               labels = [False, False, True],
               showmax = [False,False, "bpm"], 
               label_ndigits = [0,0,0],
               showmax_digits = [0,0,1],
               skip = [3,3,4],
               name = self.plot_title, 
               bg = self.processor.grab_faces.slices[0])

    def key_handler(self):    
        """
        Handle keystrokes, as set at the bottom of __init__().

        A plotting or camera frame window must have focus for keypresses to be
        detected.  'esc' releases the camera and exits the program.
        """

        self.pressed = waitKey(10) & 255 #wait for keypress for 10 ms
        if self.pressed == 27: #exit program on 'esc'
            print "exiting..."
            self.camera.cam.release()
            exit()

        for key in self.key_controls.keys():
            if chr(self.pressed) == key:
                self.key_controls[key]()

    def main_loop(self):
        """
        Single iteration of the application's main loop.

        Besides the pulse pipeline, this periodically appends the current
        smile/eye/heart-rate readings to the module-level log files.
        """
        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h,self.w,_c = frame.shape
        

        #display unaltered frame
        #imshow("Original",frame)

        #set current image frame to the processor's input
        self.processor.frame_in = frame
        #process the image frame to perform all needed analysis
        # NOTE(review): unlike the other variants in this file, run() is
        # called with the frame as an argument -- confirm the pipeline API.
        self.processor.run(frame)
        #collect the output frame for display
        output_frame = self.processor.frame_out

        #show the processed/annotated output frame
        imshow("Processed",output_frame)

        #create and/or update the raw data display if needed
        global smileneighbour, mqLoop, smilecount, eyetot
        #if self.bpm_plot:
            #self.make_bpm_plot()
        # mqLoop accumulates 0.9 per frame, so this branch fires roughly
        # every other iteration and flushes the readings to the log files.
        if mqLoop >= 1:
            x = str(datetime.datetime.now())
            sm.write(str(md.datestr2num(x)) + " " + str(smileneighbour) + "\n")
            e.write(str(md.datestr2num(x)) + " " + str(eyetot) + "\n")
            #hr.write(str(md.datestr2num(x)) + " " + str(self.processor.show_bpm_text.bpm) + "\n")
            hr.write(str(md.datestr2num(x)) + " " + str(self.processor.bpm) + "\n")
            # Epoch-milliseconds timestamped BPM log
            pulse_estimation_log.write(str(int(round(time.time() * 1000))) + " " + str(self.processor.bpm) + "\n")
            smileneighbour+= 2*eyetot
            smileneighbour/=100
            print "bpm: " + str(self.processor.bpm)
            #if (self.processor.show_bpm_text.bpm) > dhr:
            # Boost the score by the amount the BPM exceeds the dhr
            # threshold (module-level constant -- confirm its meaning)
            if (self.processor.bpm) > dhr:
                smileneighbour += (self.processor.bpm - dhr)
            
            
            f.write(str(md.datestr2num(x)) + " " + str(smileneighbour) + "\n")
            mqLoop = 0
        else:
            mqLoop+= 0.9    
        img = cv.QueryFrame(capture)    
        smileneighbour = 0
        eyetot = 0
        #if img:
        #    image = DetectRedEyes(img, faceCascade, smileCascade, eyeCascade)
        #    cv.ShowImage("camera", image)
        #handle any key presses
        self.key_handler()
Exemple #16
0
class getPulseApp(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.
    
    Then the average green-light intensity in the forehead region is gathered
    over time, and the detected person's pulse is estimated.
    """
    def __init__(self):
        #Imaging device - must be a connected camera (not an ip camera or mjpeg
        #stream)
        self.camera = Camera(camera=0) #first camera by default
        self.w,self.h = 0,0
        self.pressed = 0
        
        self.current_centroid = [0.0,0.0]
        
        self.centroid_1 = [0.0,0.0]
        self.centroid_2 = [0.0,0.0]
        
        self.centroid_1_active = False
        self.centroid_2_active = False
        
        #Maps keystrokes to specified methods
        #(A GUI window must have focus for these to work)
        self.key_controls = {"1" : self.set_centroid_1,
                             "2" : self.set_centroid_2}
    
    def set_centroid_1(self):
        """
        Sets centroid 1 to the current centroid position.
        If centroid 1 is not currently active, makes it active.
        """
        if not self.centroid_1_active:
            self.centroid_1_active = True
        
        self.centroid_1 = self.current_centroid
    
    def set_centroid_2(self):
        """
        Sets centroid 2 to the current centroid position.
        If centroid 2 is not currently active, makes it active.
        """
        if not self.centroid_2_active:
            self.centroid_2_active = True
        
        self.centroid_2 = self.current_centroid
    
    def key_handler(self):
        """
        Handle keystrokes, as set at the bottom of __init__()
        
        A plotting or camera frame window must have focus for keypresses to be
        detected.
        """
        
        self.pressed = waitKey(1) & 255 #wait for keypress for 10 ms
        if self.pressed == 27: #exit program on 'esc'
            print "exiting..."
            self.camera.cam.release()
            exit()
        
        for key in self.key_controls.keys():
            if chr(self.pressed) == key:
                self.key_controls[key]()
    
    def main_loop(self):
        """
        Single iteration of the application's main loop.
        """
        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h,self.w,_c= frame.shape
        
        self.current_centroid = centroid(np.sum(frame,2)/3)
        
        #display unaltered frame
        #imshow("Original",frame)
        
        #collect the output frame for display
        output_frame = process_image(frame,self.current_centroid,self.centroid_1,self.centroid_2,self.centroid_1_active,self.centroid_2_active)
        
        #show the processed/annotated output frame
        imshow("Processed",output_frame)
        
        #handle any key presses
        self.key_handler()
Exemple #17
0
class AnalyseEmotion(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.

    Then the average green-light intensity in the forehead region is gathered
    over time, and the detected person's pulse is estimated.  This variant
    additionally runs dlib 68-point facial-landmark detection on every frame
    and logs per-frame facial measurements.

    NOTE(review): relies on many module-level globals defined elsewhere in
    this file (subject, video, width, height, dhr, smileneighbour, mqLoop,
    smilecount, eyetot; the open handles sm, e, hr, f, c, raw, all; plus
    md, cv, capture, faceCascade, smileCascade, eyeCascade, DetectRedEyes,
    face_utils, dist) -- confirm against the module setup code.
    """

    def __init__(self):
        # Raw capture device, codec, and an AVI writer placed under
        # data/<subject>/<video>/ (subject and video are module globals)
        self.cap = cv2.VideoCapture(0)
        self.fourcc = cv2.cv.CV_FOURCC(*'XVID')
        self.out = cv2.VideoWriter('data/' + subject + '/' + video +'/output.avi', -1, 9, (width, height))
        # dlib face detector + landmark predictor; the model file must be
        # present in the working directory
        self.detector = dlib.get_frontal_face_detector()
        self.predictor = dlib.shape_predictor("shape_predictor_68_face_landmarks.dat")
        # Imaging device - must be a connected camera (not an ip camera or mjpeg
        # stream)
        self.camera = Camera(camera=0)  # first camera by default

        self.w, self.h = 0, 0
        self.pressed = 0
        # Containerized analysis of recieved image frames (an openMDAO assembly)
        # is defined next.

        # This assembly is designed to handle all image & signal analysis,
        # such as face detection, forehead isolation, time series collection,
        # heart-beat detection, etc.

        # Basically, everything that isn't communication
        # to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        # Maps keystrokes to specified methods
        # (A GUI window must have focus for these to work)
        self.key_controls = {"s": self.toggle_search,
                             "d": self.toggle_display_plot,
                             "f": self.write_csv}

    def write_csv(self):
        """
        Writes current data to a csv file.

        The file name embeds the current timestamp and BPM estimate; each
        row holds one (sample time, raw signal value) pair.
        """
        bpm = " " + str(int(self.processor.measure_heart.bpm))
        fn = str(datetime.datetime.now()).split(".")[0] + bpm + " BPM.csv"

        data = np.array([self.processor.fft.times,
                         self.processor.fft.samples]).T
        np.savetxt(fn, data, delimiter=',')

    def toggle_search(self):
        """
        Toggles a motion lock on the processor's face detection component.

        Locking the forehead location in place significantly improves
        data quality, once a forehead has been successfully isolated. 
        """
        state = self.processor.find_faces.toggle()
        if not state:
            # Clear accumulated samples when a lock is engaged
            self.processor.fft.reset()
        print "face detection lock =", not state

    def toggle_display_plot(self):
        """
        Toggles the cardiac data display window on or off.
        """
        if self.bpm_plot:
            print "bpm plot disabled"
            self.bpm_plot = False
            destroyWindow(self.plot_title)
        else:
            print "bpm plot enabled"
            self.bpm_plot = True
            self.make_bpm_plot()
            moveWindow(self.plot_title, self.w, 0)

    def make_bpm_plot(self):
        """
        Creates and/or updates the data display: raw signal, filtered
        signal, and power spectral density (with the BPM highlighted).
        """
        plotXY([[self.processor.fft.times,
                 self.processor.fft.samples],
                [self.processor.fft.even_times[4:-4],
                 self.processor.measure_heart.filtered[4:-4]],
                [self.processor.measure_heart.freqs,
                 self.processor.measure_heart.fft]],
               labels=[False, False, True],
               showmax=[False, False, "bpm"],
               label_ndigits=[0, 0, 0],
               showmax_digits=[0, 0, 1],
               skip=[3, 3, 4],
               name=self.plot_title,
               bg=self.processor.grab_faces.slices[0])

    def key_handler(self):
        """
        Handle keystrokes, as set at the bottom of __init__().

        A plotting or camera frame window must have focus for keypresses to be
        detected.  'esc' releases all capture/recording resources and exits.
        """

        self.pressed = waitKey(1) & 0xFF  # wait for keypress for 1 ms
        if self.pressed == 27:  # exit program on 'esc'
            print "exiting..."
            self.cap.release()
            self.out.release()
            self.camera.cam.release()
            exit()

        for key in self.key_controls.keys():
            if chr(self.pressed) == key:
                self.key_controls[key]()

    def main_loop(self):
        """
        Single iteration of the application's main loop.

        Runs the pulse pipeline, then dlib landmark detection; logs raw
        landmark coordinates and derived facial measurements per frame, and
        periodically flushes smile/eye/heart-rate readings to the
        module-level log files.
        """

        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h, self.w, _c = frame.shape

        # display unaltered frame
        # imshow("Original",frame)

        # set current image frame to the processor's input
        self.processor.frame_in = frame
        # process the image frame to perform all needed analysis
        self.processor.run()
        # collect the output frame for display
        output_frame = self.processor.frame_out

        # show the processed/annotated output frame
        imshow("Processed", output_frame)

        # create and/or update the raw data display if needed
        global smileneighbour, mqLoop, smilecount, eyetot
        # if self.bpm_plot:
        # self.make_bpm_plot()
        x = str(datetime.datetime.now())

        # Landmark detection runs on the grayscale frame
        gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
        rects = self.detector(gray, 0)
        ##### Record self-report: '.' key logs a 500 marker to c #####
        key = cv2.waitKey(1) & 0xFF
        if key == ord("."):
            c.write(str(md.datestr2num(x)) + " " + str(500) + "\n")
            ############################

        for rect in rects:
            # 68-point landmark array for this face
            shape = self.predictor(gray, rect)
            shape = face_utils.shape_to_np(shape)

            # Draw each landmark and build a space-separated coordinate string
            shape2 = shape.copy()
            newline = ""
            for (w,z) in shape2:
                cv2.circle(frame, (w, z), 1, (0, 0, 255), -1)
                new = str((w,z)) + " "
                newline += new

            #######################
            ########Raw Data#######
            #######################

            raw.write(str(md.datestr2num(x)) + newline + "\n")

            #######################
            ######New Things#######

            # Inner-brow distance (frown indicator)
            leftBrow = shape[21]
            rightBrow = shape[22]
            distance = dist.euclidean(leftBrow, rightBrow)

            # Eye-opening measure combined across both eyes
            upper1 = shape[37]
            lower1 = shape[41]
            right1 = shape[38]
            upper2 = shape[43]
            lower2 = shape[47]
            right2 = shape[44]
            average = (dist.euclidean(upper1, lower1) * dist.euclidean(upper1, right1) + dist.euclidean(upper2,
                                                                                                        lower2) * dist.euclidean(
                upper2, right2))

            # Mouth corners and lip midpoints
            leftmouth = shape[48]
            rightmouth = shape[54]

            upperlip = shape[62]
            lowerlip = shape[66]

            ##########End##########
            #######################
            #Time, Frown, Eye size, Mouth width, Mouth heigth, Brow Raise, Length of face, and Width of face
            # NOTE(review): `all` here is a module-level file handle that
            # shadows the builtin all() -- confirm and consider renaming.
            all.write(str(md.datestr2num(x)) + " " + str(distance) + " " + str(average) + " " + str(
                dist.euclidean(leftmouth, rightmouth)) + " " + str(dist.euclidean(upperlip, lowerlip)) + " " + str(
                (dist.euclidean(shape[24], shape[44]) + dist.euclidean(shape[19], shape[37]))) +  " " + str(dist.euclidean(shape[27], shape[8])) + " " + str(dist.euclidean(shape[2], shape[14])) + "\n")
            #######################
            ######Single File######
        cv2.imshow("Frame", frame)

        # mqLoop accumulates 0.9 per frame, so this fires roughly every
        # other iteration and flushes readings to the log files
        if mqLoop >= 1:

            sm.write(str(md.datestr2num(x)) + " " + str(smileneighbour) + "\n")
            e.write(str(md.datestr2num(x)) + " " + str(eyetot) + "\n")
            hr.write(str(md.datestr2num(x)) + " " + str(self.processor.show_bpm_text.bpm) + "\n")
            smileneighbour += 2 * eyetot
            smileneighbour /= 100
            # Boost the score by the amount the BPM exceeds the dhr
            # threshold (module-level constant -- confirm its meaning)
            if (self.processor.show_bpm_text.bpm) > dhr:
                # print (self.processor.fft.samples[-1]/2, self.processor.fft.samples[-1]-dhr/2)
                # overbeat = (self.processor.fft.samples[-1]-dhr)*(self.processor.fft.samples[-1]-dhr)
                smileneighbour += (self.processor.show_bpm_text.bpm - dhr)

            f.write(str(md.datestr2num(x)) + " " + str(smileneighbour) + "\n")
            mqLoop = 0
        else:
            mqLoop += 0.9
        # Legacy OpenCV 1.x capture path for the red-eye/smile display
        img = cv.QueryFrame(capture)
        smileneighbour = 0
        eyetot = 0
        if img:
            image = DetectRedEyes(img, faceCascade, smileCascade, eyeCascade)
            cv.ShowImage("camera", image)
        # handle any key presses
        self.key_handler()
Exemple #18
0
    def __init__(self, args):
        """
        Parse command-line options and set up the output channels (serial,
        UDP), the camera list, the analysis pipeline, key bindings, and an
        auxiliary video reader.

        args: argparse-style namespace -- reads .serial, .baud, .udp
        (presumably produced by the application's CLI parser; confirm
        against the parser definition elsewhere in the file).
        """
        # Imaging device - must be a connected camera (not an ip camera or mjpeg
        # stream)
        serial = args.serial
        baud = args.baud
        self.send_serial = False
        self.send_udp = False
        self.video_flag = False
        # Optional serial output of the pulse estimate; baud defaults to 9600
        if serial:
            self.send_serial = True
            if not baud:
                baud = 9600
            else:
                baud = int(baud)
            self.serial = Serial(port=serial, baudrate=baud)

        # Optional UDP output; target is given as "ip" or "ip:port"
        # (port defaults to 5005)
        udp = args.udp
        if udp:
            self.send_udp = True
            if ":" not in udp:
                ip = udp
                port = 5005
            else:
                ip, port = udp.split(":")
                port = int(port)
            self.udp = (ip, port)
            self.sock = socket.socket(
                socket.AF_INET,  # Internet
                socket.SOCK_DGRAM)  # UDP

        # Probe up to three camera devices; the first entry is always kept
        # even if invalid, and probing stops at the first invalid device
        self.cameras = []
        self.selected_cam = 0
        for i in range(3):
            camera = Camera(camera=i)  # first camera by default
            if camera.valid or not len(self.cameras):
                self.cameras.append(camera)
            else:
                break
        self.w, self.h = 0, 0
        self.pressed = 0
        # Containerized analysis of received image frames (an openMDAO assembly)
        # is defined next.

        # This assembly is designed to handle all image & signal analysis,
        # such as face detection, forehead isolation, time series collection,
        # heart-beat detection, etc.

        # Basically, everything that isn't communication
        # to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Data display - raw signal (top) and PSD (bottom)"

        # Maps keystrokes to specified methods
        #(A GUI window must have focus for these to work)
        self.key_controls = {
            "s": self.toggle_search,
            "d": self.toggle_display_plot,
            "c": self.toggle_cam,
            "f": self.write_csv,
            "v": self.toggle_video
        }

        # Pre-recorded video source read alongside the live camera
        self.video = cv2.VideoCapture('video1.avi')
        #self.video.set(cv2.CAP_PROP_FPS, 16)

        # Base name for CSV exports
        self.csvn = "Webcam-pulse"

        fps = self.video.get(cv2.CAP_PROP_FPS)
        print "Frames per second using video.get(cv2.CAP_PROP_FPS) : {0}".format(
            fps)

        #Creates a blank image to initialise self.irframe
        # NOTE(review): the array is actually 400x400 (the original comment
        # said 200x200) -- confirm which size was intended.
        blank_image = np.zeros((400, 400, 3), np.uint8)
        self.irframe = blank_image
        #self.irframe = None
        self.callbackFlag = False
    def __init__(self, args):
        """
        Parse command-line options and set up serial/UDP outputs, the frame
        source (live camera, URL stream, a single video file, or a
        directory of videos played in sequence), recording parameters, the
        analysis pipeline, and key bindings.

        args: argparse-style namespace -- reads .serial, .baud, .video,
        .udp, .url, .video_dir, .init_temp (presumably produced by the
        application's CLI parser; confirm against the parser definition).
        """
        # Imaging device - must be a connected camera (not an ip camera or mjpeg
        # stream)
        serial = args.serial
        baud = args.baud
        video = args.video
        self.Tx = False
        self.environment = []
        self.ser = None
        self.kill = False
        self.vidname = ""
        self.send_serial = False
        self.send_udp = False
        self.question_number = "-1"
        # Optional serial output of the pulse estimate; baud defaults to 9600
        if serial:
            self.send_serial = True
            if not baud:
                baud = 9600
            else:
                baud = int(baud)
            self.serial = Serial(port=serial, baudrate=baud)

        # Optional UDP output; target is given as "ip" or "ip:port"
        # (port defaults to 5005)
        udp = args.udp
        if udp:
            self.send_udp = True
            if ":" not in udp:
                ip = udp
                port = 5005
            else:
                ip, port = udp.split(":")
                port = int(port)
            self.udp = (ip, port)
            self.sock = socket.socket(
                socket.AF_INET,  # Internet
                socket.SOCK_DGRAM)  # UDP
        if video:
            self.vidname = video

        self.cameras = []
        self.selected_cam = 0

        # Frame source selection: URL stream takes precedence, then a
        # camera/single video, then a directory of videos
        if args.url is not None:
            camera = Camera(camera=args.url)
            self.cameras.append(camera)

        elif args.video_dir is None:
            # Real-time for camera=0, read from one video
            # first camera by default
            camera = Camera(camera=0, vid=self.vidname)
            if camera.valid or not len(self.cameras):
                self.cameras.append(camera)
            else:
                print('Error: No camera was found')

        else:
            # read all videos from a directory in a sequence
            self.video_names = glob.glob(args.video_dir + '/*.mp4')
            self.video_names.sort()
            for i in range(len(self.video_names)):
                # start from the first video
                camera = Video(vid=self.video_names[i])
                if camera.valid or not len(self.cameras):
                    self.cameras.append(camera)

        self.w, self.h = 0, 0
        # self.record = False
        # Native (width, height) of the selected source, used for recording
        self.sz = (int(self.cameras[self.selected_cam].cam.get(
            cv2.CAP_PROP_FRAME_WIDTH)),
                   int(self.cameras[self.selected_cam].cam.get(
                       cv2.CAP_PROP_FRAME_HEIGHT)))

        self.fourcc = cv2.VideoWriter_fourcc(*'MP4V')
        self.fps = 25
        self.q = 0
        # self.out = None
        self.pressed = 0
        # Containerized analysis of recieved image frames (an openMDAO assembly)
        # is defined next.

        # This assembly is designed to handle all image & signal analysis,
        # such as face detection, forehead isolation, time series collection,
        # heart-beat detection, etc.

        # Basically, everything that isn't communication
        # to the camera device or part of the GUI
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)
        self.processor.init_temp = args.init_temp
        # Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Data display - raw signal (top) and PSD (bottom)"

        # Maps keystrokes to specified methods
        # (A GUI window must have focus for these to work)
        self.key_controls = {
            "s": self.toggle_search,
            "d": self.toggle_display_plot,
            "c": self.toggle_cam,
            # "g": self.start_record,
            # "f": self.stop_record
        }
class AnalyseEmotion(object):
    """
    Python application that finds a face in a webcam stream, then isolates the
    forehead.

    Then the average green-light intensity in the forehead region is gathered 
    over time, and the detected person's pulse is estimated.
    """
    def __init__(self):
        # Imaging device - must be a locally connected camera (not an ip
        # camera or mjpeg stream).
        self.camera = Camera(camera=0)  # first camera by default

        # Frame size and last keypress; filled in by main_loop() and
        # key_handler() respectively.
        self.w, self.h = 0, 0
        self.pressed = 0

        # Containerized analysis of recieved image frames (an openMDAO
        # assembly).  This assembly handles all image & signal analysis:
        # face detection, forehead isolation, time series collection,
        # heart-beat detection, etc.  Basically, everything that isn't
        # communication to the camera device or part of the GUI.
        self.processor = findFaceGetPulse(bpm_limits=[50, 160],
                                          data_spike_limit=2500.,
                                          face_detector_smoothness=10.)

        # Init parameters for the cardiac data plot
        self.bpm_plot = False
        self.plot_title = "Cardiac info - raw signal, filtered signal, and PSD"

        # Maps keystrokes to specified methods
        # (A GUI window must have focus for these to work)
        self.key_controls = {
            "s": self.toggle_search,
            "d": self.toggle_display_plot,
            "f": self.write_csv,
        }
        
    def write_csv(self):
        """
        Writes current data to a csv file.

        The file name embeds the current timestamp and the detected BPM;
        the file holds one (time, sample) pair per row.
        """
        bpm_tag = " " + str(int(self.processor.measure_heart.bpm))
        timestamp = str(datetime.datetime.now()).split(".")[0]
        fn = timestamp + bpm_tag + " BPM.csv"

        # Stack the time axis and the raw samples as two columns.
        data = np.array([self.processor.fft.times,
                         self.processor.fft.samples]).T
        np.savetxt(fn, data, delimiter=',')


    def toggle_search(self):
        """
        Toggles a motion lock on the processor's face detection component.

        Locking the forehead location in place significantly improves
        data quality, once a forehead has been sucessfully isolated. 
        """
        state = self.processor.find_faces.toggle()
        if not state:
        	self.processor.fft.reset()
        print "face detection lock =",not state

    def toggle_display_plot(self):
        """
        Toggles the data display.
        """
        if self.bpm_plot:
            print "bpm plot disabled"
            self.bpm_plot = False
            destroyWindow(self.plot_title)
        else:
            print "bpm plot enabled"
            self.bpm_plot = True
            self.make_bpm_plot()
            moveWindow(self.plot_title, self.w,0)

    def key_handler(self):    
        """
        Handle keystrokes, as set at the bottom of __init__()

        A plotting or camera frame window must have focus for keypresses to be
        detected.
        """
        # NOTE(review): relies on module-level globals `f` (an open output
        # file) and `line2`, neither defined in this class -- confirm they
        # are initialised at module scope before the main loop runs.
        global line2
        self.pressed = waitKey(10) & 255 #wait for keypress for 10 ms
        if self.pressed == 27 and self.processor.fft.ready == True: #exit program on 'esc'
            print "exiting..."
            self.camera.cam.release()
            #a = sum(self.processor.fft.samples)/float(len(self.processor.fft.samples))
            # Append the final BPM estimate and `line2` to the results file.
            print >>f, str(self.processor.show_bpm_text.bpm)
            print >>f, str(line2)
            #f.write(str(a) + "\n")
            #f.write(str(line2) + "\n")
            exit()

        # Dispatch any other recognised key to its handler from key_controls.
        for key in self.key_controls.keys():
            if chr(self.pressed) == key:
                self.key_controls[key]()

    def main_loop(self):
        """
        Single iteration of the application's main loop.

        Grabs a frame, runs the analysis pipeline on it, displays the
        annotated result, and then handles any pending keypresses.
        """
        # Get current image frame from the camera
        frame = self.camera.get_frame()
        self.h,self.w,_c = frame.shape
        

        #display unaltered frame
        #imshow("Original",frame)

        #set current image frame to the processor's input
        self.processor.frame_in = frame
        #process the image frame to perform all needed analysis
        self.processor.run()
        #collect the output frame for display
        output_frame = self.processor.frame_out

        #show the processed/annotated output frame
        imshow("Processed",output_frame)

        #create and/or update the raw data display if needed
        #if self.bpm_plot:
            #self.make_bpm_plot() 
        #print(self.processor.fft.samples) 
        # Progress indicator: key_handler() only lets 'esc' exit once the
        # FFT has collected enough samples to be ready.
        if(self.processor.fft.ready):
            print "Ready to hit escape"
        else:
            print".",
        
        # NOTE(review): `capture`, `faceCascade` and DetectRedEyes() are
        # module-level names not defined in this class, and this uses the
        # legacy `cv` API unlike the cv2-style calls above -- confirm they
        # are initialised at module scope.
        img = cv.QueryFrame(capture)    
        if img:
            image = DetectRedEyes(img, faceCascade)
            cv.ShowImage("camera", image)
        #handle any key presses
        self.key_handler()