# Example #1 (score: 0)
        fullResStream = picamera.array.PiRGBArray(camera)
        with picamera.array.PiRGBArray(camera) as stream:
            camera.resolution = (160, 120)
            #i = 0
            fr = 1
            while True:
                camera.capture(stream, format='bgr', use_video_port=True)

                # At this point the image is available as stream.array
                image = stream.array
                #rotate image
                #image=image[::-1,::-1]

                if detectRight:
                    hands = handDetect(hand_cascade,
                                       image,
                                       detectLeft=False,
                                       getNegSamples=False)
                    rhValidator.feedNewHandList(hands)
                    if rhValidator.checkForDetectionAndUpdateCounter():
                        rbeep.play()
                        #print rhValidator.foundHandPos
                        foundRHand = np.array(rhValidator.foundHandPos)
                        foundRHandFullSize = RESIZE_FULL_RES_FACTOR * foundRHand  #
                        print foundRHand
                        newx, newy, neww, newh = foundRHand
                        detectRight = True
                else:
                    # check for left in same frame and in num_attmept next frames
                    if numLeftAttempts < MAX_NUM_LEFT_ATTMEPTS:
                        print "searching for left hand attempt#", numLeftAttempts
                        lhands = handDetect(hand_cascade,
# Example #2 (score: 0)
 # NOTE(review): scraped excerpt — it starts inside an enclosing scope (note the
 # one-space indent) and is cut off after the left-hand detection call. The
 # stream.seek(0)/stream.truncate() buffer reset that PiRGBArray needs between
 # captures presumably follows below this excerpt — TODO confirm.
 with picamera.PiCamera() as camera:
     fullResStream = picamera.array.PiRGBArray(camera)
     with picamera.array.PiRGBArray(camera) as stream:
         # Low capture resolution keeps Haar-cascade detection fast on the Pi.
         camera.resolution = (160,120)
         #i = 0
         fr = 1
         while True:
             camera.capture(stream, format='bgr' , use_video_port = True)
             
             # At this point the image is available as stream.array
             image = stream.array
             #rotate image
             #image=image[::-1,::-1]
             
             if detectRight:
                 # Phase 1: search for the RIGHT hand in the current frame.
                 hands = handDetect(hand_cascade , image ,detectLeft = False,  getNegSamples = False)
                 rhValidator.feedNewHandList(hands)
                 if rhValidator.checkForDetectionAndUpdateCounter():
                     rbeep.play()  # audible confirmation of a validated detection
                     #print rhValidator.foundHandPos
                     foundRHand = np.array(rhValidator.foundHandPos)
                     # Scale the low-res (x, y, w, h) box back to full-resolution coords.
                     foundRHandFullSize = RESIZE_FULL_RES_FACTOR*foundRHand # 
                     print foundRHand
                     newx,newy,neww,newh = foundRHand
                     # Right hand found -> switch to left-hand search on later frames.
                     detectRight = False
             else:
                 # check for left in same frame and in num_attmept next frames
                 if numLeftAttempts < MAX_NUM_LEFT_ATTMEPTS:
                     print "searching for left hand attempt#",numLeftAttempts
                     lhands = handDetect(hand_cascade, image, detectLeft = True, getNegSamples = False)
                     print lhands
	

# Live preview loop: capture low-res frames from the Pi camera, rotate them
# 180 degrees, run the hand cascade, and display the result until 'q' is pressed.
hand_cascade = cv2.CascadeClassifier('cascadeMT.xml')
with picamera.PiCamera() as camera:
    with picamera.array.PiRGBArray(camera) as stream:
        # Low resolution keeps per-frame Haar detection fast on the Pi.
        camera.resolution = (160,120)
        while True:
            camera.capture(stream, format='bgr' , use_video_port = True)

            # At this point the image is available as stream.array
            image = stream.array
            gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
            # Rotate the frame 180 degrees in ONE pass: flipCode < 0 flips
            # around both axes, replacing the former flip(gray,0)+flip(gray,1)
            # pair with identical output.
            gray = cv2.flip(gray, -1)
            handDetect(hand_cascade , gray ,detectLeft = True, getNegSamples = False)

            cv2.imshow('image',gray)
            if cv2.waitKey(1) & 0xFF == ord('q'):
                break
            # Reset the capture buffer; PiRGBArray requires this between
            # successive captures into the same stream.
            stream.seek(0)
            stream.truncate()

        cv2.destroyAllWindows()