def main():
    """Run the face-locating capture loop, then shut pygame down cleanly."""
    player = VideoCapturePlayer(processFunction=locateFacesProcess)
    player.main()
    pygame.quit()
if self.frames > 50 and len(self.bgs) < 10: self.bgs.append(snapshot) else: # Average them out to remove noise, and save as background self.background = pygame.transform.average_surfaces(self.bgs) self.calibrated = True self.frames += 1 def threshold(self, snapshot): dest = snapshot.copy() dest.fill((255,255,255)) # Make a black background threshold_value = 10 # How close to the existing colour must each point be? pygame.transform.threshold(dest, snapshot, (0,0,0), [threshold_value]*3 ,(255,255,255),1, self.background) # Median filter would be good here to remove salt + pepper noise... return dest #self.dest def process(self, snapshot): if not self.calibrated: return self.calibrate(snapshot) else: return self.threshold(snapshot) if __name__ == "__main__": ds = VisionDS() vcp = VideoCapturePlayer(processFunction=ds.process) vcp.main()
# get the colour in that rect self.ccolor = pygame.transform.average_color(snapshot, crect) # fill upper left corner with that color pygame.draw.rect(dest, self.ccolor, (0, 0, 50, 50), 0) # check to see if mouse has been pressed, if so use current color if pygame.event.peek(pygame.MOUSEBUTTONDOWN): self.calibrated = True return dest def threshold(self, snapshot): dest = snapshot.copy() print pygame.transform.threshold(dest, snapshot, self.ccolor, (30, 30, 30), (0, 0, 0), 1) return dest #self.dest def process(self, snapshot): if not self.calibrated: return self.calibrate(snapshot) else: return self.threshold(snapshot) if __name__ == "__main__": thresholder = Thresholder() vcp = VideoCapturePlayer(processFunction=thresholder.process) vcp.main()
self.bgs.append(snapshot) else: # Average them out to remove noise, and save as background self.background = pygame.transform.average_surfaces(self.bgs) self.calibrated = True self.frames += 1 def threshold(self, snapshot): dest = snapshot.copy() dest.fill((255, 255, 255)) # Make a black background threshold_value = 10 # How close to the existing colour must each point be? pygame.transform.threshold(dest, snapshot, (0, 0, 0), [threshold_value] * 3, (255, 255, 255), 1, self.background) # Median filter would be good here to remove salt + pepper noise... return dest #self.dest def process(self, snapshot): if not self.calibrated: return self.calibrate(snapshot) else: return self.threshold(snapshot) if __name__ == "__main__": ds = VisionDS() vcp = VideoCapturePlayer(processFunction=ds.process) vcp.main()
def main():
    """Run the edge-detection capture loop."""
    player = VideoCapturePlayer(processFunction=edgeDetectionProcess)
    player.main()
if len(self.bgs) < 10: self.bgs.append(snapshot) else: # Average them out to remove noise, and save as background self.background = pygame.transform.average_surfaces(self.bgs) self.calibrated = True def threshold(self, snapshot): dest = snapshot.copy() dest.fill((255, 255, 255)) # Make a black background threshold_value = 10 # How close to the existing colour must each point be? pygame.transform.threshold(dest, snapshot, (0, 0, 0), [threshold_value] * 3, (255, 255, 255), 1, self.background) # Median filter would be good here to remove salt + pepper noise... return dest #self.dest def process(self, snapshot): if not self.calibrated: return self.calibrate(snapshot) else: return self.threshold(snapshot) if __name__ == "__main__": greenScreen = GreenScreen() vcp = VideoCapturePlayer(processFunction=greenScreen.process) vcp.main()
#!/usr/bin/env python
"""Grey-dilation demo stub: currently passes each frame through unchanged."""
from pycam import VideoCapturePlayer, numpyFromSurf
from scipy.ndimage import morphology


@numpyFromSurf
def dilate(image):
    """Identity placeholder; the real dilation call is disabled below."""
    return image
    # return morphology.grey_dilation(image, (10, 10, 1))


if __name__ == '__main__':
    player = VideoCapturePlayer(processFunction=dilate)
    player.main()
def overlayAHat(surf, face):
    """Draw the hat image, scaled to *face* (a Rect), on top of *surf*.

    BUG FIX: the factors were written as ``5 / 5, 3 / 5``; under Python 2's
    integer division (this codebase uses Python 2 print statements) that
    evaluates to ``1, 0``, scaling the hat to zero height.  Float literals
    give the intended ratios on both Python 2 and Python 3.
    """
    width_factor, height_factor = 5.0 / 5.0, 3.0 / 5.0
    scaled_hat = transform.scale(
        hat, (int(width_factor * face.width), int(height_factor * face.height)))
    # Centre the hat horizontally over the face ...
    hat_x = int(face.left + (face.width / 2) - width_factor * face.width / 2)
    # ... and lift it so half its height sits above the top of the face.
    hat_y = int(face.top - height_factor * face.height / 2)
    surf.blit(scaled_hat, (hat_x, hat_y))


def drawHatOnFaces(surf):
    """Detect faces on *surf* and overlay a hat on each; return *surf*."""
    faces = pygameFaceDetect.getFaces(surf)
    if faces:
        # Detection ran on a downscaled image; map boxes back to full size.
        s = pygameFaceDetect.faceDetect.image_scale
        for face in faces:
            bounding_rect = Rect(face.x * s, face.y * s,
                                 face.width * s, face.height * s)
            #pygame.draw.rect(surf, Color("blue"), bounding_rect, 2)
            try:
                overlayAHat(surf, bounding_rect)
            except NameError:
                # Fallback when the hat image/helper is unavailable.
                drawAHat(surf, bounding_rect)
    return surf


if __name__ == "__main__":
    VideoCapturePlayer(processFunction=drawHatOnFaces).main()
#!/usr/bin/env python
"""A simple edge-detection filter built on a Laplacian convolution."""
from pycam import VideoCapturePlayer
from pygame import surfarray
import numpy
from scipy import signal

# 3x3 Laplacian kernel: responds to intensity change in any direction.
laplacian_filter = numpy.array([[0, 1, 0],
                                [1, -4, 1],
                                [0, 1, 0]])


def edgeDetect(surf):
    """Return a new surface highlighting the edges of *surf*."""
    # Collapse RGB to a single grey channel before convolving.
    grey = numpy.mean(surfarray.array3d(surf), 2)
    edges = signal.convolve2d(grey, laplacian_filter, mode="same")
    return surfarray.make_surface(edges)


if __name__ == '__main__':
    player = VideoCapturePlayer(processFunction=edgeDetect)
    player.main()
def main():
    """Run the Gaussian-blur capture loop."""
    VideoCapturePlayer(processFunction=gaussianBlur).main()
from pycam.objectDetect import ObjectDetector def locatePeopleProcess(surf, lastState=[]): person = False for detector in detectors: # Try detect anything at all! cvMat = surf2CV(surf) objects = detector.detectObject(cvMat) for o in objects: if o: person = True break lastState.append(person) if len(lastState)> 3 and lastState[-1] is not lastState[-2]: if person: print "There is a person in front of this computer" else: print "There is no one at this computer" if __name__ == "__main__": # eyeDetector = ObjectDetector("eye") detectors = [ ObjectDetector("head"), ObjectDetector("face"), ObjectDetector("upperbody"), ] vcp = VideoCapturePlayer(processFunction=locatePeopleProcess,show=False) vcp.main() pygame.quit()
from pycam.conversionUtils import *
from pycam.pygameFaceDetect import getFaces
from pycam.objectDetect import ObjectDetector
from pygame.locals import *

eyeDetector = ObjectDetector("eye")


def locateFaceAndEyeProcess(surf):
    """Outline each detected face in green and draw detected eyes inside it."""
    faces = getFaces(surf)
    # Detection ran on a downscaled image; map boxes back to full size.
    scale = eyeDetector.image_scale
    if faces:
        for face in faces:
            region = pygame.Rect(face.x * scale, face.y * scale,
                                 face.width * scale, face.height * scale)
            pygame.draw.rect(surf, Color("green"), region, 1)
            # Run the eye detector on just the face region, then write the
            # annotated pixels back into the original surface.
            facialSurf = surf.subsurface(region)
            facialCvMat = surf2CV(facialSurf)
            eyeDetector.detect_and_draw(facialCvMat)
            pygame.surfarray.blit_array(facialSurf, cv2SurfArray(facialCvMat))
    return surf


if __name__ == "__main__":
    player = VideoCapturePlayer(processFunction=locateFaceAndEyeProcess,
                                forceOpenCv=True)
    player.main()
    pygame.quit()