示例#1
0
def servo_xy(x, y):
    """Drive the pan/tilt servos to the given raw pulsewidths.

    Args:
        x: pulsewidth for the X-axis servo (passed straight to pigpio).
        y: pulsewidth for the Y-axis servo.

    Respects puppypi_config.servousage: when servos are disabled the
    request is only logged and then ignored.
    """
    if not puppypi_config.servousage:
        puppypi_util.printmsg("Servo Ignored :  x:{} y:{}".format(x, y))
        return

    puppypi_util.printmsg("Servo x:{} y:{}".format(x, y))
    pi = pigpio.pi()  # Connect to local Pi.
    try:
        pi.set_servo_pulsewidth(puppypi_config.gpio_x, x)
        pi.set_servo_pulsewidth(puppypi_config.gpio_y, y)
    finally:
        # BUG FIX: the pigpio connection was never released, leaking a
        # socket to the pigpio daemon on every call.
        pi.stop()
示例#2
0
def do_button_debug():
    """Poll the PCB button forever and print a message on each press.

    Hardware debugging aid; never returns.
    """
    puppypi_util.printmsg("Debug Button Press Mode")
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(puppypi_config.gpio_button_pcb, GPIO.IN)

    while True:
        input_state = GPIO.input(puppypi_config.gpio_button_pcb)
        # Input reads low (falsy) when the button is pressed.
        if not input_state:
            print('Button Pressed')
        # BUG FIX: sleep on every iteration, not only after a press —
        # the original busy-spun at 100% CPU while the button was idle.
        time.sleep(0.2)
示例#3
0
def main():
    """Parse command-line options and dispatch to the requested puppy mode."""
    parser = argparse.ArgumentParser(description='Face processing puppy.')

    parser.add_argument("-x", "--servo_x", type=int, help="servo x setting")
    parser.add_argument("-y", "--servo_y", type=int, help="servo y setting")
    parser.add_argument("--videofile", help="pre recorded video file")
    parser.add_argument("--aws", help="test AWS")
    parser.add_argument("-v", "--verbose", help="increase output verbosity", action="store_true")
    parser.add_argument("-b", "--button", help="wait for external button press", action="store_true")
    parser.add_argument("-db", "--debugbutton", help="print a debug message on PCB button press", action="store_true")
    parser.add_argument("--noservo", help="suppress the servo", action="store_true")
    parser.add_argument("--livevideo", help="live video", action="store_true")
    parser.add_argument("--servodemo", help="demonstrate the servo", action="store_true")
    parser.add_argument("--showvideoframe", help="Display a video frame via XWindows", action="store_true")
    parser.add_argument("--novideo", help="Suppress a video frame via XWindows", action="store_true")

    args = parser.parse_args()

    # Propagate global flags before choosing a mode.
    puppypi_config.verbosemode = args.verbose
    puppypi_config.showvideoframe = args.showvideoframe
    puppypi_config.novideo = args.novideo

    if args.noservo:
        puppypi_config.servousage = False
        puppypi_util.printmsg("Servo turned off")

    if args.livevideo:
        puppypi_servo.servo_on()
        puppypi_video.process_livevideo()
        puppypi_servo.servo_off()

    elif args.button:
        puppypi_button.do_button()

    elif args.aws:
        puppypi_aws.mainAWS(args.aws)

    elif args.videofile:
        # A pre-recorded file gives no live feedback loop, so disable servos.
        puppypi_config.servousage = False
        puppypi_video.process_video(args.videofile)

    # BUG FIX: servo_x / servo_y default to None when not supplied, and
    # "None > 0" raises TypeError on Python 3 — guard for None first.
    elif (args.servo_x is not None and args.servo_y is not None
          and args.servo_x > 0 and args.servo_y > 0):
        puppypi_servo.servo_on()
        puppypi_servo.servo_xy(args.servo_x, args.servo_y)
        puppypi_servo.servo_off()

    elif args.servodemo:
        puppypi_util.printmsg("Servo Demo")
        puppypi_servo.servo_on()
        puppypi_servo.servo_demo()
        puppypi_servo.servo_off()

    elif args.debugbutton:
        puppypi_button.do_button_debug()
示例#4
0
def process_video(file_video):
    """Run face detection over a pre-recorded video file, frame by frame.

    Args:
        file_video: path to the video file to process.
    """
    puppypi_util.printmsg('Video source:"{}"'.format(file_video))

    # Create the haar cascade
    faceCascade = cv2.CascadeClassifier(puppypi_config.file_cascPath)

    # capture frames from the camera
    cap = cv2.VideoCapture(file_video)
    try:
        while cap.isOpened():
            ret, frame = cap.read()
            if frame is None:  # end of stream
                break
            process_frame(faceCascade, frame, False)
    finally:
        # BUG FIX: the capture handle was never released.
        cap.release()
示例#5
0
def do_button():
    """Poll the buttons forever and run the action bound to each one."""
    puppypi_util.printmsg("Button Press Mode")
    button_setup()

    poll_interval = 0.2  # seconds between polls and after each action
    while True:
        time.sleep(poll_interval)

        if button_is_yellow():
            # Yellow button: run the live face-tracking loop.
            print('Button Pressed Yellow')
            puppypi_servo.servo_on()
            puppypi_video.process_livevideo()
            puppypi_servo.servo_off()
            time.sleep(poll_interval)

        if button_is_green():
            # Green button: snapshot with a timestamped name, send to AWS.
            print('Button Pressed Green')
            stamp = datetime.datetime.today().strftime('%Y%m%d-%H%M%S')
            file_string = './tmp/snapped_{}'.format(stamp)
            puppypi_aws.mainAWS(file_string)
            time.sleep(poll_interval)
示例#6
0
def process_frame(faceCascade, frame, alwaysShowFrame):
    """Detect faces in one frame, annotate it, save it, and steer the servos.

    Args:
        faceCascade: a loaded cv2.CascadeClassifier for face detection.
        frame: BGR image array (camera or video frame).
        alwaysShowFrame: show the frame even when no face was found.

    Returns:
        True when at least one face was detected, else False.
    """
    retFaceFound = False
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)

    # BUG FIX: the cv2.cv module was removed in OpenCV 3; prefer the
    # modern constant and fall back for legacy OpenCV 2 installs.
    scale_flag = getattr(cv2, 'CASCADE_SCALE_IMAGE', None)
    if scale_flag is None:
        scale_flag = cv2.cv.CV_HAAR_SCALE_IMAGE

    # Detect faces in the image
    faces = faceCascade.detectMultiScale(gray,
                                         scaleFactor=1.1,
                                         minNeighbors=5,
                                         minSize=(30, 30),
                                         flags=scale_flag)

    # Draw a circle around each face.  BUG FIX: centre coordinates must be
    # ints — on Python 3, x + w / 2 is a float and cv2.circle rejects it.
    for (x, y, w, h) in faces:
        cv2.circle(frame, (x + w // 2, y + h // 2), int((w + h) / 3),
                   (0, 0, 255), 4)

    facesfound = len(faces)
    if facesfound > 0:
        retFaceFound = True
        # NOTE: x, y, w, h here refer to the *last* face from the loop above.
        puppypi_util.printmsg("Found {} faces. x:{}, y{}, z{}".format(
            facesfound, x + w // 2, y + h // 2, (w + h) // 3))
        jpg_file = './tmp/face_found_{}.jpg'.format(
            datetime.datetime.today().strftime('%Y%m%d-%H%M%S'))
        cv2.imwrite(jpg_file, frame, [int(cv2.IMWRITE_JPEG_QUALITY), 90])
        # Offset of the face centre from the frame centre, scaled to servo
        # units; x is mirrored (negative factor).
        delta_x = (x + w / 2 - (capture_setup_x / 2)) * -delta_factor
        delta_y = (y + h / 2 - (capture_setup_y / 2)) * delta_factor
        puppypi_util.printmsg('Delta x:{} y:{}'.format(delta_x, delta_y))
        # Only move the servos when the face is meaningfully off-centre.
        if abs(delta_x) > ignore_threshold or abs(delta_y) > ignore_threshold:
            puppypi_servo.servo_delta(delta_x, delta_y)

    if (not puppypi_config.novideo) and (alwaysShowFrame or facesfound > 0):
        cv2.imshow('frame', frame)
        cv2.waitKey(1)  # give the GUI event loop a chance to render
    
    return retFaceFound
示例#7
0
def process_livevideo():
    """Track faces on the live Pi camera until the yellow button is released.

    Centres the servos, then continuously captures frames and runs face
    detection on each; if no face is seen for more than 5 seconds the
    camera is re-centred.  Returns (after cleanup) when the yellow button
    is no longer held.
    """
    puppypi_util.printmsg('Live Video source')

    puppypi_servo.servo_centre()
    AwayFromCentre = False

    # Create the haar cascade
    faceCascade = cv2.CascadeClassifier(puppypi_config.file_cascPath)

    # initialize the camera and grab a reference to the raw camera capture
    camera = PiCamera()
    camera.resolution = (capture_setup_x, capture_setup_y)
    camera.framerate = 32
    rawCapture = PiRGBArray(camera, size=(capture_setup_x, capture_setup_y))

    # allow the camera to warmup
    time.sleep(0.1)

    # capture frames from the camera
    FaceLastSeen = time.time()
    for frame in camera.capture_continuous(rawCapture,
                                           format="bgr",
                                           use_video_port=True):
        # Releasing the yellow button stops tracking; clean up and return.
        if not puppypi_button.button_is_yellow():
            # BUG FIX: typo in log message ("presset").
            puppypi_util.printmsg("Button pressed to stop tracking")
            puppypi_servo.servo_centre()
            cv2.destroyAllWindows()
            camera.close()
            return

        image = frame.array
        wasFaceFound = process_frame(faceCascade, image, True)
        rawCapture.truncate(0)  # reset the capture buffer for the next frame
        if wasFaceFound:
            FaceLastSeen = time.time()
            AwayFromCentre = True
        elif (AwayFromCentre and (time.time() - FaceLastSeen) >
              5):  # more than 5 seconds, recentre camera
            # BUG FIX: typo in log message ("recente").
            puppypi_util.printmsg("It's been a while - recentre")
            puppypi_servo.servo_centre()
            AwayFromCentre = False
示例#8
0
    elif (args.aws):
        puppypi_aws.mainAWS(args.aws)

    elif args.videofile:
        puppypi_config.servousage = False
        puppypi_video.process_video(args.videofile)

    elif (args.servo_x > 0 and args.servo_y > 0):
        puppypi_servo.servo_on()
        puppypi_servo.servo_xy(args.servo_x, args.servo_y)
        puppypi_servo.servo_off()

    elif args.servodemo:
        puppypi_util.printmsg("Servo Demo")
        puppypi_servo.servo_on()
        puppypi_servo.servo_demo()
        puppypi_servo.servo_off()

    elif (args.debugbutton):
        puppypi_button.do_button_debug()


# Script entry point: the finally block guarantees the servos are turned
# off and a cleanup message is logged even when main() raises (e.g. on
# Ctrl-C / KeyboardInterrupt during one of the infinite button loops).
if __name__ == "__main__":
    try:
        main()

    finally:
        puppypi_servo.servo_off()
        puppypi_util.printmsg("Cleanup and exit")
示例#9
0
def servo_centre():
    """Return both servos to their configured midpoint position."""
    puppypi_util.printmsg('Servo centred')
    # Reset pan/tilt targets to the midpoints, then push them to hardware.
    puppypi_config.servo_x, puppypi_config.servo_y = (
        puppypi_config.x_mid, puppypi_config.y_mid)
    servo_update()