import sys
import time
import traceback

import av
import cv2
import numpy
import pygame
import tellopy

import handTracking
import imageAnalysis


def main2():
    handTracking.init_cpm_session()
    gesture_counter = [0] * len(handTracking.gesture_list)
    track_face = False

    while True:
        gesture = None  # avoid reusing a stale gesture when detection is skipped
        img = get_img_from_stream()
        resized_image = cv2.resize(img, (480, 360))
        if not imageAnalysis.FOUND_FACE:
            img2, _, _, _, _ = handTracking.trackHandCPM(resized_image)
            gesture = handTracking.get_gesture(resized_image)
        if gesture == "pitch":
            gesture_counter = set_gesture_counter("pitch", gesture_counter)
            if gesture_counter[0] >= 10:
                print("Land drone!")
        elif gesture == "fist":
            gesture_counter = set_gesture_counter("fist", gesture_counter)
            if gesture_counter[1] >= 10:
                print("Take off")
        elif gesture == "victory":
            gesture_counter = set_gesture_counter("victory", gesture_counter)
            if gesture_counter[2] >= 10:
                track_face = not track_face
                gesture_counter = set_gesture_counter("pitch", gesture_counter)
                print("Tracking face: ", track_face)
        elif gesture == "thumbs_up":
            gesture_counter = set_gesture_counter("thumbs_up", gesture_counter)

        if track_face:
            img2 = imageAnalysis.recognize_face(resized_image)

        print(gesture_counter)

        show_image(resized_image)
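

# --- Hedged sketch (not part of the original source) ---
# The helpers below are called in main2() but not defined in this snippet.
# Their bodies are assumptions consistent with how they are used above.

def set_gesture_counter(gesture, gesture_counter):
    # increment the slot matching the recognized gesture and reset the rest;
    # slot order is assumed to follow handTracking.gesture_list
    # (e.g. ["pitch", "fist", "victory", "thumbs_up"])
    for i, name in enumerate(handTracking.gesture_list):
        gesture_counter[i] = gesture_counter[i] + 1 if name == gesture else 0
    return gesture_counter


_capture = cv2.VideoCapture(0)  # hypothetical stand-in video source


def get_img_from_stream():
    # hypothetical stand-in: the original presumably pulls the next frame
    # from the drone's video feed; a webcam capture is used here so the
    # snippet stays runnable on its own
    _, frame = _capture.read()
    return frame


def show_image(image):
    # assumed to be a thin wrapper around cv2.imshow
    cv2.imshow("tello", image)
    cv2.waitKey(1)

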
def control_drone_by_mode(image, mode=None):
    global face_counter

    throttleValue = 0
    yawValue = 0
    pitchValue = 0

    # face tracking mode: steer the drone toward the detected face
    if mode == mode_list[0]:
        _, centerX, centerY, faceWidth, faceHeight = imageAnalysis.recognize_face(image)

        if centerX is not None:
            face_counter = 0

            throttleValue = -centerY / (height / 2)
            yawValue = centerX / (width / 2)
            pitchValue = 0
            if faceWidth * faceHeight > 0.1 * width * height:  # 0.2
                pitchValue = -0.2
            elif faceWidth * faceHeight < 0.01 * width * height:  # 0.05
                pitchValue = 0.2
        else:
            throttleValue = 0
            yawValue = 0
            pitchValue = 0
            face_counter += 1

    # hand tracking mode: steer the drone toward the tracked hand
    elif mode == mode_list[1]:
        face_counter = 0
        img2, x, y, w, h = handTracking.trackHandCPM(image)

        if not handTracking.tracker.loss_track:
            centerX, centerY = computer_center_points(x, y, w, h, image)
            throttleValue = -centerY / (height / 2)
            yawValue = centerX / (width / 2)
            if w * h >= 0.01 * width * height:
                pitchValue = -0.2
            elif w * h < 0.008 * width * height:
                pitchValue = 0.2
        else:
            throttleValue = 0
            yawValue = 0
            pitchValue = 0

    return throttleValue, yawValue, pitchValue
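

# --- Hedged sketch (not part of the original source) ---
# computer_center_points() is used above but not defined in this snippet.
# From the way throttle and yaw are derived, it is assumed to return the
# offset of the bounding-box centre from the image centre, so both values
# go to zero when the target is centred in the frame.
def computer_center_points(x, y, w, h, image):
    img_height, img_width = image.shape[:2]
    center_x = (x + w / 2) - img_width / 2
    center_y = (y + h / 2) - img_height / 2
    return center_x, center_y

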
def main():
    # init hand tracking model and the drone
    handTracking.init_cpm_session()
    drone = tellopy.Tello()

    try:
        # init Pygame window and connect the drone
        pygame.init()
        pygame.display.set_mode((1, 1))

        if SAVE_VIDEO:
            video_output = cv2.VideoWriter(VIDEO_FILENAME, VIDEO_CODEC, 15.0,
                                           (960, 720))

        drone.connect()
        drone.wait_for_connection(60.0)

        container = av.open(drone.get_video_stream())
        # skip first 300 frames
        frame_skip = 300

        while True:
            for frame in container.decode(video=0):
                if frame_skip > 0:
                    frame_skip -= 1
                    continue
                # manual keyboard controls for takeoff and landing
                for event in pygame.event.get():
                    if event.type == pygame.KEYDOWN:
                        if event.key == pygame.K_LCTRL:
                            drone.takeoff()
                        if event.key == pygame.K_w:
                            drone.land()

                start_time = time.time()

                img = cv2.cvtColor(numpy.array(frame.to_image()),
                                   cv2.COLOR_RGB2BGR)

                if SAVE_VIDEO:
                    video_output.write(img)
                resized_image = cv2.resize(img, (IMAGE_WIDTH, IMAGE_HEIGHT))

                img2, x, y, w, h = handTracking.trackHandCPM(resized_image)
                if not handTracking.tracker.loss_track:
                    centerX, centerY = computer_center_points(
                        x, y, w, h, resized_image)
                    throttleValue = -centerY / (IMAGE_HEIGHT / 2)
                    yawValue = centerX / (IMAGE_WIDTH / 2)
                    if w * h >= 0.01 * IMAGE_WIDTH * IMAGE_HEIGHT:
                        pitchValue = -0.2
                    elif w * h < 0.008 * IMAGE_WIDTH * IMAGE_HEIGHT:
                        pitchValue = 0.2
                else:
                    throttleValue = 0
                    yawValue = 0
                    pitchValue = 0

                drone.set_throttle(throttleValue)
                drone.set_yaw(yawValue)
                drone.set_pitch(pitchValue)

                show_image(resized_image)
                frame_skip = int((time.time() - start_time) / frame.time_base)

    except Exception as ex:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_traceback)
        print(ex)
    finally:
        # shut down the drone connection and close all windows
        drone.quit()
        cv2.destroyAllWindows()
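

# --- Hedged sketch (not part of the original source) ---
# Module-level names referenced by the functions above but not defined in
# this snippet. The values are illustrative assumptions, not the project's
# actual configuration.
IMAGE_WIDTH, IMAGE_HEIGHT = 480, 360        # assumed working resolution
width, height = IMAGE_WIDTH, IMAGE_HEIGHT   # aliases used by some functions
SAVE_VIDEO = False                          # set True to record the raw feed
VIDEO_FILENAME = 'output.avi'
VIDEO_CODEC = cv2.VideoWriter_fourcc(*'mp4v')
mode_list = ["face_tracking", "hand_tracking"]  # assumed mode identifiers
face_counter = 0                            # frames since a face was last seen

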
def main():
    handTracking.init_cpm_session()
    drone = tellopy.Tello()

    mode = None

    try:
        pygame.init()
        pygame.display.set_mode((1, 1))
        if SAVE_VIDEO:
            fourcc = cv2.VideoWriter_fourcc(*'mp4v')
            out = cv2.VideoWriter('output.avi', fourcc, 15.0, (960, 720))
        drone.connect()
        drone.wait_for_connection(60.0)

        container = av.open(drone.get_video_stream())
        # skip first 300 frames
        frame_skip = 300

        gesture_counter = [0] * len(handTracking.gesture_list)
        while True:
            for frame in container.decode(video=0):
                if frame_skip > 0:
                    frame_skip -= 1
                    continue
                for event in pygame.event.get():
                    if event.type == pygame.KEYDOWN:
                        if event.key == pygame.K_LCTRL:
                            drone.takeoff()
                        if event.key == pygame.K_w:
                            drone.land()
                        if event.key == pygame.K_0:
                            mode = None
                        if event.key == pygame.K_1:
                            mode = mode_list[0]
                        if event.key == pygame.K_2:
                            mode = mode_list[1]

                start_time = time.time()

                img = cv2.cvtColor(numpy.array(frame.to_image()),
                                   cv2.COLOR_RGB2BGR)

                if SAVE_VIDEO:
                    out.write(img)

                resized_image = cv2.resize(img, (width, height))

                img2, x, y, w, h = handTracking.trackHandCPM(resized_image)
                gesture = handTracking.get_gesture(resized_image)

                if gesture == "pitch":
                    gesture_counter = set_gesture_counter(
                        "pitch", gesture_counter)
                    print(gesture_counter)
                    if gesture_counter[0] >= 10:
                        drone.land()

                elif gesture == "fist":
                    pitchCounter = 0
                    fistCounter += 1
                    victoryCounter = 0

                    if gesture_counter[1] >= 5:
                        drone.takeoff()
                elif gesture == "victory":
                    pitchCounter = 0
                    fistCounter = 0
                    victoryCounter += 1

                    if gesture_counter[2] >= 5:
                        drone.up(70)
                        time.sleep(0.5)
                        drone.down(70)
                        time.sleep(0.5)

                throttleValue, yawValue, pitchValue = control_drone_by_mode(
                    resized_image, mode)

                drone.set_throttle(throttleValue)
                drone.set_yaw(yawValue)
                drone.set_pitch(pitchValue)

                show_image(resized_image)
                frame_skip = int((time.time() - start_time) / frame.time_base)

    except Exception as ex:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        traceback.print_exception(exc_type, exc_value, exc_traceback)
        print(ex)
    finally:
        drone.quit()
        cv2.destroyAllWindows()
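

# --- Hedged sketch (not part of the original source) ---
# Entry point: run the keyboard- and gesture-controlled flight loop.
if __name__ == "__main__":
    main()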