Code Example #1
    def on_init(self, controller):

        ### Step 1: create max interface ###
        self.max_interface = Max_Interface()

        ### Step 2: notify of initialization ###
        print_status("Controller Listener", "controller initialized")
Code Example #2
    def __init__(self):

        print_welcome()

        ### Step 1: create the listener, controller and connect the two ###
        self.listener = Synth_Listener()
        self.controller = Leap.Controller()
        self.controller.add_listener(self.listener)

        ### Step 2: create max interface and gesture recognizer ###
        self.max_interface = Max_Interface()
        self.gesture_recognizer = Gesture_Recognizer()
Code Example #3
    def on_init(self, controller):
       
        ### Step 1: create max interface ###
        self.max_interface = Max_Interface ()

        ### Step 2: notify of initialization ###
        print_status ("Controller Listener", "controller initialized")
Code Example #4
File: run.py  Project: jayhack/leap_synth
    def __init__ (self):

        print_welcome ()

        ### Step 1: create the listener, controller and connect the two ###
        self.listener = Synth_Listener ()
        self.controller = Leap.Controller ()
        self.controller.add_listener (self.listener)

        ### Step 2: create max interface and gesture recognizer ###
        self.max_interface = Max_Interface ()
        self.gesture_recognizer = Gesture_Recognizer ()
Code Example #5
class Leap_Synth:

    #--- Member Objects ---
    listener = None
    controller = None
    max_interface = None
    gesture_recognizer = None

    # Function: Constructor
    # ---------------------
    # initializes member objects
    def __init__(self):

        print_welcome()

        ### Step 1: create the listener, controller and connect the two ###
        self.listener = Synth_Listener()
        self.controller = Leap.Controller()
        self.controller.add_listener(self.listener)

        ### Step 2: create max interface and gesture recognizer ###
        self.max_interface = Max_Interface()
        self.gesture_recognizer = Gesture_Recognizer()

    # Function: Destructor
    # --------------------
    # removes the listener from the controller
    def __del__(self):

        ### Step 1: turn off the max patch ###
        self.max_interface.send_gesture('Stop')

        ### Step 2: remove leap listener ###
        self.controller.remove_listener(self.listener)

    # Function: get_frame
    # -------------------
    # blocks until it gets a new frame from the listener
    def get_frame(self):

        while (self.listener.new_frame_available == False):
            pass

        frame = self.listener.most_recent_frame
        self.listener.new_frame_available = False

        return frame

    ########################################################################################################################
    ##############################[ --- User Interface --- ]################################################################
    ########################################################################################################################

    # Function: interface_main
    # ------------------------
    # main function for all interface
    def interface_main(self):

        viable_options = ['r', 't', 's']

        ### Step 1: get their requested mode ###
        print_message("What mode would you like to enter?")
        print " - R: record mode"
        print " - T: train mode"
        print " - S: synth mode"
        response = raw_input("---> ")
        response = response.lower()

        if response == 'r':
            while (True):
                self.record_main()
        elif response == 't':
            self.train_main()
        elif response == 's':
            while (True):
                self.synth_main()
        else:
            print_message("Error: did not recognize that option")
            self.interface_main()

    # Function: record_main
    # ---------------------
    # interface for recording gestures
    def record_main(self):

        while (True):
            print_message("What would you like to do?")
            print " - R: record a new gesture"
            print " - Q: quit"
            response = raw_input("---> ")
            response = response.lower()

            if response == 'q':
                exit()
            else:
                self.record_gesture()

    # Function: record_countdown
    # --------------------------
    # prints out a countdown
    def record_countdown(self):
        print "3"
        time.sleep(0.5)
        print "2"
        time.sleep(0.5)
        print "1"
        time.sleep(0.5)
        print "--- record ---"

    # Function: record_gesture
    # ------------------------
    # record a single gesture
    def record_gesture(self):

        num_examples_recorded = 0
        max_examples = 10

        ### Step 1: have them name the gesture ###
        print_message("What is this gesture called?")
        gesture_name = raw_input("---> ")
        print_message("Now we will begin recording " + str(max_examples) +
                      " examples of this gesture, " + str(gesture_name) +
                      ". Press Enter when ready.")
        sys.stdin.readline()

        record_gesture = Gesture(gesture_name)

        #--- initialize parameters ---
        is_recording = False
        num_frames_recorded = 0

        while (num_examples_recorded < max_examples):

            frame = self.get_frame()
            record_gesture.add_frame(frame)

            if record_gesture.is_full():

                ### --- Notify of recording status --- ###
                if is_recording:
                    print "."
                    num_frames_recorded += 1
                else:
                    print "x"

                ### --- Check if we should end the recording --- ###
                if num_frames_recorded >= record_gesture.gesture_length:
                    print_message("### Recording Complete ###")
                    is_recording = False
                    num_frames_recorded = 0
                    num_examples_recorded += 1
                    self.gesture_recognizer.save_gesture(record_gesture)

                ### --- Check if we should start the recording --- ###
                while sys.stdin in select.select([sys.stdin], [], [], 0)[0]:
                    line = sys.stdin.readline()
                    if line:
                        print_message("### Started Recording ###")
                        is_recording = True

    # Function: train_main
    # --------------------
    # train the classifier
    def train_main(self):

        ### Step 1: load in the data and print out stats about it ###
        print_status("Gesture_Recognizer", "Loading Data")
        self.gesture_recognizer.load_data()
        # self.gesture_recognizer.eliminate_second_hand ()
        self.gesture_recognizer.print_data_stats()
        ### Step 2: cluster the poses ###

        print_status("Gesture_Recognizer", "Training Model")
        self.gesture_recognizer.train_model()

    ########################################################################################################################
    ##############################[ --- Synth Main --- ]####################################################################
    ########################################################################################################################

    # Function: synth_main_discrete
    # -----------------------------
    # records discrete gestures and classifies them for you.
    def synth_main_discrete(self):

        self.gesture_recognizer.load_model()

        print_message("Recording Discrete events")
        while (True):

            ### Step 1: initialize the gesture ###
            observed_gesture = Gesture()

            ### Step 2: start the recording ###
            self.record_countdown()

            ### Step 3: fill it with frames ###
            while not observed_gesture.is_full():
                frame = self.get_frame()
                observed_gesture.add_frame(frame)

            ### Step 4: stop the recording and classify ###
            print_message("### Recording Complete ###")
            self.gesture_recognizer.classify_gesture(observed_gesture)

            print_message("enter to continue")
            sys.stdin.readline()

    # Function: get_gesture
    # ---------------------
    # function to wait on gestures
    def get_gesture(self, observed_gesture):

        ### --- add the current frame --- ###
        frame = self.get_frame()
        observed_gesture.add_frame(frame)

        if observed_gesture.is_full():

            ### --- get classification results --- ###
            classification_results = self.gesture_recognizer.classify_gesture(
                observed_gesture)

            ### --- interpret them --- ###
            return classification_results

    # Function: get_coords
    # --------------------
    # returns (x,y,z) of the hand
    def get_continuous_coords(self):

        ### --- add the current frame --- ###
        frame = self.get_frame()

        if len(frame.hands) == 0:
            return None
        else:
            position = frame.hands[0].palm_position
            return (position[0], position[1], position[2])

    # Function: get_position_and_orientation
    # --------------------------------------
    # given a frame, this returns the (palm_position, palm_orientation) if we observe
    # a fist (0 fingers visible); (None, None) otherwise
    def get_position_and_orientation(self, frame):

        ### Step 1: peace out if there are no hands ###
        hands = frame.hands
        if len(hands) == 0:
            return (None, None)

        ### Step 2: peace out if there are any fingers (not a fist) ###
        fingers = hands[0].fingers
        if len(fingers) > 0:
            return (None, None)

        ### Step 3: get position and orientation ###
        hand = hands[0]
        palm_position = hands[0].palm_position
        position = (palm_position[0], palm_position[1], palm_position[2])
        palm_normal = hands[0].palm_normal
        orientation = (palm_normal[0], palm_normal[1], palm_normal[2])

        return (position, orientation)

    # Function: synth_main
    # --------------------
    # maintains a 70-frame gesture and tries to classify it
    def synth_main(self):

        ### Step 1: start the max patch ###
        self.max_interface.send_gesture('Start')

        ### Step 2: initialize local data ###
        print_message("Entering Main Loop: Continuous Gesture Recognition")
        self.gesture_recognizer.load_model()
        observed_gesture = Gesture()

        ### Step 3: enter main loop ###
        while (True):

            ### Step 1: add the current frame to observed_gesture ###
            frame = self.get_frame()
            observed_gesture.add_frame(frame)

            ### Step 2: get position and orientation (returns (None, None) if not a fist) ###
            (palm_position,
             palm_orientation) = self.get_position_and_orientation(frame)

            ### Step 3: Get the gesture, if appropriate ###
            send_gesture = None

            if observed_gesture.is_full():

                classification_results = self.gesture_recognizer.classify_gesture(
                    observed_gesture)
                if classification_results:
                    prediction = classification_results[0]
                    prediction_prob = classification_results[1]
                    print_message("Prediction: " + str(prediction) +
                                  " | Probability: " + str(prediction_prob))
                    send_gesture = prediction
                    observed_gesture.clear()

            ### Step 4: send a gesture to max if one was observed ###
            if send_gesture:
                self.max_interface.send_gesture(send_gesture)

            ### Step 5: Send hand state to max if one was observed ###
            if len(frame.hands) > 0:
                self.max_interface.send_hand_state(frame.hands[0])
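
Taken together, the class above suggests a very small driver: construct Leap_Synth (which wires the Synth_Listener to the Leap.Controller and creates the Max interface), then hand control to interface_main. The sketch below is an assumption about how run.py would be invoked, not code from the original example; it relies only on the names defined above.

# Hypothetical driver sketch (assumes Leap_Synth and its dependencies are importable)
if __name__ == '__main__':

    ### Step 1: build the synth: connects listener, controller and Max interface ###
    leap_synth = Leap_Synth()

    ### Step 2: enter the text interface (record / train / synth modes) ###
    leap_synth.interface_main()
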
Code Example #6
File: run.py  Project: jayhack/LeapReceiver
class Leap_Synth:

    #--- Member Objects ---
    listener            = None
    controller          = None
    max_interface       = None
    gesture_recognizer  = None


    # Function: Constructor 
    # ---------------------
    # initializes member objects
    def __init__ (self):

        print_welcome ()

        ### Step 1: create the listener, controller and connect the two ###
        self.listener = Synth_Listener ()
        self.controller = Leap.Controller ()
        self.controller.add_listener (self.listener)

        ### Step 2: create max interface and gesture recognizer ###
        self.max_interface = Max_Interface ()
        self.gesture_recognizer = Gesture_Recognizer ()

        ### Step 3: determine what the fps is ###
        self.determine_fps ()


    # Function: Destructor 
    # --------------------
    # removes the listener from the controller
    def __del__ (self):

        ### Step 1: turn off the max patch ###
        self.max_interface.send_gesture ('Stop')

        ### Step 2: remove leap listener ###        
        self.controller.remove_listener(self.listener)



    # Function: get_frame
    # -------------------
    # blocks until it gets a new frame from the listener
    def get_frame (self):

        while (self.listener.new_frame_available == False):
            pass

        frame = self.listener.most_recent_frame
        self.listener.new_frame_available = False

        return frame



    # Function: determine_fps
    # -----------------------
    # gets 30 frames in order to determine the fps
    def determine_fps (self):

        # use wall-clock time; timeit.timeit() would benchmark an empty
        # statement rather than return the current time
        start = time.time ()
        for i in range(30):
            print i
            frame = self.get_frame ()
        stop = time.time ()

        # frames per second = frames observed / elapsed seconds
        self.fps = 30.0 / (stop - start)
        print "fps: ", self.fps









    ########################################################################################################################
    ##############################[ --- User Interface --- ]################################################################
    ########################################################################################################################

    # Function: interface_main
    # ------------------------
    # main function for all interface
    def interface_main (self):

        viable_options =['r', 't', 's']

        ### Step 1: get their requested mode ###
        print_message ("What mode would you like to enter?")
        print " - R: record mode"
        print " - T: train mode"
        print " - S: synth mode"
        response = raw_input ("---> ")
        response = response.lower ()

        if response == 'r':
            while (True):
                self.record_main ()
        elif response == 't':
            self.train_main ()
        elif response == 's':
            while (True):
                self.synth_main ()
        else:
            print_message("Error: did not recognize that option")
            self.interface_main ()


    # Function: record_main
    # ---------------------
    # interface for recording gestures
    def record_main (self):

        while (True):
            print_message ("What would you like to do?")
            print " - R: record a new gesture"
            print " - Q: quit"
            response = raw_input ("---> ")
            response = response.lower ()

            if response == 'q':
                exit ()
            else:
                self.record_gesture ()


    # Function: record_countdown 
    # --------------------------
    # prints out a countdown
    def record_countdown (self):
        print "3"
        time.sleep (0.5)
        print "2"
        time.sleep (0.5)
        print "1"
        time.sleep (0.5)
        print "--- record ---"


    # Function: record_gesture 
    # ------------------------
    # record a single gesture
    def record_gesture (self):

        num_examples_recorded = 0
        max_examples = 10

        ### Step 1: have them name the gesture ###
        print_message ("What is this gesture called?")
        gesture_name = raw_input("---> ")
        print_message ("Now we will begin recording " + str(max_examples) + " examples of this gesture, " + str(gesture_name) + ". Press Enter when ready.")
        sys.stdin.readline ()

        record_gesture = Gesture (gesture_name)



        #--- initialize parameters ---
        is_recording        = False
        num_frames_recorded = 0

        while (num_examples_recorded < max_examples):

            frame = self.get_frame ()
            record_gesture.add_frame (frame)

            if record_gesture.is_full ():


                ### --- Notify of recording status --- ###
                if is_recording:
                    print "."
                    num_frames_recorded += 1
                else:
                    print "x"

                ### --- Check if we should end the recording --- ###
                if num_frames_recorded >= record_gesture.gesture_length:
                    print_message ("### Recording Complete ###")
                    is_recording = False
                    num_frames_recorded = 0
                    num_examples_recorded += 1
                    self.gesture_recognizer.save_gesture(record_gesture)

                ### --- Check if we should start the recording --- ### 
                while sys.stdin in select.select([sys.stdin], [], [], 0)[0]:
                    line = sys.stdin.readline()
                    if line:
                        print_message ("### Started Recording ###")
                        is_recording = True



    # Function: train_main
    # --------------------
    # train the classifier 
    def train_main (self):

        ### Step 1: load in all the gestures ###
        print_message ("Loading gestures")
        self.gesture_recognizer.load_gestures ()
        self.gesture_recognizer.print_gestures_stats ()

        ### Step 2: train the HMMs ###
        print_message ("Getting hmms")
        self.gesture_recognizer.get_hmms ()

        ### Step 3: get examples ###
        print_message ("Getting examples for training/testing")
        self.gesture_recognizer.get_all_examples ()
        self.gesture_recognizer.split_training_testing_examples ()

        ### Step 4: train the classifier and save the entire model ###
        self.gesture_recognizer.train_classifier ()
        self.gesture_recognizer.save_model ()

        ### Step 5: evaluate the classifier ###
        self.gesture_recognizer.evaluate_classifier ()






    ########################################################################################################################
    ##############################[ --- Synth Main --- ]####################################################################
    ########################################################################################################################

    # Function: synth_main_discrete
    # -----------------------------
    # records discrete gestures and classifies them for you.
    def synth_main_discrete (self):

        self.gesture_recognizer.load_model ()

        print_message ("Recording Discrete events")
        while (True):

            ### Step 1: initialize the gesture ###
            observed_gesture = Gesture ()

            ### Step 2: start the recording ###
            self.record_countdown ()

            ### Step 3: fill it with frames ###
            while not observed_gesture.is_full ():
                frame = self.get_frame ()                
                observed_gesture.add_frame (frame)

            ### Step 4: stop the recording and classify ###
            print_message ("### Recording Complete ###")
            self.gesture_recognizer.classify_gesture (observed_gesture)

            print_message("enter to continue")
            sys.stdin.readline ()



    # Function: get_gesture
    # ---------------------
    # function to wait on gestures
    def get_gesture (self, observed_gesture):

        ### --- add the current frame --- ###
        frame = self.get_frame ()
        observed_gesture.add_frame (frame)

        if observed_gesture.is_full ():

            ### --- get classification results --- ###
            classification_results = self.gesture_recognizer.classify_gesture (observed_gesture)

            ### --- interpret them --- ###
            return classification_results


    # Function: get_coords
    # --------------------
    # returns (x,y,z) of the hand
    def get_continuous_coords (self):

        ### --- add the current frame --- ###
        frame = self.get_frame ()

        if len(frame.hands) == 0:
            return None
        else:
            position = frame.hands[0].palm_position
            return (position[0], position[1], position[2])



    # Function: get_position_and_orientation
    # --------------------------------------
    # given a frame, this returns the (palm_position, palm_orientation) if we observe
    # a fist (0 fingers visible); (None, None) otherwise
    def get_position_and_orientation (self, frame):

        ### Step 1: peace out if there are no hands ###
        hands = frame.hands
        if len(hands) == 0:
            return (None, None)

        ### Step 2: peace out if there are any fingers (not a fist) ###
        fingers = hands[0].fingers
        if len (fingers) > 0:
            return (None, None)

        ### Step 3: get position and orientation ###
        hand = hands[0]
        palm_position   = hands[0].palm_position
        position        = (palm_position[0], palm_position[1], palm_position[2])
        palm_normal     = hands[0].palm_normal
        orientation     = (palm_normal[0], palm_normal[1], palm_normal[2])

        return (position, orientation)




    # Function: synth_main
    # --------------------
    # maintains a 70-frame gesture and tries to classify it
    def synth_main (self):
        
        ### Step 1: start the max patch ###
        self.max_interface.send_gesture ('Start')

        ### Step 2: initialize local data ###
        print_message ("Entering Main Loop: Continuous Gesture Recognition")
        self.gesture_recognizer.load_model ()
        observed_gesture = Gesture ()

        ### Step 3: enter main loop ###
        while (True):

            ### Step 1: add the current frame to observed_gesture ###
            frame = self.get_frame ()
            observed_gesture.add_frame (frame)

            ### Step 2: get position and orientation (returns (None, None) if not a fist) ###
            (palm_position, palm_orientation) = self.get_position_and_orientation (frame)

            ### Step 3: Get the gesture, if appropriate ###
            send_gesture = None

            if observed_gesture.is_full ():

                classification_results = self.gesture_recognizer.classify_gesture (observed_gesture)
                if classification_results:
                    prediction = classification_results [0]
                    prediction_prob = classification_results [1]
                    print_message("Prediction: " + str(prediction) + " | Probability: " + str(prediction_prob))
                    send_gesture = prediction
                    observed_gesture.clear ()



            ### Step 4: send a gesture to max if one was observed ###
            if send_gesture:
                self.max_interface.send_gesture (send_gesture)

            ### Step 5: Send hand state to max if one was observed ###
            if len(frame.hands) > 0:
                self.max_interface.send_hand_state (frame.hands[0])
Code Example #7
class Controller_Listener(Leap.Listener):

    #--- Member Objects ---
    max_interface = None        # Interface w/ max
    leap_gesture = None         # Recognition of gestures

    #--- Gestures ---
    available_gestures = ["no_hands", "one_hand", "two_hands"]     # list of available gestures


    # Function: Constructor
    # ---------------------
    # creates max_interface and leap_gesture
    def on_init(self, controller):
       
        ### Step 1: create max interface ###
        self.max_interface = Max_Interface ()

        ### Step 2: notify of initialization ###
        print_status ("Controller Listener", "controller initialized")



    ########################################################################################################################
    ##############################[ --- Initialization/Finalization --- ]###################################################
    ########################################################################################################################        


    # Function: on_connect
    # --------------------
    # callback function for when the controller is connected
    def on_connect(self, controller):

        print_status ("Controller Listener", "controller connected")

        # Enable gestures
        controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE)
        controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SWIPE)

    # Function: on_disconnect
    # -----------------------
    # callback function for when the controller is disconnected
    def on_disconnect(self, controller):

        print_status ("Controller Listener", "Controller disconnected")


    # Function: on_exit
    # -----------------
    # callback function for exit of the program
    def on_exit(self, controller):
 
        print_status ("Controller Listener", "Exiting")





    ########################################################################################################################
    ##############################[ --- Frame Processing --- ]##############################################################
    ########################################################################################################################        

    # Function: print_frame
    # ---------------------
    # prints a user-readable format of the current frame
    def print_frame (self, frame):

        print "Frame id: %d, timestamp: %d, hands: %d, fingers: %d, tools: %d, gestures: %d" % (
              frame.id, frame.timestamp, len(frame.hands), len(frame.fingers), len(frame.tools), len(frame.gestures()))


    # Function: on_frame
    # ------------------
    # this function is called for every frame that is observed from the leap.
    def on_frame(self, controller):

        frame = controller.frame()

        if len(frame.hands) == 0:
            self.max_interface.send_gesture ("no_hands")
        elif len(frame.hands) == 1:
            self.max_interface.send_gesture ("one_hand")
        elif len(frame.hands) == 2:
            self.max_interface.send_gesture ("two_hands")

        self.print_frame (frame)

        if not frame.hands.is_empty:
            # Get the first hand
            hand = frame.hands[0]

            # Check if the hand has any fingers
            fingers = hand.fingers
            if not fingers.is_empty:
                # Calculate the hand's average finger tip position
                avg_pos = Leap.Vector()
                for finger in fingers:
                    avg_pos += finger.tip_position
                avg_pos /= len(fingers)
                print "Hand has %d fingers, average finger tip position: %s" % (
                      len(fingers), avg_pos)

            # Get the hand's sphere radius and palm position
            print "Hand sphere radius: %f mm, palm position: %s" % (
                  hand.sphere_radius, hand.palm_position)

            # Get the hand's normal vector and direction
            normal = hand.palm_normal
            direction = hand.direction

            # Calculate the hand's pitch, roll, and yaw angles
            print "Hand pitch: %f degrees, roll: %f degrees, yaw: %f degrees" % (
                direction.pitch * Leap.RAD_TO_DEG,
                normal.roll * Leap.RAD_TO_DEG,
                direction.yaw * Leap.RAD_TO_DEG)

            # Gestures
            for gesture in frame.gestures():
                if gesture.type == Leap.Gesture.TYPE_CIRCLE:
                    circle = CircleGesture(gesture)

                    # Determine clock direction using the angle between the pointable and the circle normal
                    if circle.pointable.direction.angle_to(circle.normal) <= Leap.PI/4:
                        clockwiseness = "clockwise"
                    else:
                        clockwiseness = "counterclockwise"

                    # Calculate the angle swept since the last frame
                    swept_angle = 0
                    if circle.state != Leap.Gesture.STATE_START:
                        previous_update = CircleGesture(controller.frame(1).gesture(circle.id))
                        swept_angle =  (circle.progress - previous_update.progress) * 2 * Leap.PI

                    print "Circle id: %d, %s, progress: %f, radius: %f, angle: %f degrees, %s" % (
                            gesture.id, self.state_string(gesture.state),
                            circle.progress, circle.radius, swept_angle * Leap.RAD_TO_DEG, clockwiseness)

                if gesture.type == Leap.Gesture.TYPE_SWIPE:
                    swipe = SwipeGesture(gesture)
                    print "Swipe id: %d, state: %s, position: %s, direction: %s, speed: %f" % (
                            gesture.id, self.state_string(gesture.state),
                            swipe.position, swipe.direction, swipe.speed)

                if gesture.type == Leap.Gesture.TYPE_KEY_TAP:
                    keytap = KeyTapGesture(gesture)
                    print "Key Tap id: %d, %s, position: %s, direction: %s" % (
                            gesture.id, self.state_string(gesture.state),
                            keytap.position, keytap.direction )

                if gesture.type == Leap.Gesture.TYPE_SCREEN_TAP:
                    screentap = ScreenTapGesture(gesture)
                    print "Screen Tap id: %d, %s, position: %s, direction: %s" % (
                            gesture.id, self.state_string(gesture.state),
                            screentap.position, screentap.direction )

        if not (frame.hands.is_empty and frame.gestures().is_empty):
            print ""



    def state_string(self, state):
        if state == Leap.Gesture.STATE_START:
            return "STATE_START"

        if state == Leap.Gesture.STATE_UPDATE:
            return "STATE_UPDATE"

        if state == Leap.Gesture.STATE_STOP:
            return "STATE_STOP"

        if state == Leap.Gesture.STATE_INVALID:
            return "STATE_INVALID"
Code Example #8
class Controller_Listener(Leap.Listener):

    #--- Member Objects ---
    max_interface = None  # Interface w/ max
    leap_gesture = None  # Recognition of gestures

    #--- Gestures ---
    available_gestures = ["no_hands", "one_hand",
                          "two_hands"]  # list of available gestures

    # Function: Constructor
    # ---------------------
    # creates max_interface and leap_gesture
    def on_init(self, controller):

        ### Step 1: create max interface ###
        self.max_interface = Max_Interface()

        ### Step 2: notify of initialization ###
        print_status("Controller Listener", "controller initialized")

    ########################################################################################################################
    ##############################[ --- Initialization/Finalization --- ]###################################################
    ########################################################################################################################

    # Function: on_connect
    # --------------------
    # callback function for when the controller is connected
    def on_connect(self, controller):

        print_status("Controller Listener", "controller connected")

        # Enable gestures
        controller.enable_gesture(Leap.Gesture.TYPE_CIRCLE)
        controller.enable_gesture(Leap.Gesture.TYPE_KEY_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SCREEN_TAP)
        controller.enable_gesture(Leap.Gesture.TYPE_SWIPE)

    # Function: on_disconnect
    # -----------------------
    # callback function for when the controller is disconnected
    def on_disconnect(self, controller):

        print_status("Controller Listener", "Controller disconnected")

    # Function: on_exit
    # -----------------
    # callback function for exit of the program
    def on_exit(self, controller):

        print_status("Controller Listener", "Exiting")

    ########################################################################################################################
    ##############################[ --- Frame Processing --- ]##############################################################
    ########################################################################################################################

    # Function: print_frame
    # ---------------------
    # prints a user-readable format of the current frame
    def print_frame(self, frame):

        print "Frame id: %d, timestamp: %d, hands: %d, fingers: %d, tools: %d, gestures: %d" % (
            frame.id, frame.timestamp, len(frame.hands), len(
                frame.fingers), len(frame.tools), len(frame.gestures()))

    # Function: on_frame
    # ------------------
    # this function is called for every frame that is observed from the leap.
    def on_frame(self, controller):

        frame = controller.frame()

        if len(frame.hands) == 0:
            self.max_interface.send_gesture("no_hands")
        elif len(frame.hands) == 1:
            self.max_interface.send_gesture("one_hand")
        elif len(frame.hands) == 2:
            self.max_interface.send_gesture("two_hands")

        self.print_frame(frame)

        if not frame.hands.is_empty:
            # Get the first hand
            hand = frame.hands[0]

            # Check if the hand has any fingers
            fingers = hand.fingers
            if not fingers.is_empty:
                # Calculate the hand's average finger tip position
                avg_pos = Leap.Vector()
                for finger in fingers:
                    avg_pos += finger.tip_position
                avg_pos /= len(fingers)
                print "Hand has %d fingers, average finger tip position: %s" % (
                    len(fingers), avg_pos)

            # Get the hand's sphere radius and palm position
            print "Hand sphere radius: %f mm, palm position: %s" % (
                hand.sphere_radius, hand.palm_position)

            # Get the hand's normal vector and direction
            normal = hand.palm_normal
            direction = hand.direction

            # Calculate the hand's pitch, roll, and yaw angles
            print "Hand pitch: %f degrees, roll: %f degrees, yaw: %f degrees" % (
                direction.pitch * Leap.RAD_TO_DEG,
                normal.roll * Leap.RAD_TO_DEG, direction.yaw * Leap.RAD_TO_DEG)

            # Gestures
            for gesture in frame.gestures():
                if gesture.type == Leap.Gesture.TYPE_CIRCLE:
                    circle = CircleGesture(gesture)

                    # Determine clock direction using the angle between the pointable and the circle normal
                    if circle.pointable.direction.angle_to(
                            circle.normal) <= Leap.PI / 4:
                        clockwiseness = "clockwise"
                    else:
                        clockwiseness = "counterclockwise"

                    # Calculate the angle swept since the last frame
                    swept_angle = 0
                    if circle.state != Leap.Gesture.STATE_START:
                        previous_update = CircleGesture(
                            controller.frame(1).gesture(circle.id))
                        swept_angle = (circle.progress -
                                       previous_update.progress) * 2 * Leap.PI

                    print "Circle id: %d, %s, progress: %f, radius: %f, angle: %f degrees, %s" % (
                        gesture.id, self.state_string(
                            gesture.state), circle.progress, circle.radius,
                        swept_angle * Leap.RAD_TO_DEG, clockwiseness)

                if gesture.type == Leap.Gesture.TYPE_SWIPE:
                    swipe = SwipeGesture(gesture)
                    print "Swipe id: %d, state: %s, position: %s, direction: %s, speed: %f" % (
                        gesture.id, self.state_string(gesture.state),
                        swipe.position, swipe.direction, swipe.speed)

                if gesture.type == Leap.Gesture.TYPE_KEY_TAP:
                    keytap = KeyTapGesture(gesture)
                    print "Key Tap id: %d, %s, position: %s, direction: %s" % (
                        gesture.id, self.state_string(
                            gesture.state), keytap.position, keytap.direction)

                if gesture.type == Leap.Gesture.TYPE_SCREEN_TAP:
                    screentap = ScreenTapGesture(gesture)
                    print "Screen Tap id: %d, %s, position: %s, direction: %s" % (
                        gesture.id, self.state_string(gesture.state),
                        screentap.position, screentap.direction)

        if not (frame.hands.is_empty and frame.gestures().is_empty):
            print ""

    def state_string(self, state):
        if state == Leap.Gesture.STATE_START:
            return "STATE_START"

        if state == Leap.Gesture.STATE_UPDATE:
            return "STATE_UPDATE"

        if state == Leap.Gesture.STATE_STOP:
            return "STATE_STOP"

        if state == Leap.Gesture.STATE_INVALID:
            return "STATE_INVALID"