def __init__(self):
     # NOTE(review): stray fragment — duplicates KeyboardHandler.__init__
     # further down in this file; looks like a scraping artifact (the def is
     # at top level and the body indent is mangled).
     logger.debug("Keyboard deteccter created.")
     self.keyboardList = self.find_keyboard()  # event device names, e.g. ['event3']
     self.threads = []  # reader threads created by start_detecting()
     self.inputRecord = []  # prefix of the hot-key sequence typed so far
     self.hotKey = [16, 30, 44, 2, 3, 4]  # QAZ123
     self.sp = SoundPlayer()
     self.GUIID = None  # X window id of the GUI, if any
 def init_sounds(self):
     # NOTE(review): stray fragment — duplicates TelloController.init_sounds
     # further down in this file; scraping artifact (indentation is mangled).
     # Loads the audio clips used for audio feedback, then creates the tone
     # generator.
     self.sound_player = SoundPlayer(debug=self.debug)
     self.sound_player.load("approaching", "sounds/approaching.ogg")
     self.sound_player.load("keeping distance", "sounds/keeping_distance.ogg")
     self.sound_player.load("landing", "sounds/landing.ogg")
     self.sound_player.load("palm landing", "sounds/palm_landing.ogg")
     self.sound_player.load("taking picture", "sounds/taking_picture.ogg")
     self.sound_player.load("free", "sounds/free.ogg")
     self.tone = Tone()
def __init__(self):
     # NOTE(review): second stray copy of KeyboardHandler.__init__ (see the
     # class below) — scraping artifact.
     logger.debug("Keyboard deteccter created.")
     self.keyboardList = self.find_keyboard()  # event device names
     self.threads = []  # reader threads created by start_detecting()
     self.inputRecord = []  # prefix of the hot-key sequence typed so far
     self.hotKey = [16, 30, 44, 2, 3, 4]  # QAZ123
     self.sp = SoundPlayer()
     self.GUIID = None  # X window id of the GUI, if any
# Example #4 (snippet-site separator left over from scraping; commented out
# so it is no longer a syntax error)
# 0
    def __init__(self, width, height, image):
        """Create the sprite: load and scale its image, place it at (30, 30).

        width, height: target size of the sprite image, in pixels.
        image: filename looked up under ../res/img/.
        """
        super().__init__()  # initialize the pygame Sprite machinery

        self.soundPlayer = SoundPlayer()

        # -- sprite configuration --
        # Load the image and scale it to the requested size.
        loaded = pygame.image.load("../res/img/" + image)
        self.image = pygame.transform.scale(loaded, (width, height))

        # Rect used by pygame for positioning and collision; start at (30, 30).
        self.rect = self.image.get_rect()
        self.rect.x = 30
        self.rect.y = 30

        # -- physics configuration --
        self.jumping = False  # jump when true
        self.velocity = 5     # character's velocity
        self.mass = 1         # character's mass
        self.health = 100
    def __init__(self, config, set_will_message=True, use_tts=True):
        """Set up UniversalHandControl from *config*.

        config: application config dict, merged over DEFAULT_CONFIG.
        set_will_message: register an MQTT will message (remote mode only) so
            the broker announces an EXIT if the app disconnects ungracefully.
        use_tts: if True, start a text-to-speech engine (used by self.say()).
        """
        global anchor_coordinates
        # Check if UHCService is run by the current process (no key 'service' in config dict)
        # or by a server process (that can be on a remote computer, 'service':{'ip':"1.20.1.2", 'port':5555})
        self.config = merge_config(DEFAULT_CONFIG, config)
        # Sensor args
        if 'args' not in self.config['sensor'] or not isinstance(
                self.config['sensor']['args'], dict):
            self.config['sensor']['args'] = DEFAULT_SENSOR_ARGS[
                self.config['sensor']['class']]
        else:
            self.config['sensor']['args'] = merge_config(
                DEFAULT_SENSOR_ARGS[self.config['sensor']['class']],
                self.config['sensor']['args'])
        self.local = 'service_connect' not in self.config
        self.use_tts = use_tts
        # Initialize TTS
        if use_tts:
            # Fix: default to the engine voice. The original left voice_id
            # unbound on platforms other than linux/win32, so constructing on
            # e.g. macOS raised NameError.
            voice_id = None
            if sys.platform == "linux":
                voice_id = None  #"klatt4"
            elif sys.platform == "win32":
                voice_id = 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Speech\\Voices\\Tokens\\TTS_MS_EN-US_DAVID_11.0'
            self.tts = TTS(voice_id=voice_id)
            self.tts.start()
        # Check if there are airzones with point coordinates relative to an anchor.
        # If yes, we first need to ask the user to interactively define the position of these anchors,
        # in order to replace relative by absolute coordinates
        if "airzones" in self.config:
            for a in self.config['airzones']:
                if a.get("coordinates_type", self.config['airzone_params']
                         ['coordinates_type']) == 'relative_to_anchor':
                    anchor_coordinates = None
                    self.say(
                        f"Setting of the {a['type']} called {a['name']}.. Please place your hand at the position you want and keep it still for a few seconds."
                    )
                    ask_user_for_anchor_position(
                        a['name'], a['type'],
                        self.config.get('service_connect', None),
                        self.config['sensor'])
                    self.say("Thank you")
                    a['coordinates_type'] = 'absolute'
                    # NOTE(review): assumes ask_user_for_anchor_position set
                    # the anchor_coordinates global; if it can leave it None,
                    # this unpacking raises TypeError — confirm.
                    dx, dy, dz = anchor_coordinates
                    new_points = []
                    for x, y, z in a['points']:
                        if a.get("airzone_move", self.config['airzone_params']
                                 ['airzone_move']) == "translation":
                            xa, ya, za = x + dx, y + dy, z + dz
                            new_points.append((xa, ya, za))
                    a['points'] = new_points

        self.app_globals = sys._getframe(
            1
        ).f_globals  # Used to access and run the callbacks defined in the calling app

        if self.local:
            from UniversalHandControlService import UniversalHandControlService
            self.uhcs = UniversalHandControlService(self.config)
        else:
            # We access UHCService via mqtt
            import paho.mqtt.client as mqtt
            assert "app_name" in self.config, "There is no key 'app_name' defined in config"
            self.app_name = config["app_name"]
            assert isinstance(
                config['service_connect'], dict
            ) and 'host' in config[
                'service_connect'], "Value of 'service_connect' in config must be a dict and contain 'host'"

            connect_args = config['service_connect']
            self.client = mqtt.Client(client_id=self.app_name, userdata=self)
            self.client.on_connect = UHC_on_connect
            self.client.on_message = UHC_on_message
            # Will message will be sent by the broker on behalf of the app,
            # if the app disconnects ungracefully
            if set_will_message:
                self.client.will_set(MQTT_SERVICE_TOPIC,
                                     payload=jsonpickle.encode({
                                         'type':
                                         'EXIT',
                                         'name':
                                         self.app_name
                                     }),
                                     qos=0,
                                     retain=False)
            self.client.connect(**connect_args)

            # Send config to server in INIT message
            msg = {"type": "INIT", "config": self.config}
            self.client.publish(MQTT_SERVICE_TOPIC, jsonpickle.encode(msg))

        # Initialize SoundPlayer
        self.soundplayer = SoundPlayer()
        self.soundplayer.start()

        print("UHC: init finished")
class KeyboardHandler():
    """Detect key presses on all attached keyboards and play a sound per key.

    One daemon reader thread is started per keyboard event device; typing the
    hot-key sequence QAZ123 brings the GUI window to the front via xdotool.
    """

    def __init__(self):
        logger.debug("Keyboard detector created.")
        self.keyboardList = self.find_keyboard()  # e.g. ['event3', 'event5']
        self.threads = []          # reader threads, one per keyboard device
        self.inputRecord = []      # prefix of the hot-key sequence typed so far
        self.hotKey = [16, 30, 44, 2, 3, 4]  # QAZ123
        self.sp = SoundPlayer()
        self._stop_event = threading.Event()  # signals reader threads to exit
        self.GUIID = None  # X window id of the GUI (set externally)

    # list all event's name and its device
    def show_device(self):
        for i in os.listdir(deviceFilePath):
            namePath = deviceFilePath + i + '/device/name'
            if os.path.isfile(namePath):
                # Fix: the Python 2 file() builtin no longer exists — use open().
                with open(namePath) as f:
                    logger.info("Name: %s Device: %s" % (i, f.read()))

    def set_style(self, style):
        self.sp.set_style(style)

    def set_volume(self, volume):
        self.sp.set_volume(volume)

    def set_pitch(self, pitch):
        self.sp.set_pitch(pitch)

    def get_player_infor(self):
        return self.sp.get_infor()

    @property
    def GUIID(self):
        # Fix: back the property with _GUIID. The original returned
        # self.GUIID here (infinite recursion) and had no setter, so the
        # 'self.GUIID = None' in __init__ raised AttributeError.
        return self._GUIID

    @GUIID.setter
    def GUIID(self, value):
        self._GUIID = value

    def show_GUI(self):
        """Map and raise the GUI window (no-op when no window id is known)."""
        if not self.GUIID:
            return
        # Fix: the Python 2 'commands' module was removed; subprocess provides
        # the same getstatusoutput().
        import subprocess
        # command = "xdotool windowactivate --sync %s" % self.GUIID
        command = "xdotool windowmap --sync %s && xdotool windowactivate --sync %s" % (self.GUIID, self.GUIID)
        subprocess.getstatusoutput(command)

    # new way to find keyboard
    # return with a list of keyboard's event
    def find_keyboard(self):
        """Return the event device names of all attached keyboards.

        A device counts as a keyboard when its /proc/bus/input/devices entry
        mentions 'keyboard', or is a USB device (bus=0003) that is not a mouse.
        """
        keyboardList = []

        with open('/proc/bus/input/devices') as f:
            deviceInfo = f.read().lower().split('\n\n')
        for i in filter(lambda i: (not re.search('mouse', i) and re.search('bus=0003', i)) or re.search('keyboard', i), deviceInfo):
            m = re.search(r'event\d+', i)  # raw string: avoid invalid-escape warning
            if m:
                keyboardList.append(m.group())
        assert len(keyboardList) > 0
        return keyboardList

    # event.value:1 for pressed, 0 for release
    def detect_input_from_event(self, eventName):
        """Read key events from /dev/input/<eventName> until stop_detecting()."""
        dev = InputDevice('/dev/input/' + eventName)
        while not self._stop_event.is_set():
            # Short select timeout so the stop event is polled regularly
            # (the original blocked forever and relied on the removed
            # Thread._Thread__stop() to die).
            readable, _, _ = select([dev], [], [], 0.5)
            if not readable:
                continue
            for event in dev.read():
                if (event.value == 1 or event.value == 0) and event.code != 0:
                    if event.value == 1:
                        self.sp.play(event.code)
                        self.check_show_window(event.code)
                    logger.debug(
                        "Key: %s Status: %s" %
                        (event.code, "pressed" if event.value else "release"))

    # check input if satisfy the hotkey
    def check_show_window(self, keycode):
        """Track keycode against the hot-key sequence; show the GUI on a full match."""
        if len(self.inputRecord) > 0 and keycode == self.inputRecord[-1]:
            return  # ignore auto-repeat of the last key
        if keycode == self.hotKey[len(self.inputRecord)]:
            self.inputRecord.append(keycode)
            logger.debug(self.inputRecord)
            if len(self.inputRecord) == 6:
                self.show_GUI()
                self.inputRecord = []
        else:
            self.inputRecord = []

    def start_detecting(self):
        """Start one daemon reader thread per detected keyboard device."""
        self._stop_event.clear()
        for i in self.keyboardList:
            t = threading.Thread(target=self.detect_input_from_event, args=(i,))
            t.daemon = True  # don't block interpreter exit
            self.threads.append(t)
            t.start()

    # stop all threads
    def stop_detecting(self):
        # Fix: Thread._Thread__stop() was a Python 2 private API and is gone
        # in Python 3 — signal the reader loops to finish and wait briefly.
        self._stop_event.set()
        for t in self.threads:
            t.join(timeout=1)
        self.threads = []
    def plotMono(self, title):
        """Plot the monophonic samples with *title*; no-op unless self.doPlots."""
        if not self.doPlots:
            return
        figure, axis = plt.subplots()
        axis.set_title(title)
        axis.plot(self.samplesMono, 'b')
        axis.set_xlabel("Sample Number")
        axis.set_ylabel("Amplitude")
        plt.show()


# Set to True to test this class
TEST = False
# Never run the self-test inside a notebook/Colab kernel.
# Fix: the original assigned to 'Test' — a new, unused name — so the flag was
# never actually cleared when running in a notebook.
if 'google.colab' in sys.modules or 'jupyter_client' in sys.modules:
    TEST = False

if TEST:
    # Define wav filename used:
    s1Filename = "../audio/carrier.wav"
    s2Filename = "../audio/rockA.wav"
    s3Filename = "../audio/rockB.wav"
    display(s2Filename)
    wavClass = WavClass(wavFileName=s2Filename, doPlots=True)
    # code below only works in a notebook:
    #display(Audio(wavClass.samples, rate=wavClass.sampleRate, normalize=False));
    from SoundPlayer import SoundPlayer
    SoundPlayer().playWav(wavClass.samplesMono, wavClass.sampleRate)
# Example #8 (snippet-site separator left over from scraping; commented out
# so it is no longer a syntax error)
# 0
# NetworkTables table handle for the odometry data published under 'odom'
odom_table = NetworkTables.getTable('odom')
# Static field drawing used as the background for each rendered frame
original_image = cv2.imread("2020-Field.png")

while True:
    frame = original_image.copy()
    field_x = NetworkTables.getEntry("/odom/field_x").getDouble(1)
    field_y = NetworkTables.getEntry("/odom/field_y").getDouble(0)
    field_theta = NetworkTables.getEntry("/odom/field_t").getDouble(180)
    draw_robot(frame, field_x, field_y, field_theta)

    drivetrain_state = NetworkTables.getEntry("/drivetrain/state").getString(
        "OPEN_LOOP")
    auto_state = NetworkTables.getEntry("/drivetrain/auto_state").getString(
        "MANUAL")
    if auto_state == "AUTO_SHOOT_BALL" or auto_state == "AUTO_INTAKE_BALL":
        SoundPlayer.beep(1)
    elif auto_state == "AUTO_INTAKE_NO_BALL":
        SoundPlayer.beep(2)
    else:
        SoundPlayer.beep(0)

    if drivetrain_state == "TRAJECTORY_FOLLOWING":
        trajectory = NetworkTables.getEntry("/drivetrain/trajectory").\
            getDoubleArray([])
        if len(trajectory) == 200:
            draw_trajectory(frame, trajectory)

    if NetworkTables.getEntry("/odom/target_found").getBoolean(False):
        camera_x = NetworkTables.getEntry("/odom/CV/camera_x").getDouble(1)
        camera_y = NetworkTables.getEntry("/odom/CV/camera_y").getDouble(1)
        camera_t = NetworkTables.getEntry("/odom/CV/camera_t").getDouble(
# Example #9 (snippet-site separator left over from scraping; commented out
# so it is no longer a syntax error — note the preceding statement was cut
# off mid-call by the scrape)
# 0
class TelloController(object):
    """
    TelloController builds keyboard controls on top of TelloPy as well
    as generating images from the video stream and enabling opencv support
    """
    def __init__(self,
                 use_face_tracking=True,
                 kbd_layout="QWERTY",
                 write_log_data=False,
                 media_directory="media",
                 child_cnx=None,
                 log_level=None):
        """Connect to the Tello, open the video stream and set up controls.

        kbd_layout: "QWERTY" or "AZERTY"; selects the key bindings.
        write_log_data: if True, flight log data is written to a CSV file.
        media_directory: directory for saved media (created if missing).
        child_cnx: multiprocessing connection to a parent process; when set,
            sounds, keyboard controls and openpose are NOT initialized here.
        log_level: None, "info" or "debug"; non-None also enables debug mode.
        NOTE(review): use_face_tracking is not used anywhere in this body —
        presumably vestigial or consumed elsewhere; confirm.
        """

        self.log_level = log_level
        self.debug = log_level is not None
        self.child_cnx = child_cnx
        self.use_multiprocessing = child_cnx is not None
        self.kbd_layout = kbd_layout
        # Flight data
        self.is_flying = False
        self.battery = None
        self.fly_mode = None
        self.throw_fly_timer = 0

        self.tracking_after_takeoff = False
        self.record = False
        self.keydown = False
        self.date_fmt = '%Y-%m-%d_%H%M%S'
        # When multiprocessing, the parent process handles packet reception.
        self.drone = tellopy.Tello(
            start_recv_thread=not self.use_multiprocessing)
        # Map each axis name to the tellopy command that drives it.
        self.axis_command = {
            "yaw": self.drone.clockwise,
            "roll": self.drone.right,
            "pitch": self.drone.forward,
            "throttle": self.drone.up
        }
        # axis_speed: speeds actually sent; cmd_axis_speed: speeds requested
        # by the keyboard handlers (copied into axis_speed each frame).
        self.axis_speed = {"yaw": 0, "roll": 0, "pitch": 0, "throttle": 0}
        self.cmd_axis_speed = {"yaw": 0, "roll": 0, "pitch": 0, "throttle": 0}
        self.prev_axis_speed = self.axis_speed.copy()
        self.def_speed = {"yaw": 50, "roll": 35, "pitch": 35, "throttle": 80}

        self.write_log_data = write_log_data
        self.reset()
        self.media_directory = media_directory
        if not os.path.isdir(self.media_directory):
            os.makedirs(self.media_directory)

        if self.write_log_data:
            path = 'tello-%s.csv' % datetime.datetime.now().strftime(
                '%Y-%m-%d_%H%M%S')
            self.log_file = open(path, 'w')
            self.write_header = True

        self.init_drone()
        if not self.use_multiprocessing:
            self.init_sounds()
            self.init_controls()

        # container for processing the packets into frames
        self.container = av.open(self.drone.get_video_stream())
        self.vid_stream = self.container.streams.video[0]
        self.out_file = None
        self.out_stream = None
        self.out_name = None
        self.start_time = time.time()

        # Setup Openpose
        if not self.use_multiprocessing:

            self.op = OP(number_people_max=1, min_size=25, debug=self.debug)
        self.use_openpose = False

        # Camera-based morse code: "---" triggers delayed_takeoff,
        # "..." triggers throw_and_go with tracking enabled.
        self.morse = CameraMorse(display=False)
        self.morse.define_command("---", self.delayed_takeoff)
        self.morse.define_command("...", self.throw_and_go, {'tracking': True})
        self.is_pressed = False

        self.fps = FPS()

        self.exposure = 0

        if self.debug:
            self.graph_pid = RollingGraph(window_name="PID",
                                          step_width=2,
                                          width=2000,
                                          height=500,
                                          y_max=200,
                                          colors=[(255, 255, 255),
                                                  (255, 200, 0), (0, 0, 255),
                                                  (0, 255, 0)],
                                          thickness=[2, 2, 2, 2],
                                          threshold=100,
                                          waitKey=False)

        # Logging
        # NOTE(review): self.log_level was already assigned at the top of
        # __init__ — this is redundant but harmless.
        self.log_level = log_level
        if log_level is not None:
            if log_level == "info":
                log_level = logging.INFO
            elif log_level == "debug":
                log_level = logging.DEBUG
            log.setLevel(log_level)
            ch = logging.StreamHandler(sys.stdout)
            ch.setLevel(log_level)
            ch.setFormatter(
                logging.Formatter(
                    fmt=
                    '%(asctime)s.%(msecs)03d - %(name)s - %(levelname)s - %(message)s',
                    datefmt="%H:%M:%S"))
            log.addHandler(ch)

    def set_video_encoder_rate(self, rate):
        self.drone.set_video_encoder_rate(rate)
        self.video_encoder_rate = rate

    def reset(self):
        """Reset all per-flight state before a new flight."""
        log.debug("RESET")
        defaults = {
            # Reference and current positions (-1 = unset)
            "ref_pos_x": -1,
            "ref_pos_y": -1,
            "ref_pos_z": -1,
            "pos_x": -1,
            "pos_y": -1,
            "pos_z": -1,
            "yaw": 0,
            # Tracking / landing state
            "tracking": False,
            "keep_distance": None,
            "palm_landing": False,
            "palm_landing_approach": False,
            "yaw_to_consume": 0,
            "timestamp_keep_distance": time.time(),
            "wait_before_tracking": None,
            "timestamp_take_picture": None,
            "throw_ongoing": False,
            "scheduled_takeoff": None,
            # When in tracking mode but no body is detected in the current
            # frame, the drone rotates (in the same direction as its last
            # rotation) in the hope of finding a body again.
            "body_in_prev_frame": False,
            "timestamp_no_body": time.time(),
            "last_rotation_is_cw": True,
        }
        for attribute, value in defaults.items():
            setattr(self, attribute, value)

    def init_drone(self):
        """
            Connect to the drone, start streaming and subscribe to events
        """
        if self.log_level:
            # Enable tellopy's own protocol logging (verbosity 2)
            self.drone.log.set_level(2)
        self.drone.connect()
        self.set_video_encoder_rate(2)
        self.drone.start_video()

        # Register callbacks for flight data, log data and file-received
        # events coming from the drone.
        self.drone.subscribe(self.drone.EVENT_FLIGHT_DATA,
                             self.flight_data_handler)
        self.drone.subscribe(self.drone.EVENT_LOG_DATA, self.log_data_handler)
        self.drone.subscribe(self.drone.EVENT_FILE_RECEIVED,
                             self.handle_flight_received)

    def init_sounds(self):
        """Load the audio clips used for audio feedback, then build the tone generator."""
        self.sound_player = SoundPlayer(debug=self.debug)
        clips = (
            ("approaching", "sounds/approaching.ogg"),
            ("keeping distance", "sounds/keeping_distance.ogg"),
            ("landing", "sounds/landing.ogg"),
            ("palm landing", "sounds/palm_landing.ogg"),
            ("taking picture", "sounds/taking_picture.ogg"),
            ("free", "sounds/free.ogg"),
        )
        for clip_name, clip_path in clips:
            self.sound_player.load(clip_name, clip_path)
        self.tone = Tone()

    def on_press(self, keyname):
        """
            Handler for keyboard listener

            Escape triggers an emergency stop (land, quit, kill the process);
            any other bound key runs its action from self.controls_keypress.
        """
        # Ignore repeats: keydown stays True until on_release fires.
        if self.keydown:
            return
        try:
            self.keydown = True
            keyname = str(keyname).strip('\'')
            log.info('KEY PRESS ' + keyname)
            if keyname == 'Key.esc':
                # Emergency exit: stop tracking, land, shut the drone link
                # down, then terminate the process immediately.
                self.toggle_tracking(False)
                # self.tracking = False
                self.drone.land()
                self.drone.quit()
                if self.child_cnx:
                    # Tell to the parent process that it's time to exit
                    self.child_cnx.send("EXIT")
                cv2.destroyAllWindows()
                os._exit(0)  # hard exit: skips cleanup/atexit on purpose
            if keyname in self.controls_keypress:
                self.controls_keypress[keyname]()
        except AttributeError:
            # NOTE(review): presumably raised while normalizing special key
            # objects from the listener — confirm which call can raise here.
            log.debug(f'special key {keyname} pressed')

    def on_release(self, keyname):
        """
            Reset on key up from keyboard listener

            Clears the keydown flag and runs the release action bound to the
            key (e.g. zeroing the axis speed set by the matching key press).
        """
        self.keydown = False
        keyname = str(keyname).strip('\'')
        log.info('KEY RELEASE ' + keyname)
        if keyname in self.controls_keyrelease:
            # Call the handler for its side effect; the original bound the
            # result to an unused local ('key_handler').
            self.controls_keyrelease[keyname]()

    def set_speed(self, axis, speed):
        """Record *speed* as the commanded speed for *axis* (picked up each frame)."""
        log.info(f"set speed {axis} {speed}")
        self.cmd_axis_speed[axis] = speed

    def init_controls(self):
        """
            Define keys and add listener

            Builds key->action tables for both QWERTY and AZERTY layouts,
            selects one pair according to self.kbd_layout, then starts the
            keyboard listener wired to on_press/on_release.
        """

        # Key press bindings (QWERTY). Movement keys set an axis speed;
        # the matching release table below resets that axis to 0.
        controls_keypress_QWERTY = {
            'w':
            lambda: self.set_speed("pitch", self.def_speed["pitch"]),
            's':
            lambda: self.set_speed("pitch", -self.def_speed["pitch"]),
            'a':
            lambda: self.set_speed("roll", -self.def_speed["roll"]),
            'd':
            lambda: self.set_speed("roll", self.def_speed["roll"]),
            'q':
            lambda: self.set_speed("yaw", -self.def_speed["yaw"]),
            'e':
            lambda: self.set_speed("yaw", self.def_speed["yaw"]),
            'i':
            lambda: self.drone.flip_forward(),
            'k':
            lambda: self.drone.flip_back(),
            'j':
            lambda: self.drone.flip_left(),
            'l':
            lambda: self.drone.flip_right(),
            'Key.left':
            lambda: self.set_speed("yaw", -1.5 * self.def_speed["yaw"]),
            'Key.right':
            lambda: self.set_speed("yaw", 1.5 * self.def_speed["yaw"]),
            'Key.up':
            lambda: self.set_speed("throttle", self.def_speed["throttle"]),
            'Key.down':
            lambda: self.set_speed("throttle", -self.def_speed["throttle"]),
            'Key.tab':
            lambda: self.drone.takeoff(),
            'Key.backspace':
            lambda: self.drone.land(),
            'p':
            lambda: self.palm_land(),
            't':
            lambda: self.toggle_tracking(),
            'o':
            lambda: self.toggle_openpose(),
            'Key.enter':
            lambda: self.take_picture(),
            'c':
            lambda: self.clockwise_degrees(360),
            '0':
            lambda: self.drone.set_video_encoder_rate(0),
            '1':
            lambda: self.drone.set_video_encoder_rate(1),
            '2':
            lambda: self.drone.set_video_encoder_rate(2),
            '3':
            lambda: self.drone.set_video_encoder_rate(3),
            '4':
            lambda: self.drone.set_video_encoder_rate(4),
            '5':
            lambda: self.drone.set_video_encoder_rate(5),
            '7':
            lambda: self.set_exposure(-1),
            '8':
            lambda: self.set_exposure(0),
            '9':
            lambda: self.set_exposure(1)
        }

        # Key release bindings (QWERTY): zero the axis driven by the key.
        controls_keyrelease_QWERTY = {
            'w': lambda: self.set_speed("pitch", 0),
            's': lambda: self.set_speed("pitch", 0),
            'a': lambda: self.set_speed("roll", 0),
            'd': lambda: self.set_speed("roll", 0),
            'q': lambda: self.set_speed("yaw", 0),
            'e': lambda: self.set_speed("yaw", 0),
            'Key.left': lambda: self.set_speed("yaw", 0),
            'Key.right': lambda: self.set_speed("yaw", 0),
            'Key.up': lambda: self.set_speed("throttle", 0),
            'Key.down': lambda: self.set_speed("throttle", 0)
        }

        # Same bindings with the AZERTY letter positions (w<->z, a<->q).
        controls_keypress_AZERTY = {
            'z':
            lambda: self.set_speed("pitch", self.def_speed["pitch"]),
            's':
            lambda: self.set_speed("pitch", -self.def_speed["pitch"]),
            'q':
            lambda: self.set_speed("roll", -self.def_speed["roll"]),
            'd':
            lambda: self.set_speed("roll", self.def_speed["roll"]),
            'a':
            lambda: self.set_speed("yaw", -self.def_speed["yaw"]),
            'e':
            lambda: self.set_speed("yaw", self.def_speed["yaw"]),
            'i':
            lambda: self.drone.flip_forward(),
            'k':
            lambda: self.drone.flip_back(),
            'j':
            lambda: self.drone.flip_left(),
            'l':
            lambda: self.drone.flip_right(),
            'Key.left':
            lambda: self.set_speed("yaw", -1.5 * self.def_speed["yaw"]),
            'Key.right':
            lambda: self.set_speed("yaw", 1.5 * self.def_speed["yaw"]),
            'Key.up':
            lambda: self.set_speed("throttle", self.def_speed["throttle"]),
            'Key.down':
            lambda: self.set_speed("throttle", -self.def_speed["throttle"]),
            'Key.tab':
            lambda: self.drone.takeoff(),
            'Key.backspace':
            lambda: self.drone.land(),
            'p':
            lambda: self.palm_land(),
            't':
            lambda: self.toggle_tracking(),
            'o':
            lambda: self.toggle_openpose(),
            'Key.enter':
            lambda: self.take_picture(),
            'c':
            lambda: self.clockwise_degrees(360),
            '0':
            lambda: self.drone.set_video_encoder_rate(0),
            '1':
            lambda: self.drone.set_video_encoder_rate(1),
            '2':
            lambda: self.drone.set_video_encoder_rate(2),
            '3':
            lambda: self.drone.set_video_encoder_rate(3),
            '4':
            lambda: self.drone.set_video_encoder_rate(4),
            '5':
            lambda: self.drone.set_video_encoder_rate(5),
            '7':
            lambda: self.set_exposure(-1),
            '8':
            lambda: self.set_exposure(0),
            '9':
            lambda: self.set_exposure(1)
        }

        controls_keyrelease_AZERTY = {
            'z': lambda: self.set_speed("pitch", 0),
            's': lambda: self.set_speed("pitch", 0),
            'q': lambda: self.set_speed("roll", 0),
            'd': lambda: self.set_speed("roll", 0),
            'a': lambda: self.set_speed("yaw", 0),
            'e': lambda: self.set_speed("yaw", 0),
            'Key.left': lambda: self.set_speed("yaw", 0),
            'Key.right': lambda: self.set_speed("yaw", 0),
            'Key.up': lambda: self.set_speed("throttle", 0),
            'Key.down': lambda: self.set_speed("throttle", 0)
        }

        if self.kbd_layout == "AZERTY":
            self.controls_keypress = controls_keypress_AZERTY
            self.controls_keyrelease = controls_keyrelease_AZERTY
        else:
            self.controls_keypress = controls_keypress_QWERTY
            self.controls_keyrelease = controls_keyrelease_QWERTY
        self.key_listener = keyboard.Listener(on_press=self.on_press,
                                              on_release=self.on_release)
        self.key_listener.start()

    def check_pose(self, w, h):
        """
            Check if we detect a pose in the body detected by Openpose

            Returns the name of the recognized pose (e.g. "RIGHT_ARM_UP_OPEN",
            "HANDS_ON_NECK") or None when no predefined pose matches.
            NOTE(review): parameters w and h are unused in this body — confirm
            whether they are vestigial.
        """

        # Keypoints of interest (each may be None when not detected)
        neck = self.op.get_body_kp("Neck")
        r_wrist = self.op.get_body_kp("RWrist")
        l_wrist = self.op.get_body_kp("LWrist")
        r_elbow = self.op.get_body_kp("RElbow")
        l_elbow = self.op.get_body_kp("LElbow")
        r_shoulder = self.op.get_body_kp("RShoulder")
        l_shoulder = self.op.get_body_kp("LShoulder")
        r_ear = self.op.get_body_kp("REar")
        l_ear = self.op.get_body_kp("LEar")

        # Used as a scale reference for the "close to" thresholds below
        self.shoulders_width = distance(
            r_shoulder, l_shoulder) if r_shoulder and l_shoulder else None

        vert_angle_right_arm = vertical_angle(r_wrist, r_elbow)
        vert_angle_left_arm = vertical_angle(l_wrist, l_elbow)

        # A hand is "up" when the wrist is above the neck (smaller y)
        left_hand_up = neck and l_wrist and l_wrist[1] < neck[1]
        right_hand_up = neck and r_wrist and r_wrist[1] < neck[1]

        if right_hand_up:
            if not left_hand_up:
                # Only right arm up
                if r_ear and (r_ear[0] - neck[0]) * (r_wrist[0] - neck[0]) > 0:
                    # Right ear and right hand on the same side
                    # NOTE(review): the OPEN/CLOSED labels are asymmetric with
                    # the left-arm branch below (< -15 gives OPEN here but
                    # CLOSED there) — confirm this is intended.
                    if vert_angle_right_arm:
                        if vert_angle_right_arm < -15:
                            return "RIGHT_ARM_UP_OPEN"
                        if 15 < vert_angle_right_arm < 90:
                            return "RIGHT_ARM_UP_CLOSED"
                elif l_ear and self.shoulders_width and distance(
                        r_wrist, l_ear) < self.shoulders_width / 4:
                    # Right hand close to left ear
                    return "RIGHT_HAND_ON_LEFT_EAR"
            else:
                # Both hands up
                # Check if both hands are on the ears
                if r_ear and l_ear:
                    ear_dist = distance(r_ear, l_ear)
                    if distance(r_wrist, r_ear) < ear_dist / 3 and distance(
                            l_wrist, l_ear) < ear_dist / 3:
                        return ("HANDS_ON_EARS")
                # Check if boths hands are closed to each other and above ears
                # (check right hand is above right ear is enough since hands are closed to each other)
                if self.shoulders_width and r_ear:
                    near_dist = self.shoulders_width / 3
                    if r_ear[1] > r_wrist[1] and distance(r_wrist,
                                                          l_wrist) < near_dist:
                        return "CLOSE_HANDS_UP"

        else:
            if left_hand_up:
                # Only left arm up
                if l_ear and (l_ear[0] - neck[0]) * (l_wrist[0] - neck[0]) > 0:
                    # Left ear and left hand on the same side
                    if vert_angle_left_arm:
                        if vert_angle_left_arm < -15:
                            return "LEFT_ARM_UP_CLOSED"
                        if 15 < vert_angle_left_arm < 90:
                            return "LEFT_ARM_UP_OPEN"
                elif r_ear and self.shoulders_width and distance(
                        l_wrist, r_ear) < self.shoulders_width / 4:
                    # Left hand close to right ear
                    return "LEFT_HAND_ON_RIGHT_EAR"
            else:
                # Both wrists under the neck
                if neck and self.shoulders_width and r_wrist and l_wrist:
                    near_dist = self.shoulders_width / 3
                    if distance(r_wrist, neck) < near_dist and distance(
                            l_wrist, neck) < near_dist:
                        return "HANDS_ON_NECK"

        return None

    def process_frame(self, raw_frame):
        """
            Analyze the frame and return the frame with information (HUD, openpose skeleton) drawn on it

            This is also the controller's main loop body: it updates
            self.axis_speed from manual commands, 360-scan progress, detected
            poses and the tracking PIDs, then sends changed axis commands to
            the drone before drawing the HUD.
        """

        frame = raw_frame.copy()
        h, w, _ = frame.shape
        # Distance thresholds in pixels of shoulder width, relative to frame
        # width: 'proximity' is the approach target before a palm landing.
        # NOTE(review): min_distance appears unused in this method.
        proximity = int(w / 2.6)
        min_distance = int(w / 2)

        # Is there a scheduled takeoff whose delay has elapsed ?
        if self.scheduled_takeoff and time.time() > self.scheduled_takeoff:

            self.scheduled_takeoff = None
            self.drone.takeoff()

        # Start from the manually commanded speeds; pose/tracking logic below
        # may override individual axes.
        self.axis_speed = self.cmd_axis_speed.copy()

        # If we are on the point to take a picture, the tracking is temporarily deactivated (2s)
        if self.timestamp_take_picture:
            if time.time() - self.timestamp_take_picture > 2:
                self.timestamp_take_picture = None
                self.drone.take_picture()
        else:

            # If we are doing a 360, where are we in our 360 ?
            if self.yaw_to_consume > 0:
                consumed = self.yaw - self.prev_yaw
                self.prev_yaw = self.yaw
                # Yaw wraps around; a negative delta means we crossed 360.
                if consumed < 0: consumed += 360
                self.yaw_consumed += consumed
                if self.yaw_consumed > self.yaw_to_consume:
                    self.yaw_to_consume = 0
                    self.axis_speed["yaw"] = 0
                else:
                    self.axis_speed["yaw"] = self.def_speed["yaw"]

            # We are not flying, we check a potential morse code
            if not self.is_flying:
                pressing, detected = self.morse.eval(frame)
                # Audio feedback tone follows press/release edges.
                if self.is_pressed and not pressing:
                    self.tone.off()
                elif not self.is_pressed and pressing:
                    self.tone.on()
                self.is_pressed = pressing

            # Call to openpose detection
            if self.use_openpose:

                nb_people, pose_kps, face_kps = self.op.eval(frame)

                target = None

                # Our target is the person whose index is 0 in pose_kps
                self.pose = None
                if nb_people > 0:
                    # We found a body, so we can cancel the exploring 360
                    self.yaw_to_consume = 0

                    # Do we recognize a predefined pose ?
                    self.pose = self.check_pose(w, h)

                    if self.pose:
                        # We trigger the associated action
                        log.info(f"pose detected : {self.pose}")
                        if self.pose == "HANDS_ON_NECK" or self.pose == "HANDS_ON_EARS":
                            # Take a picture in 1 second
                            if self.timestamp_take_picture is None:
                                log.info("Take a picture in 1 second")
                                self.timestamp_take_picture = time.time()
                                self.sound_player.play("taking picture")
                        elif self.pose == "RIGHT_ARM_UP_CLOSED":
                            log.info("GOING LEFT from pose")
                            self.axis_speed["roll"] = self.def_speed["roll"]
                        elif self.pose == "RIGHT_ARM_UP_OPEN":
                            log.info("GOING RIGHT from pose")
                            self.axis_speed["roll"] = -self.def_speed["roll"]
                        elif self.pose == "LEFT_ARM_UP_CLOSED":
                            log.info("GOING FORWARD from pose")
                            self.axis_speed["pitch"] = self.def_speed["pitch"]
                        elif self.pose == "LEFT_ARM_UP_OPEN":
                            log.info("GOING BACKWARD from pose")
                            self.axis_speed["pitch"] = -self.def_speed["pitch"]
                        elif self.pose == "CLOSE_HANDS_UP":
                            # Toggle locked-distance mode; the 2s timestamp
                            # check debounces consecutive frames of the pose.
                            if self.keep_distance is None:
                                if time.time(
                                ) - self.timestamp_keep_distance > 2:
                                    # The first frame of a series to activate the distance keeping
                                    self.keep_distance = self.shoulders_width
                                    self.timestamp_keep_distance = time.time()
                                    log.info(
                                        f"KEEP DISTANCE {self.keep_distance}")
                                    self.pid_pitch = PID(0.5,
                                                         0.04,
                                                         0.3,
                                                         setpoint=0,
                                                         output_limits=(-50,
                                                                        50))
                                    #self.graph_distance = RollingGraph(window_name="Distance", y_max=500, threshold=self.keep_distance, waitKey=False)
                                    self.sound_player.play("keeping distance")
                            else:
                                if time.time(
                                ) - self.timestamp_keep_distance > 2:
                                    log.info("KEEP DISTANCE FINISHED")
                                    self.sound_player.play("free")
                                    self.keep_distance = None
                                    self.timestamp_keep_distance = time.time()

                        elif self.pose == "RIGHT_HAND_ON_LEFT_EAR":
                            # Get close to the body then palm landing
                            if not self.palm_landing_approach:
                                self.palm_landing_approach = True
                                self.keep_distance = proximity
                                self.timestamp_keep_distance = time.time()
                                log.info("APPROACHING on pose")
                                self.pid_pitch = PID(0.2,
                                                     0.02,
                                                     0.1,
                                                     setpoint=0,
                                                     output_limits=(-45, 45))
                                #self.graph_distance = RollingGraph(window_name="Distance", y_max=500, threshold=self.keep_distance, waitKey=False)
                                self.sound_player.play("approaching")
                        elif self.pose == "LEFT_HAND_ON_RIGHT_EAR":
                            if not self.palm_landing:
                                log.info("LANDING on pose")
                                # Landing
                                self.toggle_tracking(tracking=False)
                                self.drone.land()

                    # Draw the skeleton on the frame
                    self.op.draw_body(frame)

                    # In tracking mode, we track a specific body part (an openpose keypoint):
                    # the nose if visible, otherwise the neck, otherwise the midhip
                    # The tracker tries to align that body part with the reference point (ref_x, ref_y)
                    target = self.op.get_body_kp("Nose")
                    if target is not None:
                        ref_x = int(w / 2)
                        ref_y = int(h * 0.35)
                    else:
                        target = self.op.get_body_kp("Neck")
                        if target is not None:
                            ref_x = int(w / 2)
                            ref_y = int(h / 2)
                        else:
                            target = self.op.get_body_kp("MidHip")
                            if target is not None:
                                ref_x = int(w / 2)
                                ref_y = int(0.75 * h)

                if self.tracking:
                    if target:
                        self.body_in_prev_frame = True
                        # We draw an arrow from the reference point to the body part we are targeting
                        h, w, _ = frame.shape
                        xoff = int(target[0] - ref_x)
                        yoff = int(ref_y - target[1])
                        cv2.circle(frame, (ref_x, ref_y), 15, (250, 150, 0), 1,
                                   cv2.LINE_AA)
                        cv2.arrowedLine(frame, (ref_x, ref_y), target,
                                        (250, 150, 0), 6)

                        # The PID controllers calculate the new speeds for yaw and throttle
                        self.axis_speed["yaw"] = int(-self.pid_yaw(xoff))
                        log.debug(
                            f"xoff: {xoff} - speed_yaw: {self.axis_speed['yaw']}"
                        )
                        # Remembered so the "search" rotation below spins the
                        # same way the body was last seen moving.
                        self.last_rotation_is_cw = self.axis_speed["yaw"] > 0

                        self.axis_speed["throttle"] = int(
                            -self.pid_throttle(yoff))
                        log.debug(
                            f"yoff: {yoff} - speed_throttle: {self.axis_speed['throttle']}"
                        )

                        # If in locked distance mode
                        if self.keep_distance and self.shoulders_width:
                            if self.palm_landing_approach and self.shoulders_width > self.keep_distance:
                                # The drone is now close enough to the body
                                # Let's do the palm landing
                                log.info("PALM LANDING after approaching")
                                self.palm_landing_approach = False
                                self.toggle_tracking(tracking=False)
                                self.palm_land()
                            else:
                                # Shoulder width in pixels is the distance
                                # proxy: PID on its error drives pitch.
                                self.axis_speed["pitch"] = int(
                                    self.pid_pitch(self.shoulders_width -
                                                   self.keep_distance))
                                log.debug(
                                    f"Target distance: {self.keep_distance} - cur: {self.shoulders_width} -speed_pitch: {self.axis_speed['pitch']}"
                                )
                    else:  # Tracking but no body detected
                        # For 1s keep the previous speeds (the body may just
                        # be momentarily lost), then rotate to search for it.
                        if self.body_in_prev_frame:
                            self.timestamp_no_body = time.time()
                            self.body_in_prev_frame = False
                            self.axis_speed["throttle"] = self.prev_axis_speed[
                                "throttle"]
                            self.axis_speed["yaw"] = self.prev_axis_speed[
                                "yaw"]
                        else:
                            if time.time() - self.timestamp_no_body < 1:
                                print("NO BODY SINCE < 1", self.axis_speed,
                                      self.prev_axis_speed)
                                self.axis_speed[
                                    "throttle"] = self.prev_axis_speed[
                                        "throttle"]
                                self.axis_speed["yaw"] = self.prev_axis_speed[
                                    "yaw"]
                            else:
                                log.debug("NO BODY detected for 1s -> rotate")
                                self.axis_speed[
                                    "yaw"] = self.def_speed["yaw"] * (
                                        1 if self.last_rotation_is_cw else -1)

        # Send axis commands to the drone (only when the value changed)
        for axis, command in self.axis_command.items():
            if self.axis_speed[axis] is not None and self.axis_speed[
                    axis] != self.prev_axis_speed[axis]:
                log.debug(f"COMMAND {axis} : {self.axis_speed[axis]}")
                command(self.axis_speed[axis])
                self.prev_axis_speed[axis] = self.axis_speed[axis]
            else:
                # This line is necessary to display current values in 'self.write_hud'
                self.axis_speed[axis] = self.prev_axis_speed[axis]

        # Write the HUD on the frame
        frame = self.write_hud(frame)

        return frame

    def write_hud(self, frame):
        """Overlay telemetry and controller state on the frame and return it."""
        class HUD:
            # Accumulates (text, color) rows and renders them top-down.
            def __init__(self, def_color=(255, 170, 0)):
                self.def_color = def_color
                self.infos = []

            def add(self, info, color=None):
                self.infos.append((info, self.def_color if color is None else color))

            def draw(self, frame):
                for row, (info, color) in enumerate(self.infos):
                    cv2.putText(frame, info, (0, 30 + (row * 30)),
                                cv2.FONT_HERSHEY_SIMPLEX, 1.0, color, 2)

        hud = HUD()
        green = (0, 255, 0)
        red = (0, 0, 255)

        if self.debug: hud.add(datetime.datetime.now().strftime('%H:%M:%S'))
        hud.add(f"FPS {self.fps.get():.2f}")
        if self.debug: hud.add(f"VR {self.video_encoder_rate}")

        hud.add(f"BAT {self.battery}")
        hud.add("FLYING" if self.is_flying else "NOT FLYING",
                green if self.is_flying else red)
        hud.add(f"TRACKING {'ON' if self.tracking else 'OFF'}",
                green if self.tracking else red)
        hud.add(f"EXPO {self.exposure}")

        # One row per axis: positive-direction label, negative-direction label.
        for axis, pos_label, neg_label in (("yaw", "CW", "CCW"),
                                           ("roll", "RIGHT", "LEFT"),
                                           ("pitch", "FORWARD", "BACKWARD"),
                                           ("throttle", "UP", "DOWN")):
            speed = self.axis_speed[axis]
            if speed > 0:
                hud.add(f"{pos_label} {speed}", green)
            elif speed < 0:
                hud.add(f"{neg_label} {-speed}", red)
            else:
                hud.add(f"{pos_label} 0")

        if self.use_openpose:
            hud.add(f"POSE: {self.pose}", green if self.pose else (255, 170, 0))
        if self.keep_distance:
            hud.add(
                f"Target distance: {self.keep_distance} - curr: {self.shoulders_width}",
                green)
        if self.timestamp_take_picture:
            hud.add("Taking a picture", green)
        if self.palm_landing:
            hud.add("Palm landing...", green)
        if self.palm_landing_approach:
            hud.add("In approach for palm landing...", green)
        if (self.tracking and not self.body_in_prev_frame
                and time.time() - self.timestamp_no_body > 0.5):
            hud.add("Searching...", green)
        if self.throw_ongoing:
            hud.add("Throw ongoing...", green)
        if self.scheduled_takeoff:
            hud.add(f"Takeoff in {int(self.scheduled_takeoff - time.time())}s")

        hud.draw(frame)
        return frame

    def take_picture(self):
        """
            Tell drone to take picture, image sent to file handler
            (the resulting image arrives via 'handle_flight_received').
        """
        self.drone.take_picture()

    def set_exposure(self, expo):
        """Adjust the drone camera exposure level.

        expo == 1 steps the level up (capped at 9), expo == -1 steps it
        down (capped at -9), expo == 0 resets it; any other value leaves
        the level unchanged. The current level is then sent to the drone.
        """
        if expo == 1:
            self.exposure = min(9, self.exposure + 1)
        elif expo == -1:
            self.exposure = max(-9, self.exposure - 1)
        elif expo == 0:
            self.exposure = 0
        self.drone.set_exposure(self.exposure)
        log.info(f"EXPOSURE {self.exposure}")

    def palm_land(self):
        """
            Tell drone to land on an open palm.
            The flag is set first so the HUD shows 'Palm landing...' while
            the maneuver is in progress.
        """
        self.palm_landing = True
        self.sound_player.play("palm landing")
        self.drone.palm_land()

    def throw_and_go(self, tracking=False):
        """
            Tell drone to start a 'throw and go'
            (takeoff by being tossed in the air). If tracking is True,
            body tracking will be enabled once the drone reports flying.
        """
        self.drone.throw_and_go()
        self.tracking_after_takeoff = tracking

    def delayed_takeoff(self, delay=5):
        """Schedule an automatic takeoff `delay` seconds from now and arm
        tracking so it switches on once the drone is airborne."""
        self.tracking_after_takeoff = True
        self.scheduled_takeoff = time.time() + delay

    def clockwise_degrees(self, degrees):
        """Start a clockwise rotation of `degrees`; process_frame consumes
        it incrementally by comparing successive yaw readings."""
        self.yaw_consumed = 0
        self.prev_yaw = self.yaw
        self.yaw_to_consume = degrees

    def toggle_openpose(self):
        """Flip openpose usage on/off."""
        self.use_openpose = not self.use_openpose
        if not self.use_openpose:
            # Tracking relies on openpose keypoints, so it cannot stay on.
            self.toggle_tracking(tracking=False)
        log.info('OPENPOSE ' + ("ON" if self.use_openpose else "OFF"))

    def toggle_tracking(self, tracking=None):
        """Enable/disable body tracking.

        With tracking=None the current state is flipped; otherwise the given
        boolean is applied. Enabling forces openpose on and (re)creates the
        yaw/throttle PID controllers; disabling zeroes all axis speeds and
        drops any locked distance.
        """
        self.tracking = (not self.tracking) if tracking is None else tracking

        if not self.tracking:
            self.axis_speed = {"yaw": 0, "roll": 0, "pitch": 0, "throttle": 0}
            self.keep_distance = None
            return

        log.info("ACTIVATE TRACKING")
        # Tracking needs openpose keypoints
        self.use_openpose = True
        # PID on the horizontal offset of the tracked keypoint drives yaw...
        self.pid_yaw = PID(0.25, 0, 0, setpoint=0, output_limits=(-100, 100))
        # ...and PID on the vertical offset drives throttle
        self.pid_throttle = PID(0.4, 0, 0, setpoint=0, output_limits=(-80, 100))

    def flight_data_handler(self, event, sender, data):
        """
            Listener to flight data from the drone: mirrors telemetry into
            instance state and reacts to takeoff/landing transitions.
        """
        self.battery = data.battery_percentage
        self.fly_mode = data.fly_mode
        self.throw_fly_timer = data.throw_fly_timer
        self.throw_ongoing = data.throw_fly_timer > 0

        now_flying = data.em_sky
        if self.is_flying != now_flying:
            self.is_flying = now_flying
            log.debug(f"FLYING : {self.is_flying}")
            if self.is_flying:
                if self.tracking_after_takeoff:
                    log.info("Tracking on after takeoff")
                    self.toggle_tracking(True)
            else:
                # Just landed: bring the controller back to a clean state.
                self.reset()

        log.debug(
            f"MODE: {self.fly_mode} - Throw fly timer: {self.throw_fly_timer}")

    def log_data_handler(self, event, sender, data):
        """
            Listener to log data from the drone: tracks position (relative
            to the first meaningful fix) and yaw, optionally writes a CSV log.
        """
        # MVO position is reported negated; flip to our sign convention.
        mvo_x = -data.mvo.pos_x
        mvo_y = -data.mvo.pos_y
        mvo_z = -data.mvo.pos_z
        if abs(mvo_x) + abs(mvo_y) + abs(mvo_z) > 0.07:
            if self.ref_pos_x == -1:
                # First meaningful reading becomes the origin.
                self.ref_pos_x = mvo_x
                self.ref_pos_y = mvo_y
                self.ref_pos_z = mvo_z
            else:
                self.pos_x = mvo_x - self.ref_pos_x
                self.pos_y = mvo_y - self.ref_pos_y
                self.pos_z = mvo_z - self.ref_pos_z

        # IMU quaternion (q0 is the scalar part) -> heading in degrees.
        self.yaw = quat_to_yaw_deg(data.imu.q1, data.imu.q2, data.imu.q3,
                                   data.imu.q0)

        if self.write_log_data:
            if self.write_header:
                self.log_file.write('%s\n' % data.format_cvs_header())
                self.write_header = False
            self.log_file.write('%s\n' % data.format_cvs())

    def handle_flight_received(self, event, sender, data):
        """
            Create a file in local directory to receive image from the drone
            (filename is timestamped with self.date_fmt).
        """
        stamp = datetime.datetime.now().strftime(self.date_fmt)
        path = f'{self.media_directory}/tello-{stamp}.jpg'
        with open(path, 'wb') as out_file:
            out_file.write(data)
        log.info('Saved photo to %s' % path)
Exemple #10
0
pygame.display.set_caption("Fellas... the game")

# variables for the game loop
running = True
clock = pygame.time.Clock()

# main state handler
stateHandler = StateHandler()

# initial states
firstState = TitleScreen(stateHandler)
battleState = Battle(stateHandler)
deathState = DeathScreen(stateHandler)
winState = WinScreen(stateHandler)
finalWin = FinalWin(stateHandler)
soundPlayer = SoundPlayer()

# Pick the soundtrack. The easter-egg track plays only when a first CLI
# argument is present and equals "hmmmmm"; the length guard fixes an
# IndexError when the game is launched with no arguments.
if len(sys.argv) > 1 and sys.argv[1] == "hmmmmm":
    soundPlayer.play("dope.ogg")
else:
    soundPlayer.play("mountain_king.ogg")

stateHandler.configStates(firstState, battleState, deathState, winState,
                          finalWin)  # configure the statehandler

while running:

    for event in pygame.event.get():  # loop through the events
        if event.type == pygame.QUIT:  # user quit?
            running = False  # set the exit flag
class KeyboardHandler:
    """Watches keyboard input devices, plays a sound for every keypress
    and reveals the GUI window when the hotkey sequence (QAZ123) is typed.
    """

    def __init__(self):
        logger.debug("Keyboard detector created.")
        self.keyboardList = self.find_keyboard()
        self.threads = []
        self.inputRecord = []   # prefix of the hotkey typed so far
        self.hotKey = [16, 30, 44, 2, 3, 4]  # scancodes for Q A Z 1 2 3
        self.sp = SoundPlayer()
        # Goes through the GUIID property setter below.
        self.GUIID = None

    # list all event's name and its device
    def show_device(self):
        # os.chdir(deviceFilePath)
        for i in os.listdir(deviceFilePath):
            namePath = deviceFilePath + i + "/device/name"
            if os.path.isfile(namePath):
                # open() replaces the Python-2-only file() builtin
                logger.info("Name: %s Device: %s" % (i, open(namePath).read()))

    def set_style(self, style):
        self.sp.set_style(style)

    def set_volume(self, volume):
        self.sp.set_volume(volume)

    def set_pitch(self, pitch):
        self.sp.set_pitch(pitch)

    def get_player_infor(self):
        return self.sp.get_infor()

    @property
    def GUIID(self):
        # Bug fix: the original getter returned self.GUIID, which re-entered
        # this property and recursed forever, and the missing setter made the
        # assignment in __init__ raise AttributeError. Keep the value in a
        # private backing field instead.
        return self._GUIID

    @GUIID.setter
    def GUIID(self, value):
        self._GUIID = value

    def show_GUI(self):
        """Map and focus the GUI window (no-op when no window id is set)."""
        if not self.GUIID:
            return
        # command = "xdotool windowactivate --sync %s" % self.GUIID
        command = "xdotool windowmap --sync %s && xdotool windowactivate --sync %s" % (self.GUIID, self.GUIID)
        # NOTE(review): 'commands' is a Python-2-only module (subprocess
        # replaces it on Python 3) — confirm the target interpreter.
        commands.getstatusoutput(command)

    # new way to find keyboard
    # return with a list of keyboard's event
    def find_keyboard(self):
        keyboardList = []

        deviceInfo = open("/proc/bus/input/devices").read().lower().split("\n\n")
        # Keep USB devices (bus=0003) that are not mice, plus anything that
        # calls itself a keyboard.
        for i in filter(
            lambda i: (not re.search("mouse", i) and re.search("bus=0003", i)) or re.search("keyboard", i), deviceInfo
        ):
            m = re.search(r"event\d+", i)
            if m:
                keyboardList.append(m.group())
        assert len(keyboardList) > 0
        return keyboardList

    # event.value:1 for pressed, 0 for release
    def detect_input_from_event(self, eventName):
        """Blocking loop: read key events from one device, play a sound on
        every press and feed presses to the hotkey detector."""
        dev = InputDevice("/dev/input/" + eventName)
        while True:
            select([dev], [], [])
            for event in dev.read():
                if (event.value == 1 or event.value == 0) and event.code != 0:
                    if event.value == 1:
                        self.sp.play(event.code)
                        self.check_show_window(event.code)
                    logger.debug("Key: %s Status: %s" % (event.code, "pressed" if event.value else "release"))

    # check input if satisfy the hotkey
    def check_show_window(self, keycode):
        """Advance the hotkey matcher by one keypress; show the GUI when the
        full QAZ123 sequence has been typed."""
        # Ignore auto-repeat of the last matched key.
        if len(self.inputRecord) > 0 and keycode == self.inputRecord[-1]:
            return
        if keycode == self.hotKey[len(self.inputRecord)]:
            self.inputRecord.append(keycode)
            logger.debug(self.inputRecord)
            if len(self.inputRecord) == 6:
                self.show_GUI()
                self.inputRecord = []
        else:
            # Wrong key: restart the sequence from scratch.
            self.inputRecord = []

    def start_detecting(self):
        """Spawn one listener thread per detected keyboard device."""
        for i in self.keyboardList:
            t = threading.Thread(target=self.detect_input_from_event, args=(i,))
            self.threads.append(t)
            t.start()

    # kill all threads
    def stop_detecting(self):
        for t in self.threads:
            # NOTE(review): _Thread__stop is a Python-2 private API and does
            # not exist on Python 3 — confirm interpreter / replace with a
            # cooperative stop flag.
            t._Thread__stop()
Exemple #12
0
                self.logic.snake.next_y_dir = 1
        if event.key() == Qt.Key_Space:
            if self.pause == True:
                self.start_game()
            else:
                self.pause_game()

    def inv_color(self, color):
        """Return the complementary color: each RGB channel mirrored around 255."""
        r, g, b = color.red(), color.green(), color.blue()
        return QColor(255 - r, 255 - g, 255 - b)


class Canvas(QWidget):
    """Fixed-size drawing surface that owns a single shared QPainter."""

    def __init__(self, x, y):
        super().__init__()
        self.resize(x, y)
        self.painter = QPainter()

    def set_color(self, color):
        # Fills (brush) and outlines (pen) use the same color.
        for apply_color in (self.painter.setBrush, self.painter.setPen):
            apply_color(color)


if __name__ == '__main__':
    # Wire up the application: persistence, audio, then the main window.
    application = QApplication(sys.argv)
    db = DBWorker()
    player = SoundPlayer()
    window = MainForm(db, player)
    window.show()
    sys.exit(application.exec())
Exemple #13
0
    def loadThings(self):
        """Bootstrap every shared service onto the loader object and preload
        UI animation frames.

        NOTE(review): the order below matters — most constructors consume the
        objects created just before them — so do not reorder.
        """

        # Local imports keep startup cost out of module import time.
        from Config import Config
        from DataReader import DataReader
        from Dictionary import Dictionary

        self.bindThings()

        self.__loader.dataReader = DataReader()
        self.__loader.config = Config(self.__loader.dataReader)
        self.__loader.dictionaries = Dictionary(self.__loader.dataReader,
                                                self.__loader.config)

        from FileDialogs import FileDialogs
        self.__loader.fileDialogs = FileDialogs(self.__loader.dictionaries,
                                                self.__loader.config,
                                                self.__loader)

        from AutoSetter import AutoSetter
        self.__loader.autoSetter = AutoSetter(self.__loader.config,
                                              self.__loader.fileDialogs)

        from SoundPlayer import SoundPlayer
        self.__loader.soundPlayer = SoundPlayer(self.__loader.config)

        from IO import IO
        self.__loader.io = IO(self.__loader.dictionaries, self.__loader.config,
                              self.__loader)

        self.__loader.sections = self.__loader.io.getFileNamesInDir(
            "templates/bank2_8/")

        from VirtualMemory import VirtualMemory
        self.__loader.virtualMemory = VirtualMemory(self.__loader)

        from ColorPalettes import ColorPalettes
        self.__loader.colorPalettes = ColorPalettes(self.__loader)

        from Logger import Logger
        self.__loader.logger = Logger(self.__loader)

        self.__loader.io.loadSyntax()

        # Target sizes for the animations, derived from the screen size.
        __w = self.__loader.screenSize[0] - 150
        __h = self.__loader.screenSize[1] - 200
        __h = __h - (__h // 11.25) - (__h // 30) * 2

        self.loadAnimationFrames("logo", 20, self.__loader.atariFrames, "gif",
                                 (__w, __h, 0.6))

        self.loadAnimationFrames("rocket", 67, self.__loader.rocketFrames,
                                 "png", (__w, __h, 0.2))

        self.loadAnimationFrames("tape", 31, self.__loader.tapeFrames, "gif",
                                 (__w // 2.75, __h // 2.75, 1))

        s = (round(self.__loader.screenSize[0] // 1.15 // 7 * 6),
             round((self.__loader.screenSize[1] // 1.25 - 55) // 20 * 19))
        """
        self.loadAnimationFrames("centipede", 19,
                                 self.__loader.centipedeFrames, "png",
                                 (s[0], s[1], 1)
                                 )
        """

        self.loadAnimationFrames("plasma", 65, self.__loader.rainbowFrames,
                                 "gif", (s[0], s[1], 1))

        self.loadAnimationFrames("lock", 4, self.__loader.lockedFramesTopLevel,
                                 "png", (s[0], s[1], 1))

        from ColorDict import ColorDict
        self.__loader.colorDict = ColorDict(self.__loader)

        from TiaTone import TiaTone
        self.__loader.tiaTone = TiaTone()

        from PiaNotes import PiaNotes
        self.__loader.piaNotes = PiaNotes(self.__loader)

        from Executor import Executor
        self.__loader.executor = Executor(self.__loader)

        # All services are up: dismiss the loading splash window.
        self.__Loading_Window.destroy()