Esempio n. 1
0
    def poll_events(self):
        """
        Retrieve and forward all the events pending in the VR system to the registered event handlers.

        Tracks input-focus capture/release in ``self.has_focus``.  If the
        deprecated ``process_vr_event`` hook is defined on the instance,
        events are routed to it (with a one-time deprecation warning);
        otherwise each callable in ``self.event_handlers`` is invoked with
        every event.
        """
        event = openvr.VREvent_t()
        # pollNextEvent fills `event` in place and returns truthy while the
        # queue is non-empty.
        while self.vr_system.pollNextEvent(event):
            if event.eventType == openvr.VREvent_InputFocusCaptured:
                if self.verbose:
                    print("Application captured the input focus")
                self.has_focus = True
            elif event.eventType == openvr.VREvent_InputFocusReleased:
                if self.verbose:
                    print("Application released the input focus")
                self.has_focus = False
            if hasattr(self, 'process_vr_event'):
                if not self.process_vr_event_notified:
                    # BUG FIX: the warning previously named 'update_action()',
                    # but the deprecated hook detected and called here is
                    # 'process_vr_event()'.
                    print(
                        "WARNING: 'process_vr_event()' method is deprecated and will be removed in a next release"
                    )
                    self.process_vr_event_notified = True
                self.process_vr_event(event)
            else:
                for event_handler in self.event_handlers:
                    event_handler(event)
Esempio n. 2
0
 def __init__(self, multisample=0, znear=0.1, zfar=1000, poll_tracked_device_frequency=None):
     """
     Initialise the OpenVR scene application.

     Creates the VR system and compositor, one GL framebuffer per eye at
     the runtime-recommended size, per-eye projection and inverted
     eye-to-head transforms, a controller actor, and the pose/event
     buffers reused every frame.

     :param multisample: MSAA sample count forwarded to each eye framebuffer.
     :param znear: near clip plane distance for the projection matrices.
     :param zfar: far clip plane distance for the projection matrices.
     :param poll_tracked_device_frequency: how often to re-poll for tracked
         devices; units depend on the caller's render loop -- TODO confirm
         against the code that reads ``_poll_tracked_device_frequency``.
     """
     # Start OpenVR as a full scene application (requires a running runtime).
     self.vr_system = openvr.init(openvr.VRApplication_Scene)
     w, h = self.vr_system.getRecommendedRenderTargetSize()
     # One framebuffer per eye, both at the recommended render-target size.
     self.vr_framebuffers = (OpenVRFramebuffer(w, h, multisample=multisample),
                             OpenVRFramebuffer(w, h, multisample=multisample))
     self._multisample = multisample
     self.vr_compositor = openvr.VRCompositor()
     if self.vr_compositor is None:
         raise Exception('unable to create compositor')
     # GL-side setup; assumes a current GL context exists -- TODO confirm caller.
     self.vr_framebuffers[0].init_gl()
     self.vr_framebuffers[1].init_gl()
     # ctypes array reused every frame to receive all tracked-device poses.
     self._poses = (openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount)()
     # Per-eye projection matrices (left, right) converted to numpy arrays.
     self.projection_matrices = (np.asarray(matrixForOpenVRMatrix(self.vr_system.getProjectionMatrix(openvr.Eye_Left,
                                                                                                     znear, zfar))),
                                 np.asarray(matrixForOpenVRMatrix(self.vr_system.getProjectionMatrix(openvr.Eye_Right,
                                                                                                     znear, zfar))))
     # Inverted eye-to-head transforms; the .I attribute assumes
     # matrixForOpenVRMatrix returns a numpy.matrix -- TODO confirm.
     self.eye_transforms = (np.asarray(matrixForOpenVRMatrix(self.vr_system.getEyeToHeadTransform(openvr.Eye_Left)).I),
                            np.asarray(matrixForOpenVRMatrix(self.vr_system.getEyeToHeadTransform(openvr.Eye_Right)).I))
     self.view = np.eye(4, dtype=np.float32)
     # Scratch per-eye view matrices, filled in while rendering.
     self.view_matrices  = (np.empty((4,4), dtype=np.float32),
                            np.empty((4,4), dtype=np.float32))
     # Actor that renders the tracked controllers from the shared pose array.
     self.controllers = TrackedDevicesActor(self._poses)
     #self.controllers.show_controllers_only = False
     self.controllers.init_gl()
     # Event struct reused by the event-polling loop.
     self.vr_event = openvr.VREvent_t()
     self._poll_tracked_device_count()
     self._poll_tracked_device_frequency = poll_tracked_device_frequency
     # Frame counter / haptic-pulse timestamp bookkeeping.
     self._frames_rendered = 0
     self._pulse_t0 = 0.0
Esempio n. 3
0
def _dispatch_touch(event, wheel, hand):
    """Route a touchpad/trigger touch or untouch event to the wheel for one hand.

    ``hand`` is ``'left'`` or ``'right'`` and selects the matching
    ``wheel`` setter (e.g. ``set_trackpad_touch_left``).  Extracted because
    the left and right branches were duplicated almost verbatim.
    """
    button = event.data.controller.button
    if event.eventType == openvr.VREvent_ButtonTouch:
        if DEBUG:
            print(hand.upper() + " HAND EVENT: BUTTON TOUCH, BUTTON ID", button)
        if button == openvr.k_EButton_SteamVR_Touchpad:
            getattr(wheel, 'set_trackpad_touch_' + hand)()
        elif button == openvr.k_EButton_SteamVR_Trigger:
            getattr(wheel, 'set_trigger_touch_' + hand)()
    elif event.eventType == openvr.VREvent_ButtonUntouch:
        if DEBUG:
            print(hand.upper() + " HAND EVENT: BUTTON UNTOUCH, BUTTON ID", button)
        if button == openvr.k_EButton_SteamVR_Touchpad:
            getattr(wheel, 'set_trackpad_untouch_' + hand)()
        elif button == openvr.k_EButton_SteamVR_Trigger:
            getattr(wheel, 'set_trigger_untouch_' + hand)()


def do_work(vrsystem, left_controller: Controller, right_controller: Controller, wheel: Wheel, poses):
    """Poll poses and events for one frame and feed them to the wheel.

    Updates both controllers from the seated-universe poses, forwards
    touch/untouch and press/unpress events for each hand to ``wheel``,
    then runs either the wheel's edit mode or its normal update.
    """
    vrsystem.getDeviceToAbsoluteTrackingPose(openvr.TrackingUniverseSeated, 0, len(poses), poses)
    left_controller.update(poses[left_controller.id.value])
    right_controller.update(poses[right_controller.id.value])
    event = openvr.VREvent_t()
    while vrsystem.pollNextEvent(event):
        hand = None
        if event.trackedDeviceIndex == left_controller.id.value:
            _dispatch_touch(event, wheel, 'left')
            hand = 'left'
        if event.trackedDeviceIndex == right_controller.id.value:
            _dispatch_touch(event, wheel, 'right')
            hand = 'right'
        if hand:
            button = event.data.controller.button
            if event.eventType == openvr.VREvent_ButtonPress:
                if DEBUG:
                    print(hand, "HAND EVENT: BUTTON PRESS, BUTTON ID", button)
                wheel.set_button_press(button, hand)
            if event.eventType == openvr.VREvent_ButtonUnpress:
                if DEBUG:
                    print(hand, "HAND EVENT: BUTTON UNPRESS, BUTTON ID", button)
                wheel.set_button_unpress(button, hand)
    if wheel.config.edit_mode:
        wheel.edit_mode(left_controller, right_controller)
    else:
        wheel.update(left_controller, right_controller)
Esempio n. 4
0
 def poll_vr_events(self):
     """
     Drain the VR event queue, registering newly activated tracked devices
     and dropping ones that are no longer tracked.
     """
     pending = openvr.VREvent_t()
     while self.vrsystem.pollNextEvent(pending):
         kind = pending.eventType
         if kind == openvr.VREvent_TrackedDeviceActivated:
             self.add_tracked_device(pending.trackedDeviceIndex)
         elif kind == openvr.VREvent_TrackedDeviceDeactivated:
             # Only stop tracking devices we were actually tracking.
             if pending.trackedDeviceIndex in self.device_index_map:
                 self.remove_tracked_device(pending.trackedDeviceIndex)
Esempio n. 5
0
 def checkEvent(self, result):
     """Poll one VR event and report whether it is of interest.

     If the event is a button press/unpress of the grip or trigger on a
     controller, append the button id and event type (both as strings)
     to ``result`` and return True; otherwise return False.
     """
     event = openvr.VREvent_t()
     if not self.vr.pollNextEvent(event):
         return False
     # Only tracked-device class 2 (controllers) is of interest.
     if self.vr.getTrackedDeviceClass(event.trackedDeviceIndex) != 2:
         return False
     role = self.vr.getControllerRoleForTrackedDeviceIndex(
         event.trackedDeviceIndex)
     if role == openvr.TrackedControllerRole_Invalid:
         print("controller not in base")
         return False
     button = event.data.controller.button
     if button not in (openvr.k_EButton_Grip,
                       openvr.k_EButton_SteamVR_Trigger):
         return False
     if event.eventType not in (openvr.VREvent_ButtonPress,
                                openvr.VREvent_ButtonUnpress):
         return False
     # Same values the original branches appended via the named constants:
     # the button and event-type ids, stringified.
     result += [str(button), str(event.eventType)]
     return True
Esempio n. 6
0
 def __init__(self,
              multisample=0,
              znear=0.1,
              zfar=1000,
              window_size=(960, 1080)):
     """
     Initialise OpenVR, per-eye framebuffers, projection and eye-transform
     matrices, and an initial scan for controller device indices.

     :param multisample: MSAA sample count for each eye framebuffer.
     :param znear: near clip distance for the projection matrices.
     :param zfar: far clip distance for the projection matrices.
     :param window_size: (width, height) of the companion window.
     """
     # Start OpenVR as a scene application; requires a running VR runtime.
     self.vr_system = openvr.init(openvr.VRApplication_Scene)
     w, h = self.vr_system.getRecommendedRenderTargetSize()
     self.window_size = window_size
     self.multisample = multisample
     # One framebuffer per eye at the recommended render-target size.
     self.vr_framebuffers = (OpenVRFramebuffer(w,
                                               h,
                                               multisample=multisample),
                             OpenVRFramebuffer(w,
                                               h,
                                               multisample=multisample))
     self.vr_compositor = openvr.VRCompositor()
     if self.vr_compositor is None:
         raise Exception('unable to create compositor')
     # GL-side setup; assumes a current GL context exists -- TODO confirm caller.
     self.vr_framebuffers[0].init_gl()
     self.vr_framebuffers[1].init_gl()
     # ctypes pose array reused each frame.
     poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
     self.poses = poses_t()
     # Per-eye projection matrices (left, right) as numpy arrays.
     self.projection_matrices = (np.asarray(
         matrixForOpenVRMatrix(
             self.vr_system.getProjectionMatrix(openvr.Eye_Left, znear,
                                                zfar))),
                                 np.asarray(
                                     matrixForOpenVRMatrix(
                                         self.vr_system.getProjectionMatrix(
                                             openvr.Eye_Right, znear,
                                             zfar))))
     # Inverted eye-to-head transforms; the .I attribute assumes
     # matrixForOpenVRMatrix returns a numpy.matrix -- TODO confirm.
     self.eye_transforms = (np.asarray(
         matrixForOpenVRMatrix(
             self.vr_system.getEyeToHeadTransform(openvr.Eye_Left)).I),
                            np.asarray(
                                matrixForOpenVRMatrix(
                                    self.vr_system.getEyeToHeadTransform(
                                        openvr.Eye_Right)).I))
     # Scratch per-eye view matrices, filled in during rendering.
     self.view_matrices = (np.empty(
         (4, 4), dtype=np.float32), np.empty((4, 4), dtype=np.float32))
     self.hmd_matrix = np.eye(4, dtype=np.float32)
     # Event struct reused by the event-polling loop.
     self.vr_event = openvr.VREvent_t()
     # Record the device indices currently classified as controllers.
     self._controller_indices = []
     for i in range(openvr.k_unMaxTrackedDeviceCount):
         if self.vr_system.getTrackedDeviceClass(
                 i) == openvr.TrackedDeviceClass_Controller:
             self._controller_indices.append(i)
Esempio n. 7
0
 def __init__(self,
              multisample=0,
              znear=0.1,
              zfar=1000,
              window_size=(960, 1080)):
     """
     Set up rendering parameters, pose buffers, identity eye/camera/HMD
     matrices and controller-polling state.  No VR runtime calls happen here.
     """
     def _identity():
         # Fresh 4x4 float32 identity for each matrix slot.
         return np.eye(4, dtype=np.float32)

     self.multisample = multisample
     self.znear, self.zfar = znear, zfar
     self.window_size = np.array(window_size, dtype=np.int64)
     # ctypes pose array reused each frame.
     self.poses = (openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount)()
     self.eye_matrices = (_identity(), _identity())
     self.camera_matrices = (_identity(), _identity())
     self.hmd_matrix = _identity()
     self.hmd_matrix_inv = _identity()
     # Event struct reused by the event-polling loop.
     self.vr_event = openvr.VREvent_t()
     self._controller_indices = []
     self._controller_poll_interval = 0.25
     self._nframes = 0
     self._time_to_poll = 0.0
Esempio n. 8
0
    def update_controller_states(self):
        """
        Per-frame controller update: drain VR events for drag detection,
        apply the controllers' drag transform, and handle inertial coasting
        after a drag is released.
        """
        new_event = openvr.VREvent_t()
        # Feed every pending event to the drag detector, which updates each
        # controller's is_dragging state.
        while openvr.VRSystem().pollNextEvent(new_event):
            self._check_controller_drag(new_event)
        now_is_dragging = self.left_controller.is_dragging or self.right_controller.is_dragging

        # NOTE(review): `obj` is not defined in this method or its
        # parameters -- presumably a module-level scene object whose
        # model_matrix is being moved; verify where it comes from.
        xform = self._compute_controllers_transform()
        if xform is not None:
            obj.model_matrix *= xform

        # Check for drag begin/end
        if self.is_dragging and not now_is_dragging:
            # print ("drag released!")
            # maybe record velocity
            self._begin_inertial_coast()
        elif now_is_dragging and not self.is_dragging:
            # print ("drag started!")
            self.translation_history.clear()
            self.speed = 0.0
        elif now_is_dragging:  # continued drag
            pass
        else:  # not dragging, so maybe slow inertial coasting
            # Decay the coasting speed over the elapsed time and keep
            # translating along the remembered drag direction until the
            # speed falls below the static-friction threshold.
            if self.speed > 0:
                dt = time.time() - self.previous_update_time
                dv = dt * self.velocity_damping
                self.speed -= dv
                if self.speed < 0:  # stay positive
                    self.speed = 0.0
                elif self.speed < self.min_velocity:  # static friction takes over at the very end
                    self.speed = 0.0
                else:
                    # print ("speed = %.3f meters per second" % self.speed)
                    dx = self.speed * dt * self.direction
                    obj.model_matrix *= MyTransform.translation(dx)
        self.previous_update_time = time.time()

        # Remember drag state
        self.is_dragging = now_is_dragging
Esempio n. 9
0
 def poll_events(self):
     """Drain the VR event queue, forwarding each event to process_vr_event()."""
     pending = openvr.VREvent_t()
     while self.vr_system.pollNextEvent(pending):
         self.process_vr_event(pending)
Esempio n. 10
0
 def __init__(self,
              multisample=0,
              znear=0.1,
              zfar=1000,
              window_size=(960, 1080)):
     """
     Initialise OpenVR plus every per-eye resource used during rendering:
     framebuffers, projection matrices and raw projection bounds,
     eye-to-head transforms (and their inverses), eye/camera/HMD matrices,
     and an initial controller poll.

     :param multisample: MSAA sample count for each eye framebuffer.
     :param znear: near clip distance for the projection matrices.
     :param zfar: far clip distance for the projection matrices.
     :param window_size: (width, height) of the companion window.
     """
     # Start OpenVR as a scene application; requires a running VR runtime.
     self.vr_system = openvr.init(openvr.VRApplication_Scene)
     w, h = self.vr_system.getRecommendedRenderTargetSize()
     self.render_target_size = np.array((w, h), dtype=np.float32)
     self.window_size = np.array(window_size, dtype=np.int64)
     self.multisample = multisample
     # One framebuffer per eye at the recommended render-target size.
     self.vr_framebuffers = (OpenVRFramebuffer(w,
                                               h,
                                               multisample=multisample),
                             OpenVRFramebuffer(w,
                                               h,
                                               multisample=multisample))
     self.vr_compositor = openvr.VRCompositor()
     if self.vr_compositor is None:
         raise Exception('unable to create compositor')
     # GL-side setup; assumes a current GL context exists -- TODO confirm caller.
     self.vr_framebuffers[0].init_gl()
     self.vr_framebuffers[1].init_gl()
     # ctypes pose array reused each frame.
     poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount
     self.poses = poses_t()
     self.znear, self.zfar = znear, zfar
     # Per-eye projection matrices (left, right) as numpy arrays.
     self.projection_matrices = (np.asarray(
         matrixForOpenVRMatrix(
             self.vr_system.getProjectionMatrix(openvr.Eye_Left, znear,
                                                zfar))),
                                 np.asarray(
                                     matrixForOpenVRMatrix(
                                         self.vr_system.getProjectionMatrix(
                                             openvr.Eye_Right, znear,
                                             zfar))))
     # Raw per-eye projection bounds (left/right/bottom/top tangents).
     self.projection_lrbts = (np.array(
         self.vr_system.getProjectionRaw(openvr.Eye_Left)),
                              np.array(
                                  self.vr_system.getProjectionRaw(
                                      openvr.Eye_Right)))
     # Eye-to-head transforms as-is...
     self.eye_to_head_transforms = (
         np.asarray(
             matrixForOpenVRMatrix(
                 self.vr_system.getEyeToHeadTransform(openvr.Eye_Left))),
         np.asarray(
             matrixForOpenVRMatrix(
                 self.vr_system.getEyeToHeadTransform(openvr.Eye_Right))))
     # ...and their inverses; the .I attribute assumes matrixForOpenVRMatrix
     # returns a numpy.matrix -- TODO confirm.
     self.eye_transforms = (np.asarray(
         matrixForOpenVRMatrix(
             self.vr_system.getEyeToHeadTransform(openvr.Eye_Left)).I),
                            np.asarray(
                                matrixForOpenVRMatrix(
                                    self.vr_system.getEyeToHeadTransform(
                                        openvr.Eye_Right)).I))
     # Per-eye and HMD matrices, updated during rendering.
     self.eye_matrices = (np.eye(4, dtype=np.float32),
                          np.eye(4, dtype=np.float32))
     self.camera_matrices = (np.eye(4, dtype=np.float32),
                             np.eye(4, dtype=np.float32))
     self.hmd_matrix = np.eye(4, dtype=np.float32)
     self.hmd_matrix_inv = np.eye(4, dtype=np.float32)
     # Event struct reused by the event-polling loop.
     self.vr_event = openvr.VREvent_t()
     self._nframes = 0
     self._poll_for_controllers()
Esempio n. 11
0
 def handle_input(self):
     """
     Per-frame input processing: drain SteamVR events, update the action
     state, trigger haptics on a rising edge, read the analog action, and
     refresh each hand's visibility, pose and render model.
     """
     # Note: Key events are handled by glfw in key_callback
     # Process SteamVR events.  BUG FIX: the previous loop called
     # process_vr_event() one extra time with a stale/unfilled event after
     # pollNextEvent() had already returned False.
     event = openvr.VREvent_t()
     while self.hmd.pollNextEvent(event):
         self.process_vr_event(event)
     # Process SteamVR action state
     # UpdateActionState is called each frame to update the state of the actions themselves. The application
     # controls which action sets are active with the provided array of VRActiveActionSet_t structs.
     action_sets = (openvr.VRActiveActionSet_t * 1)()
     action_set = action_sets[0]  # shares the array's buffer, so writes stick
     action_set.ulActionSet = self.action_set_demo
     openvr.VRInput().updateActionState(action_sets)
     # Hide the cubes while the "hide cubes" action is held.
     self.show_cubes = not get_digital_action_state(
         self.action_hide_cubes)[0]
     # Fire a short haptic pulse on whichever hand's haptic action just
     # went active (rising edge).
     bH, haptic_device = get_digital_action_rising_edge(
         self.action_trigger_haptic, True)
     if bH:
         for hand in self.hand:
             if haptic_device == hand.source:
                 openvr.VRInput().triggerHapticVibrationAction(
                     hand.action_haptic, 0, 1, 4, 1,
                     openvr.k_ulInvalidInputValueHandle)
     analog_data = openvr.VRInput().getAnalogActionData(
         self.action_analog_input, openvr.k_ulInvalidInputValueHandle)
     self.analog_value[0] = analog_data.x
     self.analog_value[
         1] = analog_data.y  # TODO: these seem to be unused...
     self.hand[Left].show_controller = True
     self.hand[Right].show_controller = True
     do_hide, hide_device = get_digital_action_state(
         self.action_hide_this_controller, True)
     if do_hide:
         for hand in self.hand:
             if hide_device == hand.source:
                 hand.show_controller = False
     for hand in self.hand:
         pose_data = openvr.VRInput().getPoseActionDataForNextFrame(
             hand.action_pose,
             openvr.TrackingUniverseStanding,
             openvr.k_ulInvalidInputValueHandle,
         )
         # Hide the controller when its pose action is inactive or the
         # reported pose is invalid.
         if not pose_data.bActive or not pose_data.pose.bPoseIsValid:
             hand.show_controller = False
             continue
         hand.pose = convert_steam_vr_matrix(
             pose_data.pose.mDeviceToAbsoluteTracking)
         # Resolve the device behind this action origin so we can load the
         # matching render model.
         origin_info = openvr.VRInput().getOriginTrackedDeviceInfo(
             pose_data.activeOrigin, )
         if origin_info.trackedDeviceIndex != openvr.k_unTrackedDeviceIndexInvalid:
             render_model_name = openvr.VRSystem(
             ).getStringTrackedDeviceProperty(
                 origin_info.trackedDeviceIndex,
                 openvr.Prop_RenderModelName_String)
             hand.render_model = self.find_or_load_render_model(
                 render_model_name)
             hand.render_model_name = render_model_name
Esempio n. 12
0
# Launch the Cheat Engine trainer matching this game version, then send
# Ctrl+F1 once it has had time to start.
trainer = subprocess.Popen(args=[CE_exes[mw.version]])
time.sleep(3)
keyboard.toggle([Key.ctrl_l, Key.f1], True)
time.sleep(0.2)
keyboard.toggle([Key.ctrl_l, Key.f1], False)

# NOTE(review): Python 2 print statements -- this script will not run
# under Python 3 as written.
print "Default camera controls deactivated."
print "Ctrl-c in this window to stop"

# Camera state shared by the main loop.
toggle_view_button_down = False
free_look_offset = [0, 0, 0]
view_mode = "FOLLOW"
head_offset_from_hmd = 0.1
old_yaw = 0

# Event struct reused by the polling loop below.
event = openvr.VREvent_t()

if mw.version != "1.12":
    PLAYER_HEIGHT -= 2.0
paused = False

# main loop
while (True):
    # Drain the VR event queue; un-pause when the HMD signals event 103
    # for the HMD device index.
    while (True):
        new_event = vrsystem.pollNextEvent(event)
        if not new_event:
            break
        else:
            # 103 -- presumably openvr.VREvent_TrackedDeviceUserInteractionStarted
            # (headset put back on); confirm against the openvr event enum.
            if event.eventType == 103 and event.trackedDeviceIndex == openvr.k_unTrackedDeviceIndex_Hmd:
                paused = False
                break
Esempio n. 13
0
    with openvr.glframework.glfw_app.GlfwApp(renderer, 'Photosphere') as app:
        # app.run_loop()

        app.init_gl()
        # renderer.compositor.setExplicitTimingMode(
        #							openvr.VRCompositorTimingMode_Explicit_RuntimePerformsPostPresentHandoff)
        frames_displayed = 0
        last_print_time = time.time()
        display_gl_time = 0
        getposes_time = 0

        poses_t = openvr.TrackedDevicePose_t * openvr.k_unMaxTrackedDeviceCount  #!!!
        poses = poses_t()

        render_times_ms = []
        ev = openvr.VREvent_t()
        while not glfw.window_should_close(app.window):

            frame_start_time = time.time()
            # app.render_scene()

            # app.render_scene() replacement:
            cur_frame_timings = []
            app.init_gl()
            glfw.make_context_current(app.window)
            cur_frame_timings.append(
                int((time.time() - frame_start_time) * 1000))

            app.renderer.render_scene()
            cur_frame_timings.append(
                int((time.time() - frame_start_time) * 1000))