# Oculus Rift head-mounted display example for rendering 3D with head tracking.
# Press the 'q' key or use the application GUI to exit. Press 'r' to recenter
# the HMD's view. Requires PsychXR to be installed.
#
# This file is public domain.
#
from psychopy import visual, event, core
from psychopy.tools.rifttools import *  # math types are accessed from here
import pyglet.gl as GL
import math

# Create a VR session, treat the returned object just like a regular window.
# Change headLocked to True to disable head tracking, increase the number of
# samples for anti-aliasing, could be 2, 4, 6, 8, 16 or 32 depending on your
# hardware.
# NOTE(review): this excerpt is truncated — within the visible lines the render
# loop never calls hmd.flip() and never updates stopApp, so as shown it cannot
# terminate or present frames. Presumably the original example continues past
# this point with a flip and a key check; verify against the full script.
hmd = visual.Rift(headLocked=False, samples=1)

# loop until the user quits the app through the GUI menu
stopApp = False
while not stopApp:
    # Render once per eye; the Rift window exposes one draw buffer per eye.
    for i in ('left', 'right'):
        hmd.setBuffer(i)  # select the eye buffer to draw to

        # Setup the viewing parameters for the current buffer, this needs to be
        # called every time the buffer changes.
        #
        # Use setRiftView to setup the projection and view matrices
        # automatically from data provided by the API. Take note of which eye
        # buffer is active when rendering.
        #
        hmd.setRiftView()
# ---- "Esempio n. 2" (Example 2) -- scraped-page fragment separator; the stray
# "0" that followed was a vote-count artifact from the source page. ----
# Oculus Rift head-mounted display example for rendering 3D with head tracking.
# Press the 'q' key or use the application GUI to exit. Press 'r' to recenter
# the HMD's view. Requires PsychXR 0.2+ to be installed.
#
# This file is public domain.
#
from psychopy import visual, event, core
from psychopy.tools import arraytools, rifttools
import pyglet.gl as GL

# Create a VR session, treat the returned object just like a regular window.
# Increase the number of samples for anti-aliasing, could be 2, 4, 6, 8, 16 or
# 32 depending on your hardware. The GLFW backend is preferred when using VR.
# NOTE(review): this excerpt is truncated — the render loop that follows this
# setup is cut off at the fragment boundary below.
hmd = visual.Rift(samples=1, color=(0, 0, 0), colorSpace='rgb', winType='glfw')

# Create a LibOVRPose object to represent the rigid body pose of the triangle in
# the scene. The position of the triangle will be 2 meters away from the user at
# eye height which we obtain from the HMD's settings.
trianglePosition = (0., hmd.eyeHeight, -2.)
trianglePose = rifttools.LibOVRPose(trianglePosition)

# convert the pose to a model (rigid-body) transformation matrix
# (getModelMatrix returns a model matrix, not a view matrix)
translationMatrix = trianglePose.getModelMatrix()

# convert to format Pyglet's GL libraries accept
# (presumably a ctypes pointer usable with glMultTransposeMatrixf — verify)
translationMatrix = arraytools.array2pointer(translationMatrix)

# uncomment the line below to show a performance HUD
# hmd.perfHudMode('PerfSummary')

# loop until the user quits the app through the GUI menu
# Oculus Rift head-mounted display example for rendering 3D with head tracking.
# Press the 'q' key or use the application GUI to exit. Press 'r' to recenter
# the HMD's view. Requires PsychXR 0.2+ to be installed.
#
# This file is public domain.
#
from psychopy import visual, event, core
from psychopy.tools import arraytools, rifttools
import pyglet.gl as GL

# Create a VR session, treat the returned object just like a regular window.
# Increase the number of samples for anti-aliasing, could be 2, 4, 6, 8, 16 or
# 32 depending on your hardware.
# NOTE(review): this excerpt is truncated — the while loop below is cut off
# mid-comment at the fragment boundary, so the tracking-state retrieval and the
# exit condition are not visible here.
hmd = visual.Rift(samples=1)

# Create a LibOVRPose object to represent the rigid body pose of the triangle in
# the scene. The position of the triangle will be 2 meters away from the user at
# eye height which we obtain from the HMD's settings.
trianglePosition = (0., hmd.eyeHeight, -2.)
trianglePose = rifttools.LibOVRPose(trianglePosition)

# convert the pose to a model (rigid-body) transformation matrix
# (getModelMatrix returns a model matrix, not a view matrix)
translationMatrix = trianglePose.getModelMatrix()

# convert to format Pyglet's GL libraries accept
translationMatrix = arraytools.array2pointer(translationMatrix)

# loop until the user quits the app through the GUI menu
stopApp = False
while not stopApp:
    # Get the current tracking state for the HMD which contains lots of
# ---- "Esempio n. 4" (Example 4) -- scraped-page fragment separator; the stray
# "0" that followed was a vote-count artifact from the source page. ----
# Minimal Oculus Rift head-mounted display example. Press the 'q' key or use
# the application GUI to exit. Requires PsychXR to be installed.
#
# This file is public domain.
#
from psychopy import visual, event, core  # visual imported, even if not used!

# Create a VR session, treat the returned object just like a regular window.
#
# NOTE(review): this excerpt is truncated right after the "check if the
# application should exit" comment — the key-polling / stopApp update is not
# visible, so as shown the loop cannot terminate.
hmd = visual.Rift()

# loop until the user quits the app through the GUI menu
stopApp = False
while not stopApp:
    # Render once per eye; the Rift window exposes one draw buffer per eye.
    for i in ('left', 'right'):
        hmd.setBuffer(i)  # select the eye buffer to draw to

        # Setup the viewing parameters for the current buffer, this needs to be
        # called every time the buffer changes.
        #
        # For standard PsychoPy stimuli (e.g. GratingStim, ImageStim, etc.) you
        # should use 'setDefaultView' with 'mono=True' when creating a
        # visual.Rift instance. This configures the headset to properly render
        # 2D stimuli, treating the HMD as a monitor.
        #
        hmd.setDefaultView()

    # send the rendered buffer to the HMD
    hmd.flip()

    # check if the application should exit
# ---- "Esempio n. 5" (Example 5) -- scraped-page fragment separator; the stray
# "0" that followed was a vote-count artifact from the source page. ----
### Begin main script ###

# Open camera stream
# NOTE(review): camera_backend, opencv_settings, ueye_settings, negate,
# mag_factor and the stream/FrameStim classes are defined earlier in the
# original script, outside this excerpt. The loop below is also truncated —
# KEEPGOING is never set False within the visible lines.
if camera_backend == 'opencv':
    stream = OpenCV_VideoStream(**opencv_settings)
elif camera_backend == 'ueye':
    stream = uEyeVideoStream(**ueye_settings)
else:
    raise RuntimeError('Unknown backend')

# Set post-proc function, default application to False
stream.setPostproc(negate)
stream.switchApplyPostproc(False)

# Open handle to rift
hmd = visual.Rift(monoscopic=True, color=-1,  warnAppFrameDropped=False)

# Prep framestim
# NOTE(review): _hmdBufferSize is a private attribute of visual.Rift — this may
# break across PsychoPy versions; prefer a public size accessor if one exists.
disp_size = [x * mag_factor for x in hmd._hmdBufferSize]
framestim = FrameStim(hmd, display_size=disp_size, interpolate=True)

#timestamps = []

# Begin main loop
KEEPGOING = True
while KEEPGOING:
    # Get frame from stream, update and display framestim
    framestim.frame = stream.get_frame()
    framestim.draw()
    t = hmd.flip()  # flip() presumably returns the frame timestamp — verify
    #timestamps.append(t)