def __init__(self):
    ovr.initialize(None)
    self.session, luid = ovr.create()
    hmdDesc = ovr.getHmdDesc(self.session)
    print("Servo client running")
    # Used for anti-shaking
    self.xtemp = 0
    self.ytemp = 0
    self.ztemp = 0
def init_oculus():
    '''
    Initialize a session with the Oculus Rift.
    The session will allow us to query it for data.
    '''
    ovr.initialize(None)
    session, luid = ovr.create()
    print("Connected to Oculus")
    return session
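# A minimal usage sketch for init_oculus(), added here for illustration only: it polls the
# returned session with the same getTrackingState/getEulerAngles calls that the pyBinSim
# tracker example further below uses. The polling count, the 10 ms sleep, and the final
# ovr.destroy/ovr.shutdown cleanup are assumptions, not part of the original snippet.
import time
import ovr

session = init_oculus()
try:
    for _ in range(100):  # poll for a short while (assumed duration)
        ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)
        if ts.StatusFlags & ovr.Status_OrientationTracked:
            pose = ts.HeadPose.ThePose
            # Same Euler-angle convention as in the tracker example below
            yawRad, pitchRad, rollRad = pose.Orientation.getEulerAngles(
                axis1=1, axis2=0, axis3=2, rotate_direction=1, handedness=1)
            print(yawRad, pitchRad, rollRad)
        time.sleep(0.01)  # ~100 Hz polling rate (assumed)
finally:
    ovr.destroy(session)
    ovr.shutdown()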
def rift(label, filename, video_length_in_s, process_name, autoplay_enabled,
         overwrite_enabled, acr, captureviewport):
    """
    Obtains and saves the data captured by the Oculus Rift in a JSON file.

    :param label: The label of the subject.
    :param filename: The filename of the video.
    :param video_length_in_s: The length of the video in seconds.
    :param process_name: The name of the process to monitor.
    :param autoplay_enabled: Whether automatic playback of the video is enabled or not.
    :param overwrite_enabled: Whether overwriting the current dataset is enabled or not.
    :param acr: Whether an ACR rating is requested after the measurement.
    :param captureviewport: Whether the viewport is captured during playback.
    """
    video_playing_started = False  # Necessary for tracking the playback state of the video
    data = {
        'filename': filename,
        'video_length_in_s': float(video_length_in_s),
        'hmd': 'rift',
        'pitch_yaw_roll_data_hmd': []
    }
    if acr:
        data["acr"] = 0

    # Initialize the ovr session here, as otherwise the video would not be shown on the Rift.
    ovr.initialize(None)
    session, luid = ovr.create()

    if autoplay_enabled:
        # Sleep a few seconds until the video is completely loaded by Whirligig
        if process_name == "Whirligig64bit.exe":
            helpers.autoplay_sleep(
                float(5.5)
            )  # Adjust value based on your computing power (between 4 and 6 is normal)
    else:
        print(
            "If the player has loaded the video, press the SPACE key to start the measurement."
        )

    while video_playing_started == False:
        if autoplay_enabled:
            pyautogui.press("space")
            start_time = timer()
            print("Recording of Pitch/Yaw/Roll/Time data will start now.")
            video_playing_started = True
        else:
            if keyboard.is_pressed("space"):
                start_time = timer()
                print("Recording of Pitch/Yaw/Roll/Time data will start now.")
                video_playing_started = True

    try:
        pyautogui.press(
            "r"
        )  # Press the R key to ensure the user starts watching the video at the initial position
        current_playback_state = timer() - start_time
        pitch, yaw, roll = getPYRDataFromRift(session)

        # Define the current position of the user (very first yaw value) as the initial position
        initial_yaw = yaw

        # Start viewport capturing if enabled
        if captureviewport:
            helpers.capture_viewport(video_length_in_s,
                                     "%s_%s" % (label, filename.split(".")[0]))

        # Get head orientation data from the Rift in a loop until playback is finished
        while video_length_in_s > current_playback_state:
            pitch, yaw, roll = getPYRDataFromRift(session)
            current_playback_state = timer() - start_time

            # Adjust the yaw value according to the very first recorded yaw value
            yaw = helpers.shift_yaw(yaw, initial_yaw)

            if current_playback_state is not None:
                data["pitch_yaw_roll_data_hmd"].append({
                    'pitch': pitch,
                    'yaw': yaw,
                    'roll': roll,
                    'sec': current_playback_state
                })
            # print("Pitch/Yaw/Roll: %s / %s / %s" % (int(pitch), int(yaw), int(roll)))

            # By adjusting this value, you can set the recording frequency (currently 200 Hz).
            time.sleep(float(0.005))

        if acr:
            data["acr"] = helpers.get_acr_by_cmd_input()

        helpers.write_to_json(label, data, filename.split(".")[0], overwrite_enabled)
        ovr.destroy(session)
        ovr.shutdown()
        print("Measurement done. Opening the next video if available.")
    except KeyboardInterrupt:  # Break if Ctrl+C is pressed
        ovr.destroy(session)
        ovr.shutdown()
        print(
            "Current measurement is interrupted and will not be saved. The program will exit now."
        )
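# The rift() function above calls a getPYRDataFromRift(session) helper that is not shown in
# this snippet. The following is only a sketch of what such a helper could look like, reusing
# the getTrackingState/getEulerAngles calls from the pyBinSim tracker example below; the axis
# convention and the conversion to degrees are assumptions, not the original implementation.
import math

def getPYRDataFromRift(session):
    # Query the current head pose from the Rift session
    ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)
    pose = ts.HeadPose.ThePose
    yawRad, pitchRad, rollRad = pose.Orientation.getEulerAngles(
        axis1=1, axis2=0, axis3=2, rotate_direction=1, handedness=1)
    # Return pitch/yaw/roll in degrees (assumed unit)
    return math.degrees(pitchRad), math.degrees(yawRad), math.degrees(rollRad)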
def initialize(params=None):
    return ovr.initialize(params)
UDP_SEND_UNITY_PORT = 25003
UDP_RECV_UNITY_PORT = 25005

recvSock = socket.socket(socket.AF_INET,     # Internet
                         socket.SOCK_DGRAM)  # UDP
recvSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
recvSock.setblocking(False)
recvSock.bind((UDP_IP, UDP_RECV_UNITY_PORT))

sendSock = socket.socket(socket.AF_INET,     # Internet
                         socket.SOCK_DGRAM)  # UDP

##################################################
### Initialize OVR ###############################
ovr.initialize(None)
hmd, luid = ovr.create()
hmdDesc = ovr.getHmdDesc(hmd)
angularAcc = ovr.Vector3f(0, 0, 0)
linearAcc = ovr.Vector3f(0, 0, 0)
timeSec = 0
##################################################

dateToday = str(datetime.date.today())
Ts = 0.01
reset = False
start = True
latched = False
elapsed = 0.0
experimentStart = 0.0
previousTime = unix.time()
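# The excerpt above only sets up the sockets and the OVR session. The sketch below shows one
# way the angularAcc/linearAcc/timeSec placeholders could be filled from the tracking state
# and forwarded to Unity over sendSock; the field names mirror the OVR C API's ovrPoseStatef,
# and the JSON payload layout and use of UDP_SEND_UNITY_PORT are illustrative assumptions,
# not taken from the original script.
import json

def poll_and_send_once():
    ts = ovr.getTrackingState(hmd, ovr.getTimeInSeconds(), True)
    headPoseState = ts.HeadPose
    angularAcc = headPoseState.AngularAcceleration  # assumed field names (ovrPoseStatef)
    linearAcc = headPoseState.LinearAcceleration
    timeSec = headPoseState.TimeInSeconds
    payload = json.dumps({
        'angularAcc': [angularAcc.x, angularAcc.y, angularAcc.z],
        'linearAcc': [linearAcc.x, linearAcc.y, linearAcc.z],
        'timeSec': timeSec,
    })
    sendSock.sendto(payload.encode('utf-8'), (UDP_IP, UDP_SEND_UNITY_PORT))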
def setUp(self):
    ovr.initialize(None)
    self.hmd, luid = ovr.create()
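# A matching tearDown is not shown in the excerpt above; a minimal sketch, assuming the usual
# ovr.destroy/ovr.shutdown cleanup used by the other snippets in this section:
def tearDown(self):
    ovr.destroy(self.hmd)
    ovr.shutdown()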
# import the necessary packages
import websocket
import time
import subprocess
import ovr
import sys
import math

ovr.initialize(None)
session, luid = ovr.create()
hmdDesc = ovr.getHmdDesc(session)
print("Servo client running")


def on_message(ws, message):
    print(message)


def on_error(ws, error):
    print("### error: ###")
    print(error)


def on_close(ws):
    print("### closed ###")
    ovr.destroy(session)
    ovr.shutdown()


def on_open(ws):
    print("### opened ###")
    ws.send("main_client")


# Used for anti-shaking
xtemp = 0
ytemp = 0
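# The excerpt above stops after the callback definitions. The sketch below illustrates the kind
# of tracking step the xtemp/ytemp "anti-shaking" variables suggest: read the head pose, apply a
# deadband, and forward the angles to the servo over the websocket. The deadband threshold, the
# message format, and the track_and_send name are assumptions made for illustration only.
def track_and_send(ws, threshold=2.0):
    global xtemp, ytemp
    ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)
    pose = ts.HeadPose.ThePose
    yawRad, pitchRad, rollRad = pose.Orientation.getEulerAngles(
        axis1=1, axis2=0, axis3=2, rotate_direction=1, handedness=1)
    yaw = math.degrees(yawRad)
    pitch = math.degrees(pitchRad)
    # Only send when the head has moved more than the deadband, to avoid servo jitter
    if abs(yaw - xtemp) > threshold or abs(pitch - ytemp) > threshold:
        xtemp, ytemp = yaw, pitch
        ws.send("%d,%d" % (int(yaw), int(pitch)))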
def start_tracker_app():
    oscIdentifier = '/pyBinSim'
    ip = '127.0.0.1'
    port = 10000
    nSources = 1
    minAngleDifference = 5

    if (len(sys.argv)) > 1:
        for i in range(len(sys.argv)):
            if (sys.argv[i] == '-port'):
                port = int(sys.argv[i + 1])
            if (sys.argv[i] == '-ip'):
                ip = sys.argv[i + 1]
            if (sys.argv[i] == '-nSources'):
                nSources = int(sys.argv[i + 1])
            if (sys.argv[i] == '-angleStep'):
                minAngleDifference = int(sys.argv[i + 1])

    # Internal settings:
    positionVector = np.arange(360)
    positionVectorSubSampled = range(0, 360, minAngleDifference)

    # Create OSC client
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", default=ip, help="The ip of the OSC server")
    parser.add_argument("--port", type=int, default=port,
                        help="The port the OSC server is listening on")
    args = parser.parse_args()
    client = udp_client.SimpleUDPClient(args.ip, args.port)

    # Create OVR session
    ovr.initialize(None)
    session, luid = ovr.create()
    hmdDesc = ovr.getHmdDesc(session)
    print(hmdDesc.ProductName)

    try:
        while 1:
            # Query the HMD for the current tracking state.
            ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)
            if ts.StatusFlags & (ovr.Status_OrientationTracked | ovr.Status_PositionTracked):
                pose = ts.HeadPose.ThePose
                yawRad, pitchRad, rollRad = pose.Orientation.getEulerAngles(
                    axis1=1, axis2=0, axis3=2, rotate_direction=1, handedness=1)
                yaw = int(round(np.rad2deg(yawRad)))
                pitch = int(round(np.rad2deg(pitchRad)))
                roll = int(round(np.rad2deg(rollRad)))

                # Adjustment to desired global origin
                yaw = 360 - yaw
                if yaw > 360:
                    yaw = yaw - 360

                print(['YAW: ', yaw, ' PITCH: ', pitch, 'ROLL: ', roll])

                for n in range(0, nSources):
                    # Round yaw to the angle step size
                    yaw = min(positionVectorSubSampled, key=lambda x: abs(x - yaw))
                    # channel, valueOne, valueTwo, valueThree, valueFour, valueFive, valueSix
                    binSimParameters = [n, yaw, n, 0, 0, 0, 0]
                    print([' Yaw: ', yaw])
                    client.send_message(oscIdentifier, binSimParameters)

                # print pose
                sys.stdout.flush()

            time.sleep(0.020)
    except KeyboardInterrupt:  # Break if Ctrl+C is pressed
        ovr.destroy(session)
        ovr.shutdown()
        print('Done')