Code example #1
def destroy(self):
    if self.hmd is not None:
        ovr.destroy(self.hmd)
    self.hmd = None
    self.luid = None
    self.hmdDesc = None
Code example #2
File: rift.py  Project: jherico/pyovr
def destroy(self):
    if self.session is not None:
        ovr.destroy(self.session)
    self.session = None
    self.luid = None
    self.hmdDesc = None
Code example #3
def exit(session):
    '''
    Exit the program.

    Parameters
    ----------
    session : ovrSession pointer
        Handle for the head-mounted display session.
    '''
    ovr.destroy(session)
    ovr.shutdown()
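For reference, a minimal sketch of how this exit() helper might be called, assuming it is defined in the same module: initialize() and create() are taken from the other examples on this page, while the try/finally wrapper and the empty placeholder loop are illustrative assumptions.

import ovr

ovr.initialize(None)             # start the Oculus runtime
session, luid = ovr.create()     # open an HMD session
try:
    pass                         # tracking / rendering loop would go here
finally:
    exit(session)                # destroys the session and shuts the runtime down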
Code example #4
def rift(label, filename, video_length_in_s, process_name, autoplay_enabled,
         overwrite_enabled, acr, captureviewport):
    """
    Obtains and saves the data captured by the Oculus Rift in a JSON file.

    :param label: The label of the subject.
    :param filename: The filename of the video.
    :param video_length_in_s: The length of the video in s.
    :param process_name: The name of the process to monitor.
    :param autoplay_enabled: Whether automatic playback of the video is enabled or not.
    :param overwrite_enabled: Whether overwriting the current dataset is enabled or not.
    """
    video_playing_started = False  # Tracks whether playback of the video has started
    data = {
        'filename': filename,
        'video_length_in_s': float(video_length_in_s),
        'hmd': 'rift',
        'pitch_yaw_roll_data_hmd': []
    }
    if acr:
        data["acr"] = 0

    # Initialize ovr session here as otherwise the video wouldn't be shown on the Rift.
    ovr.initialize(None)
    session, luid = ovr.create()

    if autoplay_enabled:
        # Sleep a few seconds until the video is completely loaded by Whirligig
        if process_name == "Whirligig64bit.exe":
            helpers.autoplay_sleep(
                5.5
            )  # Adjust value based on your computing power (between 4 and 6 is normal)
    else:
        print(
            "If the player has loaded the video, press the SPACE key to start the measurement."
        )

    while not video_playing_started:
        if autoplay_enabled:
            pyautogui.press("space")
            start_time = timer()
            print("Recording of Pitch/Yaw/Roll/Time data will start now.")
            video_playing_started = True
        else:
            if keyboard.is_pressed("space"):
                start_time = timer()
                print("Recording of Pitch/Yaw/Roll/Time data will start now.")
                video_playing_started = True
    try:
        pyautogui.press(
            "r"
        )  # Press the R key to ensure the user starts watching the video at the initial position
        current_playback_state = timer() - start_time
        pitch, yaw, roll = getPYRDataFromRift(session)
        # Define the current position of the user (very first yaw value) as initial position
        initial_yaw = yaw

        # Start viewport capturing if enabled
        if captureviewport:
            helpers.capture_viewport(video_length_in_s,
                                     "%s_%s" % (label, filename.split(".")[0]))

        # Get head orientation data from the Rift in a loop until the playback is finished
        while video_length_in_s > current_playback_state:
            pitch, yaw, roll = getPYRDataFromRift(session)
            current_playback_state = timer() - start_time
            # Adjust the yaw value according to the very first recorded yaw value
            yaw = helpers.shift_yaw(yaw, initial_yaw)
            if current_playback_state is not None:
                data["pitch_yaw_roll_data_hmd"].append({
                    'pitch':
                    pitch,
                    'yaw':
                    yaw,
                    'roll':
                    roll,
                    'sec':
                    current_playback_state
                })
                # print("Pitch/Yaw/Roll:  %s  /  %s  /  %s" % (int(pitch), int(yaw), int(roll)))
                # By adjusting this value, you can set the recording frequency (currently 200Hz).
                time.sleep(float(0.005))
        if acr:
            data["acr"] = helpers.get_acr_by_cmd_input()
        helpers.write_to_json(label, data,
                              filename.split(".")[0], overwrite_enabled)
        ovr.destroy(session)
        ovr.shutdown()
        print("Measurement done. Opening the next video if available.")
    except KeyboardInterrupt:  # Break if ctrl+c is pressed
        ovr.destroy(session)
        ovr.shutdown()
        print(
            "Current Measurement is interrupted and will not be saved. The program will exit now."
        )
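In the example above, the ovr.destroy()/ovr.shutdown() pair appears in both the normal path and the KeyboardInterrupt handler. Below is a minimal sketch of the same cleanup consolidated into a finally block; the measure()/run_loop wrapper is hypothetical, only the pyovr calls come from the example itself.

import ovr

def measure(run_loop):
    # Run a measurement loop and always release the Rift session afterwards.
    ovr.initialize(None)
    session, luid = ovr.create()
    try:
        run_loop(session)        # e.g. the pitch/yaw/roll recording loop above
    except KeyboardInterrupt:
        print("Current measurement is interrupted and will not be saved.")
    finally:
        ovr.destroy(session)     # runs on both the normal and the interrupted path
        ovr.shutdown()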
Code example #5
        if elapsed - experimentStart > initalWaitTime - 5:
            scenario = myScenario

        if elapsed - experimentStart > initalWaitTime:
            start = True

        if elapsed - experimentStart > initalWaitTime + droneFlybyTime:
            start = False
            reset = True
            scenario = 0

        if elapsed - experimentStart > initalWaitTime + droneFlybyTime + postWaitTime and iterations > 0:
            experimentStart = elapsed
            iterations -= 1

        if iterations <= 0:
            break

        print(elapsed)
        #unix.sleep(Ts)

except KeyboardInterrupt:
    print "Interrupted"

ovr.destroy(hmd)
ovr.shutdown()
ser.write(struct.pack('B', 0x20))
wait_for_ack()
ser.close()
print "All done!"
Code example #6
File: test_properties.py  Project: jherico/pyovr
def tearDown(self):
    ovr.destroy(self.hmd)
Code example #7
File: main_client.py  Project: precisit/telepresence
def on_close(ws):
    print "### closed ###"
    ovr.destroy(session)
    ovr.shutdown()
Code example #8
'''
@author: maddo
'''

import sys
import time
import ovr

ovr.initialize(None)
session, luid = ovr.create()
hmdDesc = ovr.getHmdDesc(session)
print(hmdDesc.ProductName)
ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)
if ts.StatusFlags & (ovr.Status_OrientationTracked
                     | ovr.Status_PositionTracked):
    pose = ts.HeadPose
for t in range(100):
    # Query the HMD for the current tracking state.
    ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)
    if ts.StatusFlags & (ovr.Status_OrientationTracked
                         | ovr.Status_PositionTracked):
        pose = ts.HeadPose
        #print(pose.ThePose.Orientation.x) #ovr.Quatf = Orientation, ovr.Vector3f = Position
        #print(pose)
        nextupdown = pose.ThePose.Orientation.z
        nextleftright = pose.ThePose.Orientation.y
        print(nextupdown)
        print(nextleftright)
        sys.stdout.flush()
    time.sleep(0.100)
ovr.destroy(session)
ovr.shutdown()
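Note that this example treats raw quaternion components (Orientation.z, Orientation.y) as up/down and left/right values, which are not angles. A short sketch of converting one head-pose sample to Euler angles instead, reusing the calls from this example and the getEulerAngles() arguments used in code example #10 below:

import ovr

ovr.initialize(None)
session, luid = ovr.create()
# Read one tracking sample and convert the orientation quaternion to Euler angles (radians).
ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)
if ts.StatusFlags & (ovr.Status_OrientationTracked | ovr.Status_PositionTracked):
    pose = ts.HeadPose.ThePose
    yaw, pitch, roll = pose.Orientation.getEulerAngles(
        axis1=1, axis2=0, axis3=2, rotate_direction=1, handedness=1)
    print(yaw, pitch, roll)
ovr.destroy(session)
ovr.shutdown()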
Code example #9
File: exp.py  Project: arunlakshmanan/quadrotorUnity
            scenario = myScenario # - 5.0
        
        if elapsed - experimentStart > initWait:
            start = True
        
        if elapsed - experimentStart > initWait + droneFlybyTime:
            start = False
            reset = True
            scenario = 0
            
        if elapsed - experimentStart > initWait + droneFlybyTime + postWait and iterations > 0:
            experimentStart = elapsed
            iterations -= 1
            reset = False
        
        if iterations <= 0:
            break
        
        print(elapsed)
        #unix.sleep(Ts)

except KeyboardInterrupt:
    print "Interrupted"
    
ovr.destroy(hmd)
ovr.shutdown()
ser.write(struct.pack('B', 0x20))
wait_for_ack()
ser.close()
print "All done!"
Code example #10
def start_tracker_app():

    oscIdentifier = '/pyBinSim'
    ip = '127.0.0.1'
    port = 10000
    nSources = 1
    minAngleDifference = 5

    if len(sys.argv) > 1:
        for i in range(len(sys.argv)):

            if sys.argv[i] == '-port':
                port = int(sys.argv[i + 1])

            if sys.argv[i] == '-ip':
                ip = sys.argv[i + 1]

            if sys.argv[i] == '-nSources':
                nSources = int(sys.argv[i + 1])

            if sys.argv[i] == '-angleStep':
                minAngleDifference = int(sys.argv[i + 1])

    # Internal settings:
    positionVector = np.arange(360)
    positionVectorSubSampled = range(0, 360, minAngleDifference)

    # Create OSC client
    parser = argparse.ArgumentParser()
    parser.add_argument("--ip", default=ip, help="The ip of the OSC server")
    parser.add_argument("--port",
                        type=int,
                        default=port,
                        help="The port the OSC server is listening on")
    args = parser.parse_args()

    client = udp_client.SimpleUDPClient(args.ip, args.port)

    # Create OVR session
    ovr.initialize(None)
    session, luid = ovr.create()
    hmdDesc = ovr.getHmdDesc(session)
    print(hmdDesc.ProductName)

    try:
        while True:
            # Query the HMD for the current tracking state.
            ts = ovr.getTrackingState(session, ovr.getTimeInSeconds(), True)

            if ts.StatusFlags & (ovr.Status_OrientationTracked
                                 | ovr.Status_PositionTracked):
                pose = ts.HeadPose.ThePose

                yawRad, pitchRad, rollRad = pose.Orientation.getEulerAngles(
                    axis1=1,
                    axis2=0,
                    axis3=2,
                    rotate_direction=1,
                    handedness=1)

                yaw = int(round(np.rad2deg(yawRad)))
                pitch = int(round(np.rad2deg(pitchRad)))
                roll = int(round(np.rad2deg(rollRad)))

                # Adjustment to desired global origin
                yaw = 360 - yaw

                if yaw > 360:
                    yaw = yaw - 360

                print(['YAW: ', yaw, ' PITCH: ', pitch, 'ROLL: ', roll])

                for n in range(0, nSources):
                    # round yaw to angle stepsize
                    yaw = min(positionVectorSubSampled,
                              key=lambda x: abs(x - yaw))
                    #channel valueOne valueTwo ValueThree valueFour valueFive ValueSix
                    binSimParameters = [n, yaw, n, 0, 0, 0, 0]
                    print([' Yaw: ', yaw])
                    client.send_message(oscIdentifier, binSimParameters)

                #print pose
                sys.stdout.flush()

            time.sleep(0.020)

    except KeyboardInterrupt:  # Break if ctrl+c is pressed

        ovr.destroy(session)
        ovr.shutdown()

        print('Done')
Code example #11
File: PythonApplication1.py  Project: ckasner/TFG
            pos = 0
            degaux = deg
            deg = 0

        pos = 0
        deg = 0

        if abs(degauxH - degH) > 8:
            data = str(degH)
            print(data)
            q2.send(data)

            posH = 0
            degauxH = degH
            degH = 0

        posH = 0
        degH = 0

    time.sleep(0.200)

ovr.destroy(session)
ovr.shutdown()