Beispiel #1
0
 def respond(self, events):
     """Update status, given the latest events in the queue.

     The scripter will normally not need to call respond directly.
     EyeScript's checkForResponse function (in the experiment module) asks
     each active input device to checkForResponse, which in turn asks all
     of that device's ResponseCollectors.

     Returns: True if there is a response or if this ResponseCollector
     timed out, False otherwise.
     """
     duration = self['duration']
     if duration not in ('infinite', 'stimulus'):
         if pylink.currentTime() >= self['onset_time'] + duration:  # timed out
             self.stop()
             return True
     if pylink.currentTime() < self['min_rt'] + self['onset_time']:
         return False
     # handleEvent must run for *every* event (it has side effects), so the
     # results are collected in a list rather than short-circuited.
     handled = [self.handleEvent(event) for event in events]
     if True in handled:
         cresp = self.params.get('cresp', False)
         if cresp != False:  # There is a correct response, so log accuracy
             self.params['acc'] = int(self.params['resp'] == cresp)
         else:
             self.params['acc'] = None
         return True
     return False
Beispiel #2
0
def wait_for_fix(el, used_bubble):
    """Wait until gaze fixates on one of the bubbles, and return it.

    Polls raw samples from the tracker `el` for up to TRIAL_LENGTH ms.
    When gaze velocity drops below 30 deg/s (a fixation), each bubble in
    used_bubble is tested; the first bubble whose (offset-corrected)
    center is within 77 px of the gaze point is returned.  If no bubble
    is fixated within TRIAL_LENGTH, a random bubble is returned.
    """
    bufferx, buffery = deque(maxlen=3), deque(maxlen=3)
    start = pylink.currentTime()
    while (pylink.currentTime() - start) < TRIAL_LENGTH:
        i = el.getNextData()
        # If it is anything other than RAW_DATA (== 200), skip to the
        # next loop iteration.
        if i != 200:
            continue
        # Current position straight from the eye tracker.
        x, y = el.getNewestSample().getLeftEye().getGaze()
        bufferx.append(x)
        buffery.append(y)

        # Need at least two samples for a velocity estimate; np.diff on a
        # single sample yields an empty array and np.mean would return nan
        # (with a RuntimeWarning).  The nan previously failed the v < 30
        # test anyway, so skipping here preserves behavior.
        if len(bufferx) < 2:
            continue

        # Compute velocity in degrees per second.
        v = np.mean(
            ((np.diff(np.array(bufferx))**2 + np.diff(np.array(buffery))**2)**
             .5) * TRACKING_FREQ) / float(PPD)

        if v < 30:  # below saccade threshold -> treat as fixation
            for bubble in used_bubble:
                # Add MAT/2 to get the center of the bubble; add 320/60 to
                # account for the higher monitor resolution.
                if ((sqrt((((bubble[0] + (MAT / 2) + 320) - x)**2) +
                          (((bubble[1] + (MAT / 2) + 60) - y)**2))) < 77):
                    return bubble
    return random.choice(
        used_bubble)  # if no fixation on bubble during trial_length
Beispiel #3
0
    def run(self):
        """Run the pupil calibration trial.

        Records while sweeping a calibration target across the screen,
        either "continuous" (a Lissajous-like path) or "discrete" (3x3
        grid cell centers plus the four interior intersections), sending
        END_FILLER / END_RT markers to the tracker.

        Raises:
            ValueError: if self.pattern is neither "continuous" nor
                "discrete".
        """
        startRecording()
        starttime = pylink.currentTime()
        getExperiment().screen.parameters.bgcolor = self.bgcolor
        getTracker().sendMessage("SYNCTIME")
        if self.pattern == "continuous":
            iteration = 0
            filler = False
            while iteration <= 1.25:
                if not filler and iteration >= 0.25:
                    # This is the point where we actually start using the
                    # data; before this was just to get the subject warmed up.
                    filler = True
                    getTracker().sendMessage("END_FILLER")
                checkForResponse()
                t = (pylink.currentTime() - starttime) * 0.00012
                t = t - sin(8 * t) / 64
                iteration = t / (2 * pi)
                getExperiment().eyelinkGraphics.draw_cal_target(
                    getExperiment()['screen_size'][0] / 2 + 153 * sin(t) +
                    204 * sin(9 * t),
                    getExperiment()['screen_size'][1] / 2 + 153 * cos(t) +
                    204 * cos(9 * t))

        elif self.pattern == "discrete":
            getExperiment().eyelinkGraphics.setCalibrationColors(
                self.color, self.bgcolor)
            targets = []
            # Centers of the 3x3 grid of screen cells.
            for i in range(3):
                for j in range(3):
                    targets.append([
                        (i + 0.5) * getExperiment()['screen_size'][0] / 3,
                        (j + 0.5) * getExperiment()['screen_size'][1] / 3
                    ])
            # Plus the four interior grid intersections.
            for i in range(1, 3):
                for j in range(1, 3):
                    targets.append([
                        i * getExperiment()['screen_size'][0] / 3,
                        j * getExperiment()['screen_size'][1] / 3
                    ])
            random.shuffle(targets)
            targets.append(
                targets[0]
            )  # Redo the first fixation point at the end so we can discard the first one
            for i, target in enumerate(targets):
                if i == 1: getTracker().sendMessage("END_FILLER")
                getExperiment().eyelinkGraphics.draw_cal_target(*target)

                # Hold each target for 1500 ms while polling for responses.
                starttime = pylink.currentTime()
                while pylink.currentTime() < 1500 + starttime:
                    checkForResponse()
        else:
            # Raising a bare string is a TypeError at runtime on modern
            # Python; raise a real exception type with the same message.
            raise ValueError(
                "PupilCalibrationTrial:  bad argument to pattern: %s" %
                self.pattern)

        getTracker().sendMessage("END_RT")
        stopRecording()
Beispiel #4
0
 def get_search_box(self):
     """Request the tracker's search-limits box and parse the reply.

     Returns a list whose first four elements are floats (the box
     coordinates; any extra reply fields are left as strings), or None if
     no reply arrives within 500 ms.  The original fell off the end of the
     loop and returned None implicitly; that is now explicit.
     """
     self.tracker.readRequest("search_limits_drawbox")
     t = pylink.currentTime()
     while pylink.currentTime() - t < 500:
         rv = self.tracker.readReply()
         # "is not None" instead of "!= None": identity test is the
         # correct (and conventional) check for a missing reply.
         if rv is not None and len(rv) > 0:
             v = rv.split(' ')
             # Only the first four fields are numeric coordinates.
             for i in range(4):
                 v[i] = float(v[i])
             return v
     return None
 def handleEvent(self, event):
     """Handle one Cedrus button-box event.

     On a button press whose key is allowed (or when 'any' response is
     accepted), record rt / rt_time / resp and return True.  On release of
     the recorded button, log the offset and stop the collector.
     """
     # FIX: the original condition was `A and B or C`; because `and` binds
     # tighter than `or`, *any* event (including button-ups) was treated as
     # a press whenever 'any' was in possible_resp.  The parentheses
     # restore the intended grouping.
     if event.type == CEDRUS_BUTTON_DOWN and (
             event.key in self['possible_resp']
             or "any" in self['possible_resp']):
         self['rt'] = event.time - self['onset_time']
         self['rt_time'] = event.time
         self['resp'] = event.key
         getTracker().sendMessage("%d %s.END_RT" % (
             self['rt_time'] - pylink.currentTime(), self['name']))
         return True
     elif event.type == CEDRUS_BUTTON_UP and event.key == self['resp']:
         self['rt_offset'] = event.time - self['onset_time']
         self['rt_offset_time'] = event.time
         getTracker().sendMessage("%d %s.offset" % (
             self['rt_offset_time'] - pylink.currentTime(), self['name']))
         self.stop()
         return False
     return False
 def start(self):
     """Begin collecting responses.

     Clears any previous response, stamps the onset time, registers this
     collector with the experiment, and marks it as running.
     """
     self['resp'] = None
     self['onset_time'] = pylink.currentTime()
     getExperiment().response_collectors.append(self)
     self.running = True
Beispiel #7
0
 def start(self):
     """Activate this ResponseCollector and record its onset time."""
     self['onset_time'] = pylink.currentTime()
     self['resp'] = None
     experiment = getExperiment()
     experiment.response_collectors.append(self)
     self.running = True
Beispiel #8
0
 def draw(self, onset=None):
     """helper method, not directly called in EyeScript scripts in general.

     Actually draw the display to the screen.

     As the display is drawn, record onset_time and swap_time, start the
     response collectors, and inform the eyetracker.

     Optional argument: onset, time in milliseconds when the screen is to
     be displayed (from the same baseline as for the times returned by
     pylink.currentTime() and recorded in the eyetracker data file).
     """
     self.drawToBuffer()
     # FIX: guard against the default onset=None -- comparing an int to
     # None raises TypeError on Python 3 (the audio draw() already guards
     # its onset the same way).
     while onset is not None and pylink.currentTime() < onset:
         checkForResponse()
     self['onset_time'] = pylink.currentTime()
     VisionEgg.Core.swap_buffers()
     for rc in self['response_collectors']:
         rc.start()
     self['swap_time'] = pylink.currentTime() - self['onset_time']
     getTracker().sendMessage("%s.SYNCTIME %d" % (
         self['name'], pylink.currentTime() - self['onset_time']))
Beispiel #9
0
def wait_for_saccade(el):
    """Watch raw gaze samples until one saccade-then-fixation is detected.

    Polls the tracker `el` for up to TRIAL_LENGTH ms.  A saccade onset is
    a velocity above 70 deg/s (at least 50 ms after the last fixation);
    its offset is a velocity below 30 deg/s at least 20 ms later.  Returns
    (fixbufferx, fixbuffery), the x/y coordinates of the initial and final
    fixations, or ([-1, -1], [-1, -1]) on timeout.
    """
    start = pylink.currentTime()
    bufferx, buffery = deque(maxlen=3), deque(maxlen=3)
    saccade = False
    lastFixTime = -50
    lastSaccadeOnset = -20
    lastSampleTime = start
    fixbufferx = []
    fixbuffery = []
    while (pylink.currentTime() - start) < TRIAL_LENGTH:
        # Ask which type of data is currently waiting in the pipe.
        i = el.getNextData()
        # If it is anything other than RAW_DATA (== 200), skip to the next
        # loop iteration.
        if i != 200:
            continue
        now = pylink.currentTime()
        # Current position straight from the eye tracker.
        x, y = el.getNewestSample().getLeftEye().getGaze()
        # FIX: the original compared against a timestamp taken on the SAME
        # iteration, so this gap check could never trigger.  Compare
        # against the previous sample's time instead: if no new samples
        # arrived for >15 ms, restart the velocity buffers from scratch.
        if now - lastSampleTime > 15:
            lastSampleTime = now
            bufferx, buffery = deque(maxlen=3), deque(maxlen=3)
            bufferx.append(x)
            buffery.append(y)
            continue
        lastSampleTime = now

        bufferx.append(x)
        buffery.append(y)
        if len(fixbufferx) < 1:
            fixbufferx.append(x)
            fixbuffery.append(y)
            el.trialmetadata("FIXATION", 0.0)  # Take first sample as first fix.
            el.trialmetadata("FIXCOOX", x)  # tag the fixation in the eye-tracking data
            el.trialmetadata("FIXCOOY", y)

        # Compute velocity in degrees per second.
        v = np.mean(((np.diff(np.array(bufferx))**2 +
                      np.diff(np.array(buffery))**2)**.5) * TRACKING_FREQ) / float(PPD)

        ## Saccade onset
        if v > 70 and not saccade and (pylink.currentTime() - lastFixTime) > 50:
            lastSaccadeOnset = pylink.currentTime()
            saccade = True
            el.trialmetadata("SACCADE", v)

        ## Saccade offset
        if v < 30 and saccade and (pylink.currentTime() - lastSaccadeOnset) > 20:
            saccade = False
            lastFixTime = pylink.currentTime()

            el.trialmetadata("FIXATION", v)
            # Record the landing point of the current saccade.
            el.trialmetadata("FIXCOOX", x)
            el.trialmetadata("FIXCOOY", y)

            fixbufferx.append(x)
            fixbuffery.append(y)

            return fixbufferx, fixbuffery
    return [-1, -1], [-1, -1]
 def respond(self, events):
     """Update status, given the latest events in the queue.

     The scripter will normally not need to directly call respond;
     EyeScript's checkForResponse function (from the experiment module)
     asks each active input device, which in turn asks all of that
     device's ResponseCollectors.

     Returns: True if there is a response or if this ResponseCollector
     timed out, False otherwise.
     """
     has_fixed_duration = self['duration'] not in ('infinite', 'stimulus')
     if has_fixed_duration and pylink.currentTime() >= self['onset_time'] + self['duration']:
         # Timed out.
         self.stop()
         return True
     if pylink.currentTime() < self['min_rt'] + self['onset_time']:
         # Too early to accept a response; handleEvent is not called.
         return False
     # Materialize the list so every event's handleEvent side effects run.
     results = [self.handleEvent(event) for event in events]
     if True not in results:
         return False
     cresp = self.params.get('cresp', False)
     # When a correct response exists, log accuracy; otherwise leave it unset.
     self.params['acc'] = None if cresp == False else int(self.params['resp'] == cresp)
     return True
Beispiel #11
0
 def handleEvent(self, event):
     """Handle one Cedrus button-box event.

     On a button press whose key is allowed (or when 'any' response is
     accepted), record rt / rt_time / resp and return True.  On release of
     the recorded button, log the offset and stop the collector.
     """
     # FIX: the original condition was `A and B or C`; because `and` binds
     # tighter than `or`, *any* event (including button-ups) was treated as
     # a press whenever 'any' was in possible_resp.  The parentheses
     # restore the intended grouping.
     if event.type == CEDRUS_BUTTON_DOWN and (
             event.key in self['possible_resp']
             or "any" in self['possible_resp']):
         self['rt'] = event.time - self['onset_time']
         self['rt_time'] = event.time
         self['resp'] = event.key
         getTracker().sendMessage(
             "%d %s.END_RT" %
             (self['rt_time'] - pylink.currentTime(), self['name']))
         return True
     elif event.type == CEDRUS_BUTTON_UP and event.key == self['resp']:
         self['rt_offset'] = event.time - self['onset_time']
         self['rt_offset_time'] = event.time
         getTracker().sendMessage(
             "%d %s.offset" %
             (self['rt_offset_time'] - pylink.currentTime(), self['name']))
         self.stop()
         return False
     return False
Beispiel #12
0
 def OnRecognition(self, StreamNumber, StreamPosition, RecognitionType, Result):
     """Called when a word/phrase is successfully recognized  -
     ie it is found in a currently open grammar with a sufficiently high
     confidence"""
     recognized = win32com.client.Dispatch(Result)
     # Convert the recognizer's tick-count timestamp into the
     # pylink.currentTime() baseline.
     latency = win32api.GetTickCount() - recognized.Times.TickCount
     speech_event = pygame.event.Event(SPEECH_RECOGNITION,
                                       streamPosition=StreamPosition,
                                       word=recognized.PhraseInfo.GetText(),
                                       time=pylink.currentTime() - latency)
     pygame.event.post(speech_event)
Beispiel #13
0
 def draw(self, onset=None):
     """helper method, not directly called in EyeScript scripts in general.

     Actually present the audio stimulus.

     As it's presented, record onset_time and swap_time, start the
     response collectors, and inform the eyetracker.

     Optional argument: onset, time in milliseconds when the stimulus is
     to be presented (from the same baseline as for the times returned by
     pylink.currentTime() and recorded in the eyetracker data file).
     """
     latency = self['audio_latency']
     # Busy-wait (while polling for responses) so playback starts early
     # enough to compensate for the audio pipeline latency.
     while onset and pylink.currentTime() < onset - latency:
         checkForResponse()
     self['onset_time'] = pylink.currentTime() + latency
     for collector in self['response_collectors']:
         collector.start()
     package = self['audio_package']
     if package == 'winsound' and winsound:
         winsound.PlaySound(self.audiofile,
                            winsound.SND_ASYNC | winsound.SND_FILENAME)
     elif package == 'pygame':
         self.channel = self.sound.play()
     self['swap_time'] = pylink.currentTime() - self['onset_time']
     getTracker().sendMessage("%s.SYNCTIME %d" % (
         self['name'], pylink.currentTime() - self['onset_time']))
Beispiel #14
0
 def __init__(self):
     """Open the serial connection to the button box and reset its timer."""
     if not serial:
         # FIX: raising a bare string is a TypeError on modern Python;
         # raise a real exception with the same message.
         raise RuntimeError("serial module must be installed to use button box.")
     try:
         self.port = serial.Serial(getExperiment()['buttonbox_com'] - 1,
                                   getExperiment()['buttonbox_baud'],
                                   timeout=0)
     except serial.SerialException:
         raise RuntimeError(
             "Error trying to connect to button box at com %s, baud %s" % (
                 getExperiment()['buttonbox_com'],
                 getExperiment()['buttonbox_baud']))
     self.resetTime = pylink.currentTime()
     self.port.write("e5")  # Reset the button box's rt timer
     self.buffer = []
Beispiel #15
0
    def run(self):
        """Run the pupil calibration trial.

        Records while sweeping a calibration target across the screen,
        either "continuous" (a Lissajous-like path) or "discrete" (3x3
        grid cell centers plus the four interior intersections), sending
        END_FILLER / END_RT markers to the tracker.

        Raises:
            ValueError: if self.pattern is neither "continuous" nor
                "discrete".
        """
        startRecording()
        starttime = pylink.currentTime()
        getExperiment().screen.parameters.bgcolor = self.bgcolor
        getTracker().sendMessage("SYNCTIME")
        if self.pattern == "continuous":
            iteration = 0
            filler = False
            while iteration <= 1.25:
                if not filler and iteration >= 0.25:
                    # This is the point where we actually start using the
                    # data; before this was just to get the subject warmed up.
                    filler = True
                    getTracker().sendMessage("END_FILLER")
                checkForResponse()
                t = (pylink.currentTime() - starttime) * 0.00012
                t = t - sin(8*t)/64
                iteration = t / (2*pi)
                getExperiment().eyelinkGraphics.draw_cal_target(getExperiment()['screen_size'][0]/2 + 153*sin(t) + 204*sin(9*t),getExperiment()['screen_size'][1]/2 + 153*cos(t) + 204*cos(9*t))

        elif self.pattern == "discrete":
            getExperiment().eyelinkGraphics.setCalibrationColors(self.color,self.bgcolor)
            targets = []
            # Centers of the 3x3 grid of screen cells.
            for i in range(3):
                for j in range(3):
                    targets.append([(i+0.5)*getExperiment()['screen_size'][0]/3,(j+0.5)*getExperiment()['screen_size'][1]/3])
            # Plus the four interior grid intersections.
            for i in range(1,3):
                for j in range(1,3):
                    targets.append([i*getExperiment()['screen_size'][0]/3,j*getExperiment()['screen_size'][1]/3])
            random.shuffle(targets)
            targets.append(targets[0]) # Redo the first fixation point at the end so we can discard the first one
            for i,target in enumerate(targets):
                if i == 1: getTracker().sendMessage("END_FILLER")
                getExperiment().eyelinkGraphics.draw_cal_target(*target)

                # Hold each target for 1500 ms while polling for responses.
                starttime = pylink.currentTime()
                while pylink.currentTime() < 1500+starttime: checkForResponse()
        else:
            # FIX: raising a bare string is a TypeError at runtime on
            # modern Python; raise a real exception with the same message.
            raise ValueError("PupilCalibrationTrial:  bad argument to pattern: %s"%self.pattern)

        getTracker().sendMessage("END_RT")
        stopRecording()
 def handleEvent(self, event):
     """Check if the speech recognition detected a word in possible_resp
     """
     if event.type != SPEECH_RECOGNITION:
         return False
     if event.word not in self['possible_resp']:
         return False
     # Recognized word is a valid response: record it and mark END_RT.
     self['rt_time'] = event.time
     getTracker().sendMessage("%d %s.END_RT" % (
         event.time - pylink.currentTime(), self['name']))
     self['resp'] = event.word
     self['rt'] = event.time - self['onset_time']
     return True
Beispiel #17
0
 def poll(self):
     """Return pending pygame keyboard events wrapped as ESevents.

     Raises RuntimeError when the Ctrl-Alt-Shift abort chord is pressed.
     """
     polltime = pylink.currentTime()
     events = pygame.event.get([KEYUP, KEYDOWN])
     for event in events:
         if (event.type == KEYDOWN and
             ((event.key in [pygame.K_LSHIFT, pygame.K_RSHIFT] and event.mod & pygame.KMOD_CTRL and event.mod & pygame.KMOD_ALT) or
              (event.key in [pygame.K_RCTRL, pygame.K_LCTRL] and event.mod & pygame.KMOD_SHIFT and event.mod & pygame.KMOD_ALT) or
              (event.key in [pygame.K_LALT, pygame.K_RALT] and event.mod & pygame.KMOD_CTRL and event.mod & pygame.KMOD_SHIFT)
              )
             ):  # Ctrl-Alt-Shift pressed
             # FIX: raising a bare string is a TypeError on modern Python;
             # raise a real exception with the same message.
             raise RuntimeError("Experiment aborted.")
     return [ESevent(event, polltime) for event in events]
Beispiel #18
0
 def OnRecognition(self, StreamNumber, StreamPosition, RecognitionType,
                   Result):
     """Called when a word/phrase is successfully recognized  -
     ie it is found in a currently open grammar with a sufficiently high
     confidence"""
     phrase = win32com.client.Dispatch(Result)
     # Translate the recognizer's tick-count timestamp into the
     # pylink.currentTime() baseline.
     elapsed = win32api.GetTickCount() - phrase.Times.TickCount
     pygame.event.post(
         pygame.event.Event(SPEECH_RECOGNITION,
                            streamPosition=StreamPosition,
                            word=phrase.PhraseInfo.GetText(),
                            time=pylink.currentTime() - elapsed))
Beispiel #19
0
 def handleEvent(self, event):
     """Check if the speech recognition detected a word in possible_resp
     """
     if event.type == SPEECH_RECOGNITION and event.word in self['possible_resp']:
         # Valid spoken response: record the reaction time and the word.
         self['rt_time'] = event.time
         message = "%d %s.END_RT" % (event.time - pylink.currentTime(),
                                     self['name'])
         getTracker().sendMessage(message)
         self['resp'] = event.word
         self['rt'] = event.time - self['onset_time']
         return True
     return False
Beispiel #20
0
 def __init__(self):
     """Open the serial connection to the button box and reset its timer."""
     if not serial:
         # FIX: raising a bare string is a TypeError on modern Python;
         # raise a real exception with the same message.
         raise RuntimeError("serial module must be installed to use button box.")
     try:
         self.port = serial.Serial(getExperiment()['buttonbox_com'] - 1,
                                   getExperiment()['buttonbox_baud'],
                                   timeout=0)
     except serial.SerialException:
         raise RuntimeError(
             "Error trying to connect to button box at com %s, baud %s" % (
                 getExperiment()['buttonbox_com'],
                 getExperiment()['buttonbox_baud']))
     self.resetTime = pylink.currentTime()
     self.port.write("e5")  # Reset the button box's rt timer
     self.buffer = []
Beispiel #21
0
 def poll(self):
     polltime = pylink.currentTime()
     events = pygame.event.get([KEYUP, KEYDOWN])
     for event in events:
         if (event.type == KEYDOWN and
             ((event.key in [pygame.K_LSHIFT, pygame.K_RSHIFT] and event.mod
               & pygame.KMOD_CTRL and event.mod & pygame.KMOD_ALT) or
              (event.key in [pygame.K_RCTRL, pygame.K_LCTRL] and event.mod
               & pygame.KMOD_SHIFT and event.mod & pygame.KMOD_ALT) or
              (event.key in [pygame.K_LALT, pygame.K_RALT]
               and event.mod & pygame.KMOD_CTRL and
               event.mod & pygame.KMOD_SHIFT))):  # Ctrl-Alt-Shift pressed
             raise "Experiment aborted."
     return [ESevent(event, polltime) for event in events]
Beispiel #22
0
    def run(self,onset=None):
        """Present the stimulus and poll for responses until done.

        Optional argument: onset, time in milliseconds when the stimulus
        is to be presented (same baseline as pylink.currentTime()).
        """
        checkForResponse() # Clears the pygame event buffer
        self.draw(onset=onset)
        # Poll until the duration elapses.  'infinite'/'stimulus' never
        # time out on the clock, but 'stimulus' also ends as soon as the
        # playback channel goes idle (or was never started).
        while ((self['duration'] in ['infinite','stimulus'] or pylink.currentTime() < self['onset_time'] + self['duration'])
               and not (self['duration'] == 'stimulus' and (not self.channel or not self.channel.get_busy()))
               ):
            responses = checkForResponse()
            # Stop early as soon as one of *our* response collectors fired.
            if [rc for rc in self['response_collectors'] if rc in responses]: break

        if self['duration'] == 'stimulus': self.stop()
        
        for rc in self['response_collectors']:
            if rc['duration'] == 'stimulus': rc.stop()
        
        checkForResponse() # This will stop any response collectors whose duration equals this display's duration
        
        self.log()
 def handleEvent(self, event):
     """Poll the eyetracker (or a fake) for a gaze response.

     Returns True once a response has been recorded, False otherwise.
     Raises TrialAbort if the tracker reports recording was interrupted.
     """
     if not getExperiment().recording:
         self.stop()
         return False
     if getTracker():
         action = getTracker().isRecording()
         if action != pylink.TRIAL_OK:
             raise TrialAbort(action)
         return self.checkEyeLink()
     else:
         # So that the experiment can be tested without the eyetracker,
         # just fake a response after 2000 milliseconds.
         if pylink.currentTime() > self['onset_time'] + 2000:
             self['resp'] = self['possible_resp'] and self['possible_resp'][0]
             self['rt'] = 2000
             self['rt_time'] = self['onset_time'] + 2000
             self.stop()
             return True
         # Previously fell through and returned None; make the
         # "no response yet" result explicit.
         return False
Beispiel #24
0
 def handleEvent(self, event):
     """Poll the eyetracker (or a fake) for a gaze response.

     Returns True once a response has been recorded, False otherwise.
     Raises TrialAbort if the tracker reports recording was interrupted.
     """
     if not getExperiment().recording:
         self.stop()
         return False
     if getTracker():
         action = getTracker().isRecording()
         if action != pylink.TRIAL_OK:
             raise TrialAbort(action)
         return self.checkEyeLink()
     else:
         # So that the experiment can be tested without the eyetracker,
         # just fake a response after 2000 milliseconds.
         if pylink.currentTime() > self['onset_time'] + 2000:
             self['resp'] = self['possible_resp'] and self['possible_resp'][0]
             self['rt'] = 2000
             self['rt_time'] = self['onset_time'] + 2000
             self.stop()
             return True
         # Previously fell through and returned None; make the
         # "no response yet" result explicit.
         return False
 def checkEyeLink(self):
     """Test the newest tracker sample against the gaze-response areas.

     Reads the newest sample for the eye in use (self.eyeUsed: 0 = left,
     1 = right).  If its gaze position falls inside one of the areas in
     self['possible_resp'], records rt_time / rt / resp / xy_coords and
     returns True; otherwise returns False.
     """
     time = pylink.currentTime()
     sample = getTracker().getNewestSample()
     # Pick the eye-specific data matching self.eyeUsed; sampledata is
     # falsy when there is no sample or the sample is for the other eye.
     sampledata = sample and (
             (self.eyeUsed == 1 and sample.isRightSample() and sample.getRightEye()) or
             (self.eyeUsed == 0 and sample.isLeftSample() and sample.getLeftEye())
             )
     if sampledata:
         for area in self['possible_resp']:
             if area.contains(sampledata.getGaze()):
                 # self.params['rt_time'] = sample.getTime()
                 # self.params['rt'] = sample.getTime() - self.params['onset_time']
                 self.params['rt_time'] = time
                 self.params['rt'] = time - self.params['onset_time']
                 self.params['resp'] = area
                 
                 #This is used in GazePredictor, but it was cleaner  to just put it here -- Craig
                 self.params['xy_coords'] = sampledata.getGaze() 
                 
                 # I'm not sure why we used to stop  here if a response was recorded. I think we only want to stop when we time out -- Mike
                 #self.stop()
                 return True
     return False
Beispiel #26
0
 def run(self, onset=None):
     """Draw the screen and collect the response.

     Optional argument: onset, time in milliseconds when the stimulus is
     to be displayed (from the same baseline as for the times returned by
     pylink.currentTime() and recorded in the eyetracker data file).

     The onset argument will typically be used to precisely space out
     displays.  For example, the following shows a second display exactly
     500 milliseconds after a first:

     d1 = TextDisplay(stim1,duration = 0,response_collector = [rc])
     d2 = TextDisplay(stim2)
     d1.run()
     d2.run(onset = d1['onset_time']+500)
     """
     checkForResponse()  # Clears the pygame event buffer
     self.draw(onset=onset)
     while True:
         # An 'infinite' duration never times out on the clock.
         if self['duration'] != 'infinite' and \
                 pylink.currentTime() >= self['onset_time'] + self['duration']:
             break
         responses = checkForResponse()
         answered = [rc for rc in self['response_collectors'] if rc in responses]
         if answered:
             break
     for collector in self['response_collectors']:
         if collector['duration'] == 'stimulus':
             collector.stop()
     # This will stop any response collectors whose duration equals this
     # display's duration.
     checkForResponse()
     self.log()
Beispiel #27
0
 def poll(self):
     """Collect pending pygame mouse events, stamped with the poll time."""
     timestamp = pylink.currentTime()
     pending = pygame.event.get([MOUSEBUTTONUP, MOUSEBUTTONDOWN, MOUSEMOTION])
     return [ESevent(mouse_event, timestamp) for mouse_event in pending]
# Filename: broadcast_simple.py

import pylink
listener = pylink.EyeLinkListener()

print('wait for the primary connection to the tracker')
link_connected = 0
while not link_connected:
    # access link status info, returns an instance of the ILinkData class
    idata = listener.getTrackerInfo()
    listener.requestTime()  # force tracker to send status and time
    t = pylink.currentTime()
    while pylink.currentTime() - t < 500:   # wait up to 500 ms for a reply
        tt = listener.readTime()   # will be nonzero if reply
        # FIX: the original used "tt is not 0", an identity comparison
        # against an int literal -- unreliable (and a SyntaxWarning on
        # Python 3.8+); use a value comparison instead.
        if tt != 0:  # extract connection state
            if idata.getLinkFlags() & pylink.LINK_CONNECTED:
                print('Link Status: %s - connected' % idata.getLinkFlags())
                link_connected = pylink.LINK_CONNECTED
                break

# send over command to instruct the Host PC to enter the broadcasting mode
listener.broadcastOpen()

# in a while loop, check the current operation mode of the tracker
# and save the sample data (gaze position) to file if in recording mode
smp_data = open('sample_data.csv', 'w')
mode = -1
smp_t = -32768  # initial timestamp for samples
while listener.isConnected():
    current_mode = listener.getTrackerMode() # get the curent Host mode
    # print a warning message when switching modes
Beispiel #29
0
# on a second computer

import pylink
listener = pylink.EyeLinkListener()

print('Wait for a primary connection to the tracker...')
link_connected = 0
while not link_connected:
    # Link status info, returns an instance of the ILinkData class
    idata = listener.getTrackerInfo()

    # Use the requestTime() and readTime() functions in pair to
    # check if the tracker is active; if so, readTime() should return
    # a non-zero value
    listener.requestTime()
    t_start = pylink.currentTime()
    while (pylink.currentTime() - t_start < 500):
        tracker_time = listener.readTime()
        # FIX: the original used "is not 0", an identity comparison
        # against an int literal -- unreliable (and a SyntaxWarning on
        # Python 3.8+); use a value comparison instead.
        if tracker_time != 0:
            if (idata.getLinkFlags() & pylink.LINK_CONNECTED):
                print('Link Status: %s - connected' % idata.getLinkFlags())
                link_connected = pylink.LINK_CONNECTED
                break

# Request the Host PC to switch to enter the broadcast mode
listener.broadcastOpen()

# If there is a primary connection, check the current operation mode
# and save the sample data (gaze position) to file if in recording mode
smp_data = open('sample_data.csv', 'w')
mode = -1  # initial tracker operation mode
def drawCondition(FIX_duration, GAP_duration, S1_duration, S2_duration):
    """Run the realtime drawing loop for one trial.

    All ``*_duration`` arguments are in frames.  The trial timeline is
    fixation -> gap -> S1 -> S2; the cumulative frame indices at which
    each phase change happens are computed below.  The fixation branch
    additionally waits for a keypress before the trial proceeds, and
    every phase transition is time-stamped to the tracker via
    MyEyelink.sendMessage with a SYNCTIME offset.

    Returns pylink.ABORT_EXPT (Ctrl+Q) or pylink.REPEAT_TRIAL (Ctrl+R),
    a nonzero error code if recording aborts, otherwise
    MyEyelink.getRecordingStatus() after the trial ends.

    NOTE(review): frameN starts at 0 but the fixation-onset branch fires
    at frameN == 1, so the very first frame flips only the background —
    presumably an intentional warm-up flip; confirm.
    """
    # Cumulative frame indices of each phase transition.
    S2_OFF = FIX_duration + GAP_duration + S1_duration + S2_duration ## could be replace with a cumsum of a line
    S1S2_OFFON = FIX_duration + GAP_duration + S1_duration
    S1_ON = FIX_duration + GAP_duration
    FIX_OFF = FIX_duration
    MyEyelink.flushKeybuttons(0)
    buttons =(0, 0);
    # Loop of realtime
    for frameN in xrange(S2_OFF):
        # Hotkey handling: Ctrl+Q aborts the experiment, Ctrl+R repeats
        # the trial; both end the current trial first.
        for event in pygame.event.get():
            if event.type == pygame.KEYDOWN:
                    k = pygame.key.get_pressed()
                    m = pygame.key.get_mods()
                    if m & pygame.KMOD_CTRL and k[pygame.K_q]:
                        end_trial();
                        print "Crtl + Q pressed: quit the program"
                        return pylink.ABORT_EXPT;
                    elif m & pygame.KMOD_CTRL and k[pygame.K_r]:
                        end_trial();
                        print "Crtl + R pressed: repeat trial"
                        return pylink.REPEAT_TRIAL
        MySurface.fill(BACKGROUND)
        # check input (should be in a function)
        if dummy:
            MyEyelink.update()

        error = MyEyelink.isRecording()  # First check if recording is aborted
        if error!=0:
            end_trial();
            return error

        # here you draw
        if frameN == 1:
            # Fixation onset: draw it, log the onset, then block until a
            # keypress before logging the press and continuing the loop.
            startTime = pylink.currentTime()
            fixation.draw()
            MyEyelink.sendMessage("STIMULUS Fixation ON %.3f %.3f SYNCTIME %d"%(fixation.getPolarPos(0), fixation.getPolarPos(1), pylink.currentTime()-startTime));
            while (pygame.event.wait().type != pygame.KEYDOWN): pass
            MyEyelink.sendMessage("STIMULUS Fixation PRESSED %.3f %.3f SYNCTIME %d"%(fixation.getPolarPos(0), fixation.getPolarPos(1), pylink.currentTime()-startTime));
        elif frameN < FIX_OFF:
            # Fixation period: keep the fixation mark on screen.
            fixation.draw()
        elif frameN == FIX_OFF:
            # Fixation offset (start of the gap); nothing is drawn.
            MyEyelink.sendMessage("STIMULUS Fixation OFF %.3f %.3f SYNCTIME %d"%(fixation.getPolarPos(0), fixation.getPolarPos(1), pylink.currentTime()-startTime));
        elif frameN == S1_ON:
            # S1 onset.
            startTime = pylink.currentTime()
            stimulus1.draw()
            MyEyelink.sendMessage("STIMULUS S1 ON %.3f %.3f SYNCTIME %d"%(stimulus1.getPolarPos(0), stimulus1.getPolarPos(1), pylink.currentTime()-startTime));
        elif frameN > S1_ON and frameN < S1S2_OFFON:
            stimulus1.draw()
        elif frameN == S1S2_OFFON:
            # S1 offset and S2 onset on the same frame.
            startTime = pylink.currentTime()
            stimulus2.draw()
            MyEyelink.sendMessage("STIMULUS S1 OFF %.3f %.3f SYNCTIME %d"%(stimulus1.getPolarPos(0), stimulus1.getPolarPos(1), pylink.currentTime()-startTime));
            MyEyelink.sendMessage("STIMULUS S2 ON %.3f %.3f SYNCTIME %d"%(stimulus2.getPolarPos(0), stimulus2.getPolarPos(1), pylink.currentTime()-startTime));
        elif frameN > S1S2_OFFON and frameN < (S2_OFF-1):
            stimulus2.draw()
        elif frameN == (S2_OFF-1):
            # S2 offset on the last frame of the trial.
            MyEyelink.sendMessage("STIMULUS S2 OFF %.3f %.3f SYNCTIME %d"%(stimulus2.getPolarPos(0), stimulus2.getPolarPos(1), pylink.currentTime()-startTime));
        if dummy:
            text.draw()
            utils.drawFPS(fps, clock)
        display.flip()
        # Record the actual frame interval for later timing diagnostics.
        FRAME_INTERVALS.append(clock.tick_busy_loop(FPS_CONTROL))

    end_trial();

    #The TRIAL_RESULT message defines the end of a trial for the EyeLink Data Viewer.
    #This is different than the end of recording message END that is logged when the trial recording ends.
    #Data viewer will not parse any messages, events, or samples that exist in the data file after this message.
    MyEyelink.sendMessage("TRIAL_RESULT %d"%(buttons[0]));
    return MyEyelink.getRecordingStatus()
    def doSim(self, trial, road, duration, tau, doEyetrack):

        # Measure sample rate in order to calculate delay buffer
        sample_rate = self.screen.measure_refresh_rate(2.0)
        print "Sample rate: " + str(sample_rate)
        #sample_rate = 60

        self.doEyetrack = doEyetrack

        self.pos_ring = RingBuffer(self.center,
                                   int(math.floor(tau * sample_rate)) + 1)
        print("Ring Buffer:: size: " + str(self.pos_ring.size))

        if doEyetrack:
            import pylink
            from EyeLinkCoreGraphicsVE import EyeLinkCoreGraphicsVE

            self.tracker = pylink.EyeLink()
            if self.tracker == None:
                print "Error: Eyelink is not connected"
                sys.exit()

            genv = EyeLinkCoreGraphicsVE(self.screen, self.tracker)
            pylink.openGraphicsEx(genv)

            #Opens the EDF file.
            edfFileName = "TRIAL" + str(trial) + ".EDF"
            self.tracker.openDataFile(edfFileName)

            pylink.flushGetkeyQueue()

            self.tracker.sendCommand("screen_pixel_coords =	0 0 %d %d" %
                                     (VisionEgg.config.VISIONEGG_SCREEN_W,
                                      VisionEgg.config.VISIONEGG_SCREEN_H))

            tracker_software_ver = 0
            eyelink_ver = self.tracker.getTrackerVersion()
            if eyelink_ver == 3:
                tvstr = self.tracker.getTrackerVersionString()
                vindex = tvstr.find("EYELINK CL")
                tracker_software_ver = int(
                    float(tvstr[(vindex + len("EYELINK CL")):].strip()))

            if eyelink_ver >= 2:
                self.tracker.sendCommand("select_parser_configuration 0")
                if eyelink_ver == 2:  #turn off scenelink camera stuff
                    self.tracker.sendCommand("scene_camera_gazemap = NO")
            else:
                self.tracker.sendCommand("saccade_velocity_threshold = 35")
                self.tracker.sendCommand(
                    "saccade_acceleration_threshold = 9500")

            # set EDF file contents
            self.tracker.sendCommand(
                "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
            )
            if tracker_software_ver >= 4:
                self.tracker.sendCommand(
                    "file_sample_data	= LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
                )
            else:
                self.tracker.sendCommand(
                    "file_sample_data	= LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")

            # set link data (used for gaze cursor)
            self.tracker.sendCommand(
                "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
            if tracker_software_ver >= 4:
                self.tracker.sendCommand(
                    "link_sample_data	= LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
                )
            else:
                self.tracker.sendCommand(
                    "link_sample_data	= LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")

            if not self.doneSetup:
                self.tracker.doTrackerSetup()
                self.doneSetup = True
            else:
                while 1:
                    try:
                        error = self.tracker.doDriftCorrect(
                            self.screen.size[0] / 2, self.screen.size[1] / 2,
                            1, 1)
                        if error != 27:  # ?? from example
                            break
                        else:
                            self.tracker.doTrackerSetup()
                    except:
                        break

        self.screen.parameters.bgcolor = 106.0 / 255.0, 147.0 / 255.0, 0.0
        # Load road data from file and create an image
        roadArray = numpy.loadtxt('road' + str(road) + '.txt')

        # Convert to a Path
        roadPath = ImagePath.Path(
            map(lambda xy: (xy[0], xy[1]), roadArray.tolist()))

        # Use Path to create a plot of the road
        im = Image.new("RGB", (2000, 100), (50, 50, 50))
        draw = ImageDraw.Draw(im)

        # draw each side of the road separately
        draw.line(roadPath[:4000], fill=(200, 200, 200))
        draw.line(roadPath[4000:], fill=(200, 200, 200))

        del draw

        # Lay out a road texture in the x-z plane
        roadTexture = Texture(im)

        del im

        eye_height = 2.5

        vertices = [(-10, -eye_height, 0), (-10, -eye_height, -1000),
                    (10, -eye_height, 0), (10, -eye_height, -1000)]

        rect = TextureStimulus3D(texture=roadTexture,
                                 lowerleft=vertices[0],
                                 lowerright=vertices[1],
                                 upperleft=vertices[2],
                                 upperright=vertices[3])

        # We will use these later for our camera transforms
        self.camera_matrix = ModelView()
        self.frame_timer = FrameTimer()

        self.outf = open(
            'steersim-' + str(trial) + '-' + str(road) + '-out.txt', 'wb')

        # Vewport for the road
        viewport3D = Viewport(
            screen=self.screen,
            projection=SimplePerspectiveProjection(fov_x=75.2),
            camera_matrix=self.camera_matrix,
            stimuli=[rect])

        # Construct a sky
        sky_l = 0
        sky_r = self.screen.size[0]
        sky_t = self.screen.size[1]
        sky_b = self.screen.size[1] / 2

        sky_vertices = [(sky_l, sky_t, 0), (sky_r, sky_t, 0),
                        (sky_r, sky_b, 0), (sky_l, sky_b, 0)]

        sky = Rectangle3D(color=(144.0 / 255.0, 190.0 / 255.0, 1.0),
                          vertex1=sky_vertices[0],
                          vertex2=sky_vertices[1],
                          vertex3=sky_vertices[2],
                          vertex4=sky_vertices[3])

        wheelTexture = Texture('wheel.png')
        self.wheel = TextureStimulus(texture=wheelTexture,
                                     internal_format=gl.GL_RGBA,
                                     position=(self.center, -75),
                                     anchor='center')

        # display the sky in its own viewport
        viewport2D = Viewport(screen=self.screen)
        viewport2D.parameters.stimuli = [sky, self.wheel]

        self.init_state()

        askText = Text(text='Press a key to start',
                       anchor='center',
                       position=(self.center, self.screen.size[1] / 2))
        splash = Viewport(screen=self.screen)
        splash.parameters.stimuli = [askText]
        self.askForNext = Presentation(go_duration=(0.5, 'seconds'),
                                       viewports=[splash])
        self.askForNext.add_controller(
            None, None, FunctionController(during_go_func=self.wait_for_key))
        self.askForNext.parameters.enter_go_loop = True
        self.askForNext.run_forever()

        self.simPres = Presentation(go_duration=(duration, 'seconds'),
                                    viewports=[viewport3D, viewport2D],
                                    handle_event_callbacks=[
                                        (pygame.KEYDOWN, self.check_keypress)
                                    ])
        self.simPres.add_controller(
            None, None, FunctionController(during_go_func=self.update))

        if doEyetrack:
            startTime = pylink.currentTime()
            self.tracker.sendMessage("SYNCTIME %d" %
                                     (pylink.currentTime() - startTime))
            error = self.tracker.startRecording(1, 1, 1, 1)
            self.tracker.sendMessage("PRES %d START" % (trial))

        self.simPres.go()

        if doEyetrack:
            self.tracker.sendMessage("PRES %d END" % (trial))
            self.tracker.stopRecording()

            # File transfer and cleanup!
            self.tracker.setOfflineMode()
            pylink.msecDelay(500)
            #Close the file and transfer it to Display PC
            self.tracker.closeDataFile()
            self.tracker.receiveDataFile(edfFileName, edfFileName)

        self.outf.close()

        if self.quit:
            raise SystemExit
Beispiel #32
0
 def poll(self):
     """Return all pending mouse events wrapped as ESevents.

     The pylink timestamp is taken once at the start of the poll, so
     every event returned by a single call shares the same time.
     """
     stamp = pylink.currentTime()
     pending = pygame.event.get(
         [MOUSEBUTTONUP, MOUSEBUTTONDOWN, MOUSEMOTION])
     wrapped = []
     for evt in pending:
         wrapped.append(ESevent(evt, stamp))
     return wrapped
Beispiel #33
0
def sacc_detection(el, used_bubble):
    """Online saccade detection from live EyeLink samples.

    Polls the tracker until TRIAL_LENGTH ms have elapsed, estimating gaze
    velocity (deg/s) from a 3-sample sliding window.  Returns the bubble
    from `used_bubble` that the gaze already fixates (velocity < 30) or
    lands near after a saccade (onset at velocity > 70, offset below 50);
    if neither happens within the trial, returns a random bubble.
    Detection metadata is logged via el.trialmetadata().

    el -- tracker wrapper providing getNextData(), getNewestSample()
          and trialmetadata()
    used_bubble -- sequence of (x, y) bubble positions; the +320/+60 and
          MAT/2 offsets convert them to gaze coordinates — presumably
          matching the display layout, confirm against the drawing code
    """
    #buffer for x coordiante, y coordinate, velocity
    bufferx, buffery, bufferv = deque(maxlen=3), deque(maxlen=3), deque(
        maxlen=4)
    start = pylink.currentTime()
    saccade = 0  # 1 while a saccade is in progress, else 0
    #start_time = []
    while (pylink.currentTime() - start) < TRIAL_LENGTH:
        i = el.getNextData()
        # if it is anything other than RAW_DATA (=200), skip to the next iteration
        if i != 200: continue
        # current gaze position straight from the eye tracker
        x, y = el.getNewestSample().getLeftEye().getGaze()
        bufferx.append(float(x))
        buffery.append(float(y))

        # Compute velocity in degrees per second
        # NOTE(review): with only one sample buffered, np.diff is empty and
        # np.mean returns nan (with a RuntimeWarning); nan fails both
        # thresholds below, so the very first sample is effectively skipped.
        bufferv.append(
            np.mean(((np.diff(np.array(bufferx))**2 +
                      np.diff(np.array(buffery))**2)**.5) * TRACKING_FREQ) /
            float(PPD))

        #saccade_onset
        # Below 30 deg/s the eye counts as fixating.
        if bufferv[-1] < 30:
            saccade = 0
            #check if sample already in next bubble
            for bubble in used_bubble:
                if ((sqrt((((bubble[0] + (MAT / 2) + 320) - x)**2) +
                          (((bubble[1] + (MAT / 2) + 60) - y)**2))) < MAT / 2):
                    el.trialmetadata('start_x', bufferx[-1])
                    el.trialmetadata('start_y', buffery[-1])
                    el.trialmetadata('start_velocity', bufferv[-1])
                    el.trialmetadata('end_x', bufferx[-1])
                    el.trialmetadata('end_y', buffery[-1])
                    el.trialmetadata('end_velocity', bufferv[-1])
                    el.trialmetadata('sacc_detection', 'start_in_bubble')
                    return bubble
        # Saccade onset: velocity crosses 70 deg/s while not in a saccade.
        if saccade == 0 and bufferv[-1] > 70:
            start_x = float(bufferx[-1])
            start_y = float(buffery[-1])
            #start_time = pylink.currentTime()
            saccade = 1
            el.trialmetadata('start_x', start_x)
            el.trialmetadata('start_y', start_y)
            el.trialmetadata('start_velocity', bufferv[-1])
            #continue
        '''    
        #saccade end
        if start_time and np.all(np.diff(bufferv)<0):
            
            #if abs((start_x - bufferx[-1])) < 0.00000001:
            #    alpha = 3.1415926535897931 / 2 # pi/2 = 90deg
            #else:
            alpha = atan2((buffery[-1]-start_y),(bufferx[-1]-start_x))
            
            predLength = exp((log(bufferv[0]) - 4.6)/.55)*PPD
            predX = start_x + cos(alpha) * predLength
            predY = start_y + sin(alpha) * predLength
            el.trialmetadata('predX', predX)
            el.trialmetadata('predY', predY)
            el.trialmetadata('end_velocity', bufferv)
            start_time = []
            for bubble in used_bubble:
                if ((sqrt((((bubble[0]+(MAT/2)+320)-predX)**2) + (((bubble[1]+(MAT/2)+60)-predY)**2))) < MAT):
                    print "predicted bubble found"
                    return bubble 
        '''
        # Saccade offset: velocity drops below 50 deg/s during a saccade;
        # accept a bubble within 2/3 of the bubble size MAT.
        if bufferv[-1] < 50 and saccade:
            for bubble in used_bubble:
                if ((sqrt((((bubble[0] + (MAT / 2) + 320) - x)**2) +
                          (((bubble[1] +
                             (MAT / 2) + 60) - y)**2))) < 2 * MAT / 3):
                    el.trialmetadata('end_x', bufferx[-1])
                    el.trialmetadata('end_y', buffery[-1])
                    el.trialmetadata('end_velocity', bufferv[-1])
                    el.trialmetadata('sacc_detection', 'pred_in_bubble')
                    return bubble

        #check if sample near bubble (in distance of 2 * radius MAT/2)
    #print "random bubble returned"
    el.trialmetadata('sacc_detection', 'random')
    return random.choice(
        used_bubble)  #if no prediction on bubble during trial_length