Exemple #1
0
def eyeQuitRecording(eyeTracker, dataFileName):
    """Shut down an EyeLink session and retrieve the EDF data file.

    Puts the tracker in idle/offline mode, closes the data file on the
    Host PC, downloads it to the local machine, then closes the link.

    eyeTracker   -- an open pylink EyeLink connection
    dataFileName -- EDF file name on the Host PC; also used as the base
                    of the local file name (".EDF" is appended locally)
    """
    eyeTracker.setOfflineMode()
    eyeTracker.closeDataFile()
    pylink.pumpDelay(50)  # give the Host PC time to finish closing the file

    # NOTE(review): the local copy is named dataFileName + ".EDF"; if
    # dataFileName already carries an .edf extension it gets doubled — confirm.
    eyeTracker.receiveDataFile(dataFileName, dataFileName + ".EDF")
    eyeTracker.close()
Exemple #2
0
 def End_trial(self):
     """End the trial: leave realtime mode, keep 100 ms of trailing
     data to catch final events, stop recording, drain the key queue."""
     pylink.endRealTimeMode()  # return to normal process priority
     pylink.pumpDelay(100)  # extra 100 ms of data for trial-final events
     self.eyelink.stopRecording()
     # Flush any queued keypresses from the tracker.
     while self.eyelink.getkey():
         pass
Exemple #3
0
 def endTrial(self):
     """Finish recording for the current trial.

     An extra 100 msec of data is kept so trial-final events are not
     lost; the tracker key queue is drained before returning.
     """
     pylink.endRealTimeMode()
     pylink.pumpDelay(100)
     tracker = self.tracker
     tracker.stopRecording()
     while tracker.getkey():
         pass
Exemple #4
0
 def endTrial(self):
     """Stop recording; 100 msec of extra data is captured to catch final events."""
     pylink.endRealTimeMode()  # drop realtime priority
     pylink.pumpDelay(100)     # capture trailing events
     self.tracker.stopRecording()
     # Consume any queued tracker keypresses.
     while self.tracker.getkey():
         pass
 def endTrial(self):
     '''Ends recording: adds 100 msec of data to catch final events'''
     pylink.endRealTimeMode(
     )  # Returns the application to a priority slightly above normal, ending realtime mode
     pylink.pumpDelay(100)  # let ~100 ms of trailing samples/events accumulate before stopping
     self.tracker.stopRecording(
     )  # Stops recording, resets EyeLink data mode. Call 50 to 100 msec after the event that ends the trial.
Exemple #6
0
 def postTrial(self):
     """Wrap up a trial: mark it in the EDF, catch final events, stop recording."""
     # Tag the data stream so analysis can locate the trial end.
     self.tracker.sendMessage("POSTTRIAL")
     pylink.endRealTimeMode()
     pylink.pumpDelay(100)  # extra 100 ms of data for trial-final events
     self.tracker.stopRecording()
     # Drain the tracker key queue.
     while self.tracker.getkey():
         pass
def end_trial():
    """Blank the display and stop EyeLink recording.

    An extra 100 msec of data is recorded to catch final events.
    """
    # Clear the subject display to black.
    MySurface.fill((0, 0, 0))
    pylink.endRealTimeMode()
    pylink.pumpDelay(100)  # trailing 100 ms of samples
    MyEyelink.stopRecording()
    # Flush pending tracker keypresses.
    while MyEyelink.getkey():
        pass
Exemple #8
0
    def stop_recording(self):
        """Stop EyeLink recording, keeping 100 msec of trailing data
        so that trial-final events are not lost."""
        pylink.endRealTimeMode()
        pylink.pumpDelay(100)
        self.tracker.stopRecording()
        # Drain the tracker's key queue before returning.
        while self.tracker.getkey():
            pass
Exemple #9
0
	def stop(self):
		"""Stops recording data from the eye tracker. 
				
		Called automatically at the end of each trial unless ``P.manual_eyelink_recording`` is
		True, in which case it must be called manually in order to stop recording at any point. 
		To resume recording after this method is called, use the :meth:`start` method.

		"""
		endRealTimeMode()
		pumpDelay(100)  # record an extra 100 ms to catch trial-final events
		self.stopRecording()
		self.__recording = False  # mirror tracker state locally
		self.sendMessage("TRIAL OK")  # mark a clean trial end in the EDF
		flushGetkeyQueue()  # discard any queued tracker keypresses
 def start_recording(self):
     """Prepare the Host PC for a trial and begin EyeLink recording.

     NOTE(review): ``trialNum`` and ``scenestim`` are neither parameters
     nor attributes here — presumably module-level globals; verify.
     Setting ``self.recording = False`` in a *start* method also looks
     inverted — confirm against the rest of the class.
     """
     self.recording = False
     # Mark the start of the trial in the EDF data file.
     pylink.getEYELINK().sendMessage('TRIALID %s'%(trialNum))
     # Show trial/image info on the Host PC status line.
     pylink.getEYELINK().sendCommand("record_status_message 'trial %s image %s'" %(trialNum,scenestim))
     pylink.getEYELINK().sendCommand("set_idle_mode")
     core.wait(.05) #delay so tracker is ready (using psychopy)
     pylink.getEYELINK().sendCommand("clear_screen 0")
     # Draw a 200x200 px box around the screen centre on the Host display.
     pylink.getEYELINK().sendCommand("draw_box %s %s %s %s %s" %(self.w/2 - 100, self.h/2 - 100, self.w/2 + 100, self.h/2 + 100,  7))
     
     # Begin recording
     pylink.getEYELINK().startRecording(1, 1, 1, 1)
     pylink.pumpDelay(100)#100 milliseconds of data to accumulate before the trial display starts
     self.inbox = False #reset gaze contingent fixation
     self.Finished = False #if gaze contingent fixation failed
Exemple #11
0
def CoolDown():
    """End-of-run housekeeping.

    Saves the behavioral data, stops EyeLink recording, shows a
    goodbye screen, waits for a quit key, transfers the EDF file from
    the Host PC, then closes the tracker link, graphics, and PsychoPy.

    Reads module-level globals: thisExp, filename, tk, win, message1,
    message2, dataFileName, dataFolder.
    """
    
    # Save behavioral data
    thisExp.nextEntry() # advance data file
    thisExp.saveAsWideText(filename + '.csv')
    
    # stop recording eye data (keep 100 ms of trailing data first)
    pylink.endRealTimeMode()
    pylink.pumpDelay(100)
    error = tk.stopRecording()  # NOTE(review): return code is never checked
    
    # Clear the screen
    win.flip()
    
    # display cool-down message
    message1.setText("That's the end! ")
    message2.setText("Press 'q' or 'escape' to end the run.")
    win.logOnFlip(level=logging.EXP, msg='Display TheEnd')
    message1.draw()
    message2.draw()
    win.flip()
    
    # Wait for keypress
    thisKey = event.waitKeys(keyList=['q','escape'])

    # --- EyeLink code --- #
    # go offline and close the EDF data file on the Host PC
    tk.setOfflineMode()
    tk.closeDataFile()
    pylink.pumpDelay(50)

    # Get the EDF data and say goodbye
    message1.text='Data transfering.....'
    message1.draw()
    win.flip()
    tk.receiveDataFile(dataFileName, dataFolder + dataFileName)

    #close the link to the tracker
    tk.close()

    # close the graphics
    pylink.closeGraphics()
    # --- End EyeLink code --- #

    # exit
    win.close()
    core.quit()
Exemple #12
0
    def stop(self, outfile):
        '''
        Stop recording, close the EDF file on the tracker, and download
        it to ``outfile`` (download is skipped when outfile is None).
        The link to the tracker is always closed before returning.
        '''

        # pumpDelay is a lower priority delay which does not block background
        # events. msecDelay is more aggressive. Here used to catch last bit of
        # data before stopping the recording
        pylink.pumpDelay(100)
        # idle mode
        self.tracker.setOfflineMode()
        pylink.msecDelay(500)
        # close the file on the tracker HD. Can take a while...
        self.tracker.closeDataFile()
        if outfile is not None:
            self.tracker.receiveDataFile(self.remotefilename, outfile)
        self.tracker.close()
        return
def start_recording():
    """Prepare the Host PC and start EyeLink recording for one trial.

    NOTE(review): ``trialNum``, ``scenestim``, ``w`` and ``h`` are read
    from module scope — confirm they are set before this is called.
    Updates globals: eye_used, dt, inbox, Finished.
    """
    global eye_used
    global dt
    global inbox
    global Finished
    # Mark the trial start in the EDF data file.
    pylink.getEYELINK().sendMessage('TRIALID %s'%(trialNum))
    # Show trial/image info on the Host PC status line.
    pylink.getEYELINK().sendCommand("record_status_message 'trial %s image %s'" %(trialNum,scenestim))
    pylink.getEYELINK().sendCommand("set_idle_mode")
    core.wait(.05) #delay so tracker is ready (using psychopy)
    pylink.getEYELINK().sendCommand("clear_screen 0")
    # Draw a 200x200 px box around the screen centre on the Host display.
    pylink.getEYELINK().sendCommand("draw_box %s %s %s %s %s" %(w/2 - 100, h/2 - 100, w/2 + 100, h/2 + 100,  7))
    
    # Begin recording (samples + events, to file and over the link)
    pylink.getEYELINK().startRecording(1, 1, 1, 1)
    pylink.pumpDelay(100)#100 milliseconds of data to accumulate before the trial display starts
    dt = pylink.getEYELINK().getNewestSample() # check for new sample update
    eye_used = pylink.getEYELINK().eyeAvailable() #determine which eye(s) are available
    
    inbox = False #reset gaze contingent fixation
    Finished = False #if gaze contingent fixation failed
def abort_trial():
    """Abort the current trial: stop recording and report TRIAL_ERROR."""
    tracker = pylink.getEYELINK()

    # If still recording, keep 100 ms of trailing data before stopping.
    if tracker.isRecording():
        pylink.pumpDelay(100)
        tracker.stopRecording()

    # Blank the subject display and the Data Viewer background.
    clear_screen(win)
    bg_rgb = (116, 116, 116)
    tracker.sendMessage('!V CLEAR %d %d %d' % bg_rgb)

    # Mark the trial as ended with an error in the EDF data file.
    tracker.sendMessage('TRIAL_RESULT %d' % pylink.TRIAL_ERROR)

    return pylink.TRIAL_ERROR
Exemple #15
0
    def drift(self, tracker, mov, win):
        """Run a drift-correction check over the paused video frame."""
        # The tracker must be offline before drift correction.
        tracker.setOfflineMode()
        pylink.pumpDelay(50)

        # Screen-centre coordinates.
        cx = win.size[0] / 2
        cy = win.size[1] / 2

        # Redraw the current video frame as the background.
        mov.draw()

        # Fixation target: small white dot on top of a larger black dot.
        inner_dot = visual.GratingStim(win, tex='none', mask='circle',
                                       size=5, color=[1, 1, 1])
        outer_dot = visual.GratingStim(win, tex='none', mask='circle',
                                       size=25, color=[-1, -1, -1])
        outer_dot.draw()
        inner_dot.draw()
        # present background with overlaid fixation point
        win.flip()

        try:
            tracker.sendCommand('drift_correction_rpt_error 20.0')
            # draw=0, allow_setup=0 preserves the background image
            tracker.doDriftCorrect(cx, cy, 0, 0)
            tracker.getCalibrationMessage()
        except:
            # Best-effort: a failed/escaped drift check must not abort the trial.
            pass

        # Back to offline, then resume recording (samples + events,
        # to file and over the link).
        tracker.setOfflineMode()
        pylink.pumpDelay(50)
        tracker.startRecording(1, 1, 1, 1)
        pylink.pumpDelay(50)  # let ~50 ms of data accumulate
        return mov, win
def lissajous_func(trial_dur, movement_pars, trial_index):
    """
    Run one Lissajous pursuit-movement trial.

    :param trial_dur: the duration of the pursuit movement (seconds)
    :param movement_pars: [movement, amp_x, amp_y, phase_x, phase_y,
        freq_x, freq_y] -- a label plus sinusoid parameters per axis
    :param trial_index: order of trial presentation in the task
    :return: a pylink error/skip code on the failure paths; None when
        the trial runs to completion
    """

    # parse the movement pattern parameters
    movement, amp_x, amp_y, phase_x, phase_y, freq_x, freq_y = movement_pars

    # get a reference to the currently active EyeLink connection
    el_tracker = pylink.getEYELINK()

    # put the tracker in the offline mode first
    el_tracker.setOfflineMode()

    # send a "TRIALID" message to mark the start of a trial, see Data
    # Viewer User Manual, "Protocol for EyeLink Data to Viewer Integration"
    el_tracker.sendMessage('TRIALID %d' % trial_index)

    # record_status_message : show some info on the Host PC
    # here we show how many trials have been tested
    status_msg = 'TRIAL number %d, %s' % (trial_index, movement)
    el_tracker.sendCommand("record_status_message '%s'" % status_msg)

    # draw a reference grid on the Host PC screen
    # For details, See section 25.7 'Drawing Commands' in the
    # EyeLink Programmers Guide manual
    line_hor = (scnWidth / 2.0 - amp_x, scnHeight / 2.0,
                scnWidth / 2.0 + amp_x, scnHeight / 2.0)
    # BUGFIX: the lower endpoint of the vertical line previously used
    # "+ amp_x"; both endpoints must use the vertical amplitude amp_y.
    line_ver = (scnWidth / 2.0, scnHeight / 2.0 - amp_y, scnWidth / 2.0,
                scnHeight / 2.0 + amp_y)
    el_tracker.sendCommand('clear_screen 0')  # clear the host Display
    el_tracker.sendCommand('draw_line %d %d %d %d 15' % line_hor)
    el_tracker.sendCommand('draw_line %d %d %d %d 15' % line_ver)

    # put tracker in idle/offline mode before recording
    el_tracker.setOfflineMode()

    # Start recording
    # arguments: sample_to_file, events_to_file, sample_over_link,
    # event_over_link (1-yes, 0-no)
    try:
        el_tracker.startRecording(1, 1, 1, 1)
    except RuntimeError as error:
        print("ERROR:", error)
        abort_trial()
        return pylink.TRIAL_ERROR

    # Allocate some time for the tracker to cache some samples
    pylink.pumpDelay(100)

    # Send a message to clear the Data Viewer screen, get it ready for
    # drawing the pictures during visualization
    bgcolor_RGB = (116, 116, 116)
    el_tracker.sendMessage('!V CLEAR %d %d %d' % bgcolor_RGB)

    # open an INTEREST AREA SET file to make a dynamic IA for the target
    ias = 'IA_%d.ias' % trial_index
    ias_file = open(os.path.join(aoi_folder, ias), 'w')

    # initial target position
    time_elapsed = 0
    tar_x = amp_x * sin(2 * pi * freq_x * time_elapsed + phase_x)
    tar_y = amp_y * sin(2 * pi * freq_y * time_elapsed + phase_y)

    ia_radius = 60  # radius of the elliptical IA
    frame_num = 0  # keep track of the frames displayed

    # use a fixation trigger when not in dummy mode
    if not dummy_mode:
        fixation = visual.TextStim(win=win, text='+', height=50)
        fixation.draw()
        win.flip()
        el_tracker.sendMessage("FIXATION_TRIGGER")

        eye_used = el_tracker.eyeAvailable()
        if eye_used == 2:
            eye_used = 0

        fixation_time_list = []
        current_eye_pos = [100, 100]

        while True:
            ltype = el_tracker.getNextData()
            if ltype is None:
                # no new data: fall through and re-check the last known
                # gaze position below
                pass
            if ltype == FIXUPDATE:
                # mark the arrival time of a fixation update event
                el_tracker.sendMessage('fixUpdate')
                ldata = el_tracker.getFloatData()
                if ldata.getEye() == eye_used:
                    gaze_pos = ldata.getAverageGaze()
                    current_eye_pos = [
                        gaze_pos[0] - scnWidth / 2, scnHeight / 2 - gaze_pos[1]
                    ]
            # accumulate time stamps while gaze stays in a 50x50 px box
            # around the screen centre; reset when it leaves
            if (-25 <= current_eye_pos[0] <= 25) and (-25 <= current_eye_pos[1]
                                                      <= 25):
                fixation_time_list.append(core.getTime())
            else:
                fixation_time_list = []
            if len(fixation_time_list) > 1:
                # if fixation duration > 300 ms, break
                if fixation_time_list[-1] - fixation_time_list[0] > 0.3:
                    break

    target.pos = (tar_x, tar_y)
    target.draw()
    win.flip()
    el_tracker.sendMessage('TARGET_WAIT')
    core.wait(0.5)  # wait 500 ms before the movement starts

    while True:
        # abort the current trial if the tracker is no longer recording
        # BUGFIX: compare with != rather than "is not" — identity
        # comparison on integers is unreliable
        error = el_tracker.isRecording()
        if error != pylink.TRIAL_OK:
            el_tracker.sendMessage('tracker_disconnected')
            abort_trial()
            return error

        # check keyboard events
        for keycode, modifier in event.getKeys(modifiers=True):
            # Abort a trial if "ESCAPE" is pressed
            if keycode == 'escape':
                el_tracker.sendMessage('trial_skipped_by_user')
                # clear the screen
                clear_screen(win)
                # abort trial
                abort_trial()
                return pylink.SKIP_TRIAL

            # Terminate the task if Ctrl-c
            if keycode == 'c' and (modifier['ctrl'] is True):
                el_tracker.sendMessage('terminated_by_user')
                terminate_task()
                return pylink.ABORT_EXPT

        # draw the target
        target.pos = (tar_x, tar_y)
        target.draw()
        win.flip()
        frame_num += 1
        flip_time = core.getTime()

        if frame_num == 1:
            # send a message to mark movement onset
            el_tracker.sendMessage('TARGET_ONSET')

            # record a message to let Data Viewer know where to find
            # the dynamic IA file for the current trial.
            ias_path = os.path.join('aoi', ias)
            el_tracker.sendMessage('!V IAREA FILE %s' % ias_path)

            # pursuit start time
            movement_start = flip_time
        else:
            # save the Interest Area info following movement onset
            ia_pars = (-1 * round(
                (pre_frame_time - movement_start) * 1000), -1 * round(
                    (flip_time - movement_start) * 1000) + 1,
                       int(scnWidth / 2.0 + pre_x - ia_radius),
                       int(scnHeight / 2.0 - pre_y - ia_radius),
                       int(scnWidth / 2.0 + pre_x + ia_radius),
                       int(scnHeight / 2.0 - pre_y + ia_radius))

            ia_msg = '%d %d ELLIPSE 1 %d %d %d %d TARGET\n' % ia_pars
            ias_file.write(ia_msg)

            # log the target position after each screen refresh
            tar_pos = (tar_x + int(scnWidth / 2), int(scnHeight / 2) - tar_y)
            tar_pos_msg = '!V TARGET_POS target %d, %d 1 0' % tar_pos
            el_tracker.sendMessage(tar_pos_msg)

            # OPTIONAL - send over another message to request Data Viewer
            # to draw the pursuit target when visualizing the data
            el_tracker.sendMessage('!V CLEAR 128 128 128')
            tar_msg = '!V FIXPOINT 255 0 0 255 0 0 %d %d 50 50' % tar_pos
            el_tracker.sendMessage(tar_msg)

        # keep track of target position and frame timing
        pre_frame_time = flip_time
        pre_x = tar_x
        pre_y = tar_y

        # update target position for the next frame
        time_elapsed = flip_time - movement_start
        tar_x = amp_x * sin(2 * pi * freq_x * time_elapsed + phase_x)
        tar_y = amp_y * sin(2 * pi * freq_y * time_elapsed + phase_y)

        # check for time out
        if time_elapsed >= trial_dur:
            # send over a message to log movement offset
            el_tracker.sendMessage('TARGET_OFFSET')
            print(time_elapsed)
            break

    # clear the screen
    # clear_screen(win)
    win.color = (0, 0, 0)
    win.flip()
    el_tracker.sendMessage('black_screen')
    # send a message to clear the Data Viewer screen as well
    el_tracker.sendMessage('!V CLEAR 128 128 128')
    core.wait(0.5)

    # close the IAS file that contains the dynamic IA definition
    ias_file.close()

    # stop recording; add 100 msec to catch final events before stopping
    pylink.pumpDelay(100)
    el_tracker.stopRecording()

    # record trial variables to the EDF data file, for details, see Data
    # Viewer User Manual, "Protocol for EyeLink Data to Viewer Integration"
    # movement, dur, amp_x, amp_y, phase_x, phase_y, freq_x, freq_y
    el_tracker.sendMessage('!V TRIAL_VAR movement %s' % movement)
    el_tracker.sendMessage('!V TRIAL_VAR max_duration %d' %
                           int(trial_dur * 1000))
    el_tracker.sendMessage('!V TRIAL_VAR amp_x %.02f' % amp_x)
    pylink.msecDelay(4)  # take a break of 4 milliseconds
    el_tracker.sendMessage('!V TRIAL_VAR amp_y %.02f' % amp_y)
    el_tracker.sendMessage('!V TRIAL_VAR phase_x %.02f' % (phase_x / pi * 180))
    el_tracker.sendMessage('!V TRIAL_VAR phase_y %.02f' % (phase_y / pi * 180))
    pylink.msecDelay(4)  # take a break of 4 milliseconds
    el_tracker.sendMessage('!V TRIAL_VAR freq_x %.02f' % freq_x)
    el_tracker.sendMessage('!V TRIAL_VAR freq_y %.02f' % freq_y)

    # send a 'TRIAL_RESULT' message to mark the end of trial, see Data
    # Viewer User Manual, "Protocol for EyeLink Data to Viewer Integration"
    el_tracker.sendMessage('TRIAL_RESULT %d' % pylink.TRIAL_OK)
Exemple #17
0
def elEndRec(el):
    # Ends the recording; adds 100ms to catch final events
    pl.endRealTimeMode()  # leave realtime priority
    pl.pumpDelay(100)  # keep 100 ms of trailing data
    el.stopRecording()
Exemple #18
0
 def elEndRec(el):
     # Ends the recording; adds 100ms to catch final events
     pl.endRealTimeMode()  # leave realtime priority
     pl.pumpDelay(100)  # keep 100 ms of trailing data
     el.stopRecording()
Exemple #19
0
 def stop_recording(self):
     """Stop EyeLink recording and flush the tracker key queue."""
     self.recording = False
     # Record an additional 100 msec so trial-final events are captured.
     pylink.pumpDelay(100)
     pylink.getEYELINK().stopRecording()
     # Drain any pending keypresses.
     while pylink.getEYELINK().getkey():
         pass
Exemple #20
0
# run through five trials
for trial in range(1, 6):
    # print a message to show the current trial number
    print("Trial #: %d" % trial)

    # log a TRIALID message to mark trial start
    tk.sendMessage('TRIALID %d' % trial)

    tk.startRecording(1, 1, 1, 1)  # start recording

    # interest area definitions (Data Viewer '!V IAREA' messages)
    tk.sendMessage('!V IAREA ELLIPSE 1 0 0 100 100 head')
    tk.sendMessage('!V IAREA RECTANGLE 2 85 85 285 185 body')
    tk.sendMessage('!V IAREA FREEHAND 3 285,125 385,50 335,125 tail')

    pylink.pumpDelay(2000)  # record for 2-sec
    tk.stopRecording()  # stop recording

    # store trial variables in the EDF data file
    tk.sendMessage('!V TRIAL_VAR condition step')
    tk.sendMessage('!V TRIAL_VAR gap_duration 200')
    tk.sendMessage('!V TRIAL_VAR direction Right')

    # send the TRIAL_RESULT message to mark the end of a trial
    tk.sendMessage('TRIAL_RESULT 0')

# retrieve data file (host file name -> local file name)
tk.receiveDataFile('seg.edf', 'seg.edf')

# close the link
tk.close()
Exemple #21
0
def runTrial(pars):
    """Run one gaze-contingent trial; pars is a row of the trial list.

    pars = (cond, pic). In the 'mask' condition a mask is drawn at the
    current gaze position; in the 'window' condition a gaze-contingent
    aperture follows the gaze instead. Recording starts after a drift
    check and stops when any key or gamepad button is pressed.
    """

    # retrieve parameters from the trial list
    cond, pic = pars

    # load the image to display
    img = visual.ImageStim(
        win, image=pic,
        size=(scnWidth, scnHeight))  # stretch the image to fill full screen

    # backdropping the image to the Host screen (optional, SLOW and may cause timing problems for some, e.g., MRI tasks)
    # here we use the list comprehension method of Python to convert the RGB values of all pixels into a format
    # that is recognizable by the Host PC, i.e., pixels = [line1, ...lineN], line = [pix1...pixN], pix=(R,G,B)
    #im = Image.open(pic)
    #im = im.resize((scnWidth, scnHeight))
    #w,h = (scnWidth, scnHeight)
    #pixels = im.load()
    #pixels_2transfer = [[pixels[i,j] for i in range(w)] for j in range(h)]
    #tk.sendCommand('clear_screen 0') # clear the host screen
    #tk.bitmapBackdrop(w, h, pixels_2transfer, 0, 0, w, h, 0, 0, pylink.BX_MAXCONTRAST)

    # take the tracker offline
    tk.setOfflineMode()
    pylink.pumpDelay(50)

    # send the standard "TRIALID" message to mark the start of a trial
    # [see Data Viewer User Manual, Section 7: Protocol for EyeLink Data to Viewer Integration]
    tk.sendMessage('TRIALID')

    # record_status_message : show some info on the host PC
    tk.sendCommand("record_status_message 'Task: %s'" % cond)

    # drift check (falls back to full tracker setup on failure/escape)
    try:
        err = tk.doDriftCorrect(scnWidth / 2, scnHeight / 2, 1, 1)
    except:
        tk.doTrackerSetup()

    # uncomment this line to read out calibration/drift-correction results
    #print tk.getCalibrationMessage()

    # start recording, parameters specify whether events and samples are
    # stored in file, and available over the link
    error = tk.startRecording(1, 1, 1, 1)
    pylink.pumpDelay(
        100)  # wait for 100 ms to make sure data of interest is recorded

    #determine which eye(s) are available
    eyeTracked = tk.eyeAvailable()
    if eyeTracked == 2: eyeTracked = 1

    # enable the gaze-contingent aperture in the 'window' condition
    if cond == 'window': gazeWindow.enabled = True
    else: gazeWindow.enabled = False

    # show the image
    img.draw()
    win.flip()

    # this message marks the onset of the stimulus
    # [see Data Viewer User Manual, Section 7: Protocol for EyeLink Data to Viewer Integration]
    tk.sendMessage('image_onset')

    # Message to specify where the image is stored relative to the EDF data file, please see the
    # "Protocol for EyeLink Data to Data Viewer Integration -> Image" section of the Data Viewer manual
    tk.sendMessage('!V IMGLOAD FILL %s' % ('..' + os.sep + pic))

    # show the image indefinitely until a key is pressed
    gazePos = (scnWidth / 2, scnHeight / 2)
    terminate = False
    event.clearEvents(
    )  # clear cached (keyboard/mouse etc.) events, if there is any
    while not terminate:
        # check for keypress to terminate a trial
        if len(event.getKeys()) > 0:  # KEYBOARD
            terminate = True

        if True in tk.getLastButtonPress(
        ):  # GamePad connected to the tracker HOST PC
            terminate = True

        # check for new samples and update the last-known gaze position
        dt = tk.getNewestSample()
        if (dt != None):
            if eyeTracked == 1 and dt.isRightSample():
                gazePos = dt.getRightEye().getGaze()
            elif eyeTracked == 0 and dt.isLeftSample():
                gazePos = dt.getLeftEye().getGaze()

        # redraw background image
        img.draw()
        # gaze-contingent window/mask (converted to centre-origin coords)
        if cond == 'window':
            gazeWindow.pos = (gazePos[0] - scnWidth / 2,
                              scnHeight / 2 - gazePos[1])
        else:
            gazeMask.pos = (gazePos[0] - scnWidth / 2,
                            scnHeight / 2 - gazePos[1])
            gazeMask.draw()
        win.flip()

    # clear the subject display
    win.color = [0, 0, 0]
    win.flip()

    # clear the host display, this command is needed if you are backdropping images
    # to the host display (not demonstrated in this script)
    tk.sendCommand('clear_screen 0')

    # send trial variables for Data Viewer integration
    # [see Data Viewer User Manual, Section 7: Protocol for EyeLink Data to Viewer Integration]
    tk.sendMessage('!V TRIAL_VAR task %s' % cond)

    # send interest area messages, if there is any, here we set a rectangular IA, just to
    # illustrate how the IA messages look like
    # format: !V IAREA RECTANGLE <id> <left> <top> <right> <bottom> [label string]
    # [see Data Viewer User Manual, Section 7: Protocol for EyeLink Data to Viewer Integration]
    tk.sendMessage('!V IAREA RECTANGLE %d %d %d %d %d %s' %
                   (1, scnWidth / 2 - 100, scnHeight / 2 - 200,
                    scnWidth / 2 + 200, scnHeight / 2 + 200, 'screenIA'))

    # send a message to mark the end of trial
    # [see Data Viewer User Manual, Section 7: Protocol for EyeLink Data to Viewer Integration]
    tk.sendMessage('TRIAL_RESULT')
    pylink.pumpDelay(100)
    tk.stopRecording()  # stop recording

    # disable the aperture at the end of each trial
    gazeWindow.enabled = False
Exemple #22
0
    # Stop recording
    tk.stopRecording()

    # Send a'TRIAL_RESULT' message to mark the end of trial
    tk.sendMessage('TRIAL_RESULT')


# Run a block of 2 trials, in random order
test_list = trials[:]
random.shuffle(test_list)
for trial in test_list:
    run_trial(trial)

# Step 7: Close the EDF data file
pylink.pumpDelay(100)  # wait for 100 ms to catch session end events
tk.closeDataFile()

# Step 8: Download EDF file to a local folder ('edfData')
msg = 'Downloading EDF file from the EyeLink Host PC ...'
edfTransfer = visual.TextStim(win, text=msg, color='white')
edfTransfer.draw()
win.flip()

if not os.path.exists('edfData'):
    os.mkdir('edfData')
tk.receiveDataFile('video.edf', 'edfData/video_demo.edf')  # host -> local

# Step 9: Close the connection to tracker, close graphics
tk.close()
win.close()
def gc_drift():
    """Gaze-contingent fixation failed: stop recording and re-run drift correction."""
    pylink.getEYELINK().sendMessage("GC_failed") #send failure message
    pylink.pumpDelay(100) #Allow Windows to clean up while we record additional 100 msec of data
    pylink.getEYELINK().stopRecording()
    # Drift-correct at sc (presumably the screen-centre (x, y) — confirm);
    # draw=0, allow_setup=0 keeps the current display.
    pylink.getEYELINK().doDriftCorrect(int(sc[0]), int(sc[1]), 0, 0)
Exemple #24
0
    got_SAC = False
    while not got_SAC:
        dt = tk.getNextData()
        if dt is not None:
            ev_data = tk.getFloatData()
            if dt == pylink.ENDSACC:
                amp_x, amp_y = ev_data.getAmplitude()
                # jump out of the loop if a saccade >2 deg is detected
                if hypot(amp_x, amp_y) > 2.0:
                    got_SAC = True

    tk.stopRecording()  # stop recording

    #start playback and draw the saccade trajectory
    tk.startPlayBack()
    pylink.pumpDelay(
        50)  # wait for 50 ms so the Host can switch to playback mode
    smp_pos = []
    smp_timestamp = -32768
    while True:
        smp = tk.getNewestSample()
        if smp is not None:
            if smp.getEye() == 0:
                gaze_pos = smp.getLeftEye().getGaze()
            else:
                gaze_pos = smp.getRightEye().getGaze()
            if smp.getTime() > smp_timestamp:
                smp_pos = smp_pos + [(int(gaze_pos[0]), int(gaze_pos[1]))]
                smp_timestamp = smp.getTime()

                # plot the tracjectory
                if len(smp_pos) > 1:
 #The IMGLOAD command is used to show an overlay image in Data Viewer.  This will code the time that the PictureTrial image should appear.
 pylink.getEYELINK().sendMessage("!V IMGLOAD CENTER  %s" %(scenestim))
 
 #IAPS
 # Send onset time
 offset = int((t-iaps_display.tStart) * 1000)
 msg = str(offset) + " IAPS_Onset"
 pylink.getEYELINK().sendMessage(msg)
 
 #IAPS
 # Send offset time
 offset = int((t-blank.tStart) * 1000)
 msg = str(offset) + " IAPS_Offset"
 pylink.getEYELINK().sendMessage(msg)
 
 pylink.pumpDelay(100) #Allow Windows to clean up while we record additional 100 msec of data
 
 #stop recording
 pylink.getEYELINK().stopRecording()
 
 #VARIABLES
 msg = "!V TRIAL_VAR picture %s" %(scenestim) #scenestim
 pylink.getEYELINK().sendMessage(msg)
 msg = "!V TRIAL_VAR valence %s" %(valence) #valence
 pylink.getEYELINK().sendMessage(msg)
 msg = "!V TRIAL_VAR valmean %s" %(valmean) #valmean
 pylink.getEYELINK().sendMessage(msg)
 msg = "!V TRIAL_VAR arousal %s" %(arousal) #arousal
 pylink.getEYELINK().sendMessage(msg)
 msg = "!V TRIAL_VAR arousalmean %s" %(arousalmean) #arousalmean
 pylink.getEYELINK().sendMessage(msg)
Exemple #26
0
def run_trial(trial_duration, movement_pars):
    """ Run a smooth pursuit trial

    trial_duration: the duration of the pursuit movement
    movement_pars: [ amp_x, amp_y, phase_x, phase_y, freq_x, freq_y]
    The Sinusoidal movement pattern is determined by the following equation
    y(t) = amplitude * sin(frequency * t + phase)
    for a circular or elliptical movements, the phase in x and y directions
    should be pi/2 (direction matters) """

    # Parse the movement pattern parameters
    amp_x, amp_y, phase_x, phase_y, freq_x, freq_y = movement_pars

    # Take the tracker offline
    tk.setOfflineMode()
    pylink.msecDelay(50)

    # Send the standard "TRIALID" message to mark the start of a trial
    tk.sendMessage("TRIALID")

    # Record_status_message : show some info on the Host PC
    tk.sendCommand("record_status_message 'Pursuit demo'")

    # Drift check/correction, params: x, y, draw_target, allow_setup
    # (falls back to full tracker setup on failure/escape)
    try:
        tk.doDriftCorrect(int(SCN_WIDTH / 2 - amp_x), int(SCN_HEIGHT / 2), 1,
                          1)
    except:
        tk.doTrackerSetup()

    # Start recording
    # params: sample_in_file, event_in_file,
    # sample_over_link, event_over_link (1-yes, 0-no)
    tk.startRecording(1, 1, 1, 1)
    # Wait for 50 ms to cache some samples
    pylink.msecDelay(50)

    # Movement starts here
    win.flip()
    pursuitClock.reset()

    # Send a message to mark movement onset
    tk.sendMessage('Movement_onset')
    while True:
        time_elapsed = pursuitClock.getTime()
        if time_elapsed >= trial_duration:
            break
        else:
            tar_x = amp_x * sin(freq_x * time_elapsed + phase_x)
            tar_y = amp_y * sin(freq_y * time_elapsed + phase_y)
            target.pos = (tar_x, tar_y)
            target.draw()
            win.flip()
            # log target position (screen coords) for Data Viewer playback
            tar_pos = (tar_x + int(SCN_WIDTH / 2), int(SCN_HEIGHT / 2) - tar_y)
            tk.sendMessage('!V TARGET_POS target %d, %d 1 0' % tar_pos)

    # Send a message to mark movement offset
    tk.sendMessage('Movement_offset')
    # clear the subject display
    win.color = (0, 0, 0)
    win.flip()

    # Stop recording
    tk.stopRecording()

    # Send trial variables to record in the EDF data file
    tk.sendMessage("!V TRIAL_VAR amp_x %.2f" % amp_x)
    tk.sendMessage("!V TRIAL_VAR amp_y %.2f" % amp_y)
    tk.sendMessage("!V TRIAL_VAR phase_x %.2f" % phase_x)
    tk.sendMessage("!V TRIAL_VAR phase_y %.2f" % phase_y)
    tk.sendMessage("!V TRIAL_VAR freq_x %.2f" % freq_x)
    tk.sendMessage("!V TRIAL_VAR freq_y %.2f" % freq_y)
    tk.sendMessage("!V TRIAL_VAR duration %.2f" % trial_duration)

    # Send a 'TRIAL_RESULT' message to mark the end of trial
    tk.sendMessage('TRIAL_RESULT')
    pylink.pumpDelay(50)
Exemple #27
0
    tk.stopRecording()

    # Send a'TRIAL_RESULT' message to mark the end of trial
    tk.sendMessage('TRIAL_RESULT')


# Run a block of 2 trials, in random order
test_list = trials[:]
random.shuffle(test_list)
for trial in test_list:
    run_trial(trial)

# Step 7: Close the EDF data file and put the tracker in idle mode
# NOTE(review): the file is closed before setOfflineMode(); other
# examples switch to offline mode first — confirm intended order.
tk.closeDataFile()
tk.setOfflineMode()
pylink.pumpDelay(100)

# Step 8: Download EDF file to a local folder ('edfData')
msg = 'Downloading EDF file from the EyeLink Host PC ...'
edfTransfer = visual.TextStim(win, text=msg, color='white')
edfTransfer.draw()
win.flip()

if not os.path.exists('edfData'):
    os.mkdir('edfData')
tk.receiveDataFile('video.edf', 'edfData/video_demo.edf')

# Step 9: Close the connection to tracker, close graphics
tk.close()
core.quit()
Exemple #28
0
    tk.sendMessage(f'TRIALID {trial}')

    # Start recording
    tk.startRecording(1, 1, 1, 1)

    # Clear the screen to show a white background
    tk.sendMessage('!V CLEAR 255 255 255')
    # Draw a central fixation dot
    tk.sendMessage('!V FIXPOINT 0 0 0 0 0 0 512 384 25 0')
    # Draw the non-target
    tk.sendMessage('!V FIXPOINT 0 0 0 255 255 255 312 384 80 75')
    # Draw the target
    tk.sendMessage('!V FIXPOINT 255 0 0 255 0 0 712 384 80 0')

    # Pretending that we are doing something for 2-sec
    pylink.pumpDelay(2000)

    # Stop recording
    tk.stopRecording()

    # Log a TRIAL_RESULT message to mark trial ends
    tk.sendMessage('TRIAL_RESULT 0')

# Wait for 100 ms to catch session end events
pylink.msecDelay(100)

# Close the EDF file on the Host PC and download a local copy
tk.closeDataFile()
tk.receiveDataFile('drawing.edf', 'drawing_demo.edf')

# Close the link
Exemple #29
0
# set up the camera and calibrate the tracker at the beginning of each block
tk.doTrackerSetup()

# run a block of trials
testList = trials[:] * 1  # construct the trial list
random.shuffle(testList)  # randomize the trial list

# Looping through the trial list
for t in testList:
    runTrial(t)

# go offline, then close the EDF data file on the Host PC
tk.setOfflineMode()
tk.closeDataFile()
pylink.pumpDelay(50)

# Get the EDF data and say goodbye
msg.text = 'Data transfering.....'
msg.draw()
win.flip()
tk.receiveDataFile(dataFileName, dataFolder + dataFileName)  # host -> local

#close the link to the tracker
tk.close()

# close the graphics
pylink.closeGraphics()
win.close()
core.quit()
Exemple #30
0
    units="pix"
)
# img.pos += (0, 150)

# ------- INSTRUCTIONS & PRACTICE ------ #
# textScreen("Enter text here.",'space',0)

# put the tracker into idle/offline mode before recording
# NOTE(review): the original comment here said 'close the EDF data file',
# but no file is closed at this point
tk.setOfflineMode()
# send message to mark the trial start in the EDF recording
tk.sendMessage('TRIALID_1')

# start recording, parameters specify whether events and samples are
# stored in file, and available over the link (1=yes, 0=no)
error = tk.startRecording(1, 1, 1, 1)
pylink.pumpDelay(100)  # wait for 100 ms to make sure data of interest is recorded

# determine which eye(s) are available
# presumably 2 means binocular tracking; fall back to one eye — TODO confirm
eyeTracked = tk.eyeAvailable()
if eyeTracked == 2:
    eyeTracked = 1

img.draw()

win.flip()

"""
dt = tk.getNewestSample()
if (dt != None):
        if eyeTracked == 1 and dt.isRightSample():
                gazePos = dt.getRightEye().getGaze()
def half_long_new(move_pars, trial_index):
    """Run a single half- or full-course smooth-pursuit trial.

    The target starts at (start_x, start_y), moves linearly toward
    (end_x, end_y) and a dynamic interest-area (IAS) file is written for
    Data Viewer along the way.

    :param move_pars: a list containing trial parameters, i.e.
                [movement, start_x, start_y, end_x, end_y]
    :param trial_index: record the order of trial presentation in the task
    :return: an EyeLink error code (e.g. pylink.TRIAL_ERROR) when the trial
             is aborted; None when the trial completes normally
    """
    movement, start_x, start_y, end_x, end_y = move_pars
    # signed travel distance along each axis; the signs select the
    # movement direction in the branches below
    x_length = end_x - start_x
    y_length = end_y - start_y

    # get a reference to the currently active EyeLink connection
    el_tracker = pylink.getEYELINK()

    # put the tracker in the offline mode first
    el_tracker.setOfflineMode()

    # send a 'TRIALID' message to mark the start of a trial
    el_tracker.sendMessage('TRIALID %d' % trial_index)

    # record_status_message : show some info on the Host PC
    # here we show how many trial has been tested
    status_msg = 'TRIAL number %d, %s' % (trial_index, movement)
    el_tracker.sendCommand("record_status_message '%s'" % status_msg)

    # draw a reference grid on the Host PC screen
    # For details, See section 25.7 'Drawing Commands' in the
    # EyeLink Programmers Guide manual
    line_hor = (scnWidth / 2.0 - start_x, scnHeight / 2.0,
                scnWidth / 2.0 + start_x, scnHeight / 2.0)
    line_ver = (scnWidth / 2.0, scnHeight / 2.0 - start_y, scnWidth / 2.0,
                scnHeight / 2.0 + start_y)
    el_tracker.sendCommand('clear_screen 0')  # clear the host Display
    el_tracker.sendCommand('draw_line %d %d %d %d 15' % line_hor)
    el_tracker.sendCommand('draw_line %d %d %d %d 15' % line_ver)

    # put tracker in idle/offline mode before recording
    # NOTE(review): the tracker was already set offline above; this repeat
    # appears harmless
    el_tracker.setOfflineMode()

    # Start recording
    # arguments: sample_to_file, events_to_file, sample_over_link,
    # event_over_link (1-yes, 0-no)
    try:
        el_tracker.startRecording(1, 1, 1, 1)
    except RuntimeError as error:
        print("ERROR:", error)
        abort_trial()
        return pylink.TRIAL_ERROR

    # Allocate some time for the tracker to cache some samples
    pylink.pumpDelay(100)

    # Send a message to clear the Data Viewer screen, get it ready for
    # drawing the pictures during visualization
    bgcolor_RGB = (116, 116, 116)
    el_tracker.sendMessage('!V CLEAR %d %d %d' % bgcolor_RGB)

    # open an INTEREST AREA SET file to make a dynamic IA for the target
    # NOTE(review): this file is left open if the trial aborts in the
    # recording loop below — consider closing it on the abort path too
    ias = 'IA_%d.ias' % trial_index
    ias_file = open(os.path.join(aoi_folder, ias), 'w')

    # ia_radius = 60  # radius of the elliptical IA
    frame_num = 0  # keep track of the frames displayed

    # use a fixation trigger when not in dummy mode
    if not dummy_mode:
        fixation = visual.TextStim(win=win, text='+', height=50)
        fixation.draw()
        win.flip()
        el_tracker.sendMessage("FIXATION_TRIGGER")

        # presumably 2 means binocular; fall back to the left eye (0) —
        # TODO confirm against the tracker configuration
        eye_used = el_tracker.eyeAvailable()
        if eye_used == 2:
            eye_used = 0

        fixation_time_list = []
        # start the gaze position outside the fixation window so the
        # dwell timer does not begin until a real fixation arrives
        current_eye_pos = [100, 100]

        while True:
            ltype = el_tracker.getNextData()
            # NOTE(review): getNextData() is documented to return 0 when no
            # new data is available, so this None check looks like a no-op
            if ltype is None:
                pass
            if ltype == FIXUPDATE:
                # send a message to mark the arrival time of a fixation update event
                el_tracker.sendMessage('fixUpdate')
                ldata = el_tracker.getFloatData()
                if ldata.getEye() == eye_used:
                    # convert from tracker coords (origin top-left) to
                    # screen-centered coords (origin at display center)
                    gaze_pos = ldata.getAverageGaze()
                    current_eye_pos = [
                        gaze_pos[0] - scnWidth / 2, scnHeight / 2 - gaze_pos[1]
                    ]
            # accumulate dwell time while gaze stays within a 50x50 px
            # window around the central fixation cross
            if (-25 <= current_eye_pos[0] <= 25) and (-25 <= current_eye_pos[1]
                                                      <= 25):
                fixation_time_list.append(core.getTime())
            else:
                fixation_time_list = []
            if len(fixation_time_list) > 1:
                # if fixation duration > 300 ms, break
                if fixation_time_list[-1] - fixation_time_list[0] > 0.3:
                    break

    # show the target at its start position and hold for 500 ms
    tar_x, tar_y = start_x, start_y
    target.pos = (tar_x, tar_y)
    target.draw()
    win.flip()
    el_tracker.sendMessage('TARGET_WAIT')
    core.wait(0.5)  # wait 500 ms

    pursuitClock.reset()
    time_elapsed = 0

    while True:
        # abort the current trial if the tracker is no longer recording
        # NOTE(review): identity test ('is not') against an int constant;
        # '!=' would be the safer comparison
        error = el_tracker.isRecording()
        if error is not pylink.TRIAL_OK:
            el_tracker.sendMessage('tracker_disconnected')
            abort_trial()
            return error

        frame_num += 1
        flip_time = pursuitClock.getTime()
        # flip_time = core.getTime()
        print('flip_time_a: ' + str(flip_time))

        if frame_num == 1:
            # send a message to mark movement onset
            el_tracker.sendMessage('TARGET_ONSET')

            # record a message to let Data Viewer know where to find
            # the dynamic IA file for the current trial.
            ias_path = os.path.join('aoi', ias)
            el_tracker.sendMessage('!V IAREA FILE %s' % ias_path)

            # pursuit start time
            movement_start = flip_time
            # print('start time ' + str(movement_start))
        else:
            # save the Interest Area info following movement onset:
            # negative start/end offsets (relative to movement onset, in ms)
            # plus the bounding box of the elliptical IA around the target's
            # previous position
            ia_pars = (-1 * round(
                (pre_frame_time - movement_start) * 1000), -1 * round(
                    (flip_time - movement_start) * 1000) + 1,
                       int(scnWidth / 2.0 + pre_x - ia_radius),
                       int(scnHeight / 2.0 - pre_y - ia_radius),
                       int(scnWidth / 2.0 + pre_x + ia_radius),
                       int(scnHeight / 2.0 - pre_y + ia_radius))

            ia_msg = '%d %d ELLIPSE 1 %d %d %d %d TARGET\n' % ia_pars
            ias_file.write(ia_msg)

            # log the target position after each screen refresh
            tar_pos = (tar_x + int(scnWidth / 2), int(scnHeight / 2) - tar_y)
            tar_pos_msg = '!V TARGET_POS target %d, %d 1 0' % tar_pos
            el_tracker.sendMessage(tar_pos_msg)

            # OPTIONAL - send over another message to request Data Viewer
            # to draw the pursuit target when visualizing the data
            el_tracker.sendMessage('!V CLEAR 128 128 128')
            tar_msg = '!V FIXPOINT 255 0 0 255 0 0 %d %d 50 50' % tar_pos
            el_tracker.sendMessage(tar_msg)

            # keep track of target position and frame timing
        pre_frame_time = flip_time
        pre_x = tar_x
        pre_y = tar_y

        time_elapsed = flip_time - movement_start

        if movement.startswith('Vertical'):
            # Half course: ball moves from top to center, OR center to bottom
            # Full course: ball moves from top to bottom
            if y_length < 0:
                tar_y -= hl_speed
                if tar_y <= end_y:  # break out of the loop at the endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break
            # Half course: ball moves from bottom to center, OR center to top
            # Full course: ball moves from bottom to top
            elif y_length > 0:
                tar_y += hl_speed
                if tar_y >= end_y:  # break out of the loop at the endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break
        elif movement.startswith('Horizontal'):
            # Half course: ball moves from right to center, OR center to left
            # Full course: ball moves from right to left
            if x_length < 0:
                tar_x -= hl_speed
                if tar_x <= end_x:  # break out of the loop at the endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break
            # Half course: ball moves from left to center, OR center to right
            # Full course: ball moves from left to right
            elif x_length > 0:
                tar_x += hl_speed
                if tar_x >= end_x:  # break out of the loop at the endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break
        elif movement.startswith('Tilt'):
            # diagonal movement; the per-axis step hl_speed / 1.4 presumably
            # approximates hl_speed / sqrt(2) to keep total speed constant
            # x_length < 0 and y_length < 0
            # Half course covers two cases:
            # 1. ball moves from upper-right to center
            # 2. ball moves from center to lower-left
            # Full course: ball moves from upper-right to lower-left
            if x_length < 0 and y_length < 0:
                tar_x -= hl_speed / 1.4
                tar_y -= hl_speed / 1.4
                if tar_x <= end_x or tar_y <= end_y:  # stop when x or y reaches its endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break
            # x_length > 0 and y_length < 0
            # Half course covers two cases:
            # 1. ball moves from upper-left to center
            # 2. ball moves from center to lower-right
            # Full course: ball moves from upper-left to lower-right
            elif x_length > 0 > y_length:
                tar_x += hl_speed / 1.4
                tar_y -= hl_speed / 1.4
                if tar_x >= end_x or tar_y <= end_y:  # stop when x or y reaches its endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break
            # x_length > 0 and y_length > 0
            # Half course covers two cases:
            # 1. ball moves from lower-left to center
            # 2. ball moves from center to upper-right
            # Full course: ball moves from lower-left to upper-right
            elif x_length > 0 and y_length > 0:
                tar_x += hl_speed / 1.4
                tar_y += hl_speed / 1.4
                if tar_x >= end_x or tar_y >= end_y:  # stop when x or y reaches its endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break
            # x_length < 0 and y_length > 0
            # Half course covers two cases:
            # 1. ball moves from lower-right to center
            # 2. ball moves from center to upper-left
            # Full course: ball moves from lower-right onward (the original
            # comment said 'to center'; presumably 'to upper-left' was meant)
            elif x_length < 0 < y_length:
                tar_x -= hl_speed / 1.4
                tar_y += hl_speed / 1.4
                if tar_x <= end_x or tar_y >= end_y:  # stop when x or y reaches its endpoint
                    el_tracker.sendMessage('TARGET_OFFSET')
                    break

        target.pos = (tar_x, tar_y)
        target.draw()
        win.flip()

    # clear the screen
    # clear_screen(win)
    win.color = (0, 0, 0)
    win.flip()
    el_tracker.sendMessage('black_screen')
    el_tracker.sendMessage('!V CLEAR 128 128 128')
    core.wait(0.5)

    # close the IAS file that contain the dynamic IA definition
    ias_file.close()

    # stop recording; add 100 msec to catch final events before stopping
    pylink.pumpDelay(100)
    el_tracker.stopRecording()

    # record trial variables for Data Viewer ('!V TRIAL_VAR' messages)
    el_tracker.sendMessage('!V TRIAL_VAR movement %s' % movement)
    el_tracker.sendMessage('!V TRIAL_VAR max_duration %d' %
                           int(time_elapsed * 1000))
    el_tracker.sendMessage('!V TRIAL_VAR start_x %d' % start_x)
    pylink.msecDelay(4)  # take a break of 4 millisecond
    el_tracker.sendMessage('!V TRIAL_VAR start_y %d' % start_y)
    el_tracker.sendMessage('!V TRIAL_VAR end_x %d' % end_x)
    el_tracker.sendMessage('!V TRIAL_VAR end_y %d' % end_y)

    # send a 'TRIAL_RESULT' message to mark the end of trial, see Data
    # Viewer User Manual, "Protocol for EyeLink Data to Viewer Integration"
    el_tracker.sendMessage('TRIAL_RESULT %d' % pylink.TRIAL_OK)
 def stopEyeTracking(self):
     """End the current recording.

     Leaves realtime mode first, then pads the data stream with an
     extra 100 ms of samples so events at the very end of the trial
     are captured before recording stops.
     """
     pylink.endRealTimeMode()
     pylink.pumpDelay(100)
     self.tracker.stopRecording()
Exemple #33
0
    def get_input_key(self):
        """Translate PsychoPy keyboard events into pylink key codes.

        This function is polled constantly by the pylink API during
        camera setup/calibration, so it is also used to animate the
        custom calibration target (e.g., advancing the spiral phase)
        on every poll.

        :return: a list of pylink.KeyInput objects (possibly empty)
        """

        # this function is constantly checked by the API, so we could
        # update the animated calibration target here
        if self.animatedTarget:
            # BUGFIX: compare string values with '==', not 'is' — identity
            # of string literals is implementation-dependent
            if self.calTarget == 'spiral':
                self.calibTar.phases -= 0.02
            self.calibTar.draw()
            self.display.flip()

        ky = []
        for keycode, modifier in event.getKeys(modifiers=True):
            k = pylink.JUNK_KEY
            if keycode == 'f1': k = pylink.F1_KEY
            elif keycode == 'f2': k = pylink.F2_KEY
            elif keycode == 'f3': k = pylink.F3_KEY
            elif keycode == 'f4': k = pylink.F4_KEY
            elif keycode == 'f5': k = pylink.F5_KEY
            elif keycode == 'f6': k = pylink.F6_KEY
            elif keycode == 'f7': k = pylink.F7_KEY
            elif keycode == 'f8': k = pylink.F8_KEY
            elif keycode == 'f9': k = pylink.F9_KEY
            elif keycode == 'f10': k = pylink.F10_KEY
            elif keycode == 'pageup': k = pylink.PAGE_UP
            elif keycode == 'pagedown': k = pylink.PAGE_DOWN
            elif keycode == 'up': k = pylink.CURS_UP
            elif keycode == 'down': k = pylink.CURS_DOWN
            elif keycode == 'left': k = pylink.CURS_LEFT
            elif keycode == 'right': k = pylink.CURS_RIGHT
            elif keycode == 'backspace': k = ord('\b')
            elif keycode == 'return':
                k = pylink.ENTER_KEY
                # probe the tracker to see if it's "simulating gaze with mouse"
                # if so, show a warning instead of a blank screen to experimenter
                # do so, only when the tracker is in Camera Setup screen
                # BUGFIX: use '==' (value comparison) instead of 'is'
                if self.tracker.getCurrentMode() == pylink.IN_SETUP_MODE:
                    self.tracker.readRequest('aux_mouse_simulation')
                    pylink.pumpDelay(50)
                    # BUGFIX: 'is' with a string literal is unreliable
                    if self.tracker.readReply() == '1':
                        self.msgMouseSim.autoDraw = True
                        self.camImgRect.autoDraw = True
                        self.calibInst.autoDraw = True
                        self.display.flip()
            elif keycode == 'space':
                k = ord(' ')
            elif keycode == 'escape':
                k = pylink.ESC_KEY
            elif keycode == 'tab':
                k = ord('\t')
            elif keycode in string.ascii_letters:
                k = ord(keycode)
            elif k == pylink.JUNK_KEY:
                # any other unrecognized key maps to 0
                k = 0

            # plus/equal & minus signs for CR adjustment
            if keycode in ['num_add', 'equal']: k = ord('+')
            if keycode in ['num_subtract', 'minus']: k = ord('-')

            # modifier code 256 signals Alt is held
            if modifier['alt']: mod = 256
            else: mod = 0

            ky.append(pylink.KeyInput(k, mod))
            #event.clearEvents()
        return ky
Exemple #34
0
# open an EDF data file on the Host PC
tk.openDataFile('smp_test.edf')

# open a window to calibrate the tracker
pylink.openGraphics()
tk.doTrackerSetup()
pylink.closeGraphics()

tk.sendCommand('sample_rate 1000')  # set sampling rate to 1000 Hz

# make sure gaze, HREF, and raw (PUPIL) data is available over the link
tk.sendCommand(
    'link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,PUPIL,HREF,AREA,STATUS,INPUT')

# start recording; arguments select samples/events written to file and
# shared over the link (1=yes, 0=no)
error = tk.startRecording(1, 1, 1, 1)
pylink.pumpDelay(100)  # cache some samples for event parsing

# open a plain text file to write the sample data
# NOTE(review): presumably closed later in the script — not visible here
text_file = open('sample_data.csv', 'w')

t_start = tk.trackerTime()  # current tracker time (ms)
smp_time = -1  # timestamp of the last sample written; -1 = none yet
while True:
    # break after 10 seconds have elapsed
    if tk.trackerTime() - t_start > 3000:
        break

    # poll the latest samples
    dt = tk.getNewestSample()
    if dt is not None:
        if dt.isRightSample():
Exemple #35
0
 def stopEyeTracking(self):
     """Finish recording for the current trial.

     Drops out of realtime mode, then waits 100 ms so the tracker
     buffers the final events before recording is stopped.
     """
     pylink.endRealTimeMode()
     pylink.pumpDelay(100)
     self.tracker.stopRecording()
Exemple #36
0
def block(expInfo, practice, block_number):
    ################ Block structure (after choosing a card) ################
    ##Preparing the next trials
    if not dummyMode:  #if not (dummyMode or practice):
        present_message("calibration")
        #waitForKey(variables.ser)
        tk.doTrackerSetup()  #Calibrating the eyetracker
    core.wait(0.5)
    # io.clearEvents(device_label='all')  #Flushing the buffer. In the final experiment to be replaced by the following line
    variables.ser.reset_input_buffer()

    ## Presenting the "remember: the instructions are..." message (according to mapping)
    if variables.Mapping["KeyYes"] == 'r':
        present_message("explanation_remember_yes_right")
    else:
        present_message("explanation_remember_yes_left")

    core.wait(0.5)
    waitForKey(variables.ser)
    variables.ser.reset_input_buffer()

    ##Creating a randomized List of questions for one block
    card_number = (card_input['card_selected'])
    questionlist = Cards[int(
        card_number
    )]["questions_asked"]  # Getting the List of questions according to the card selected as defined in the dictionary
    random.shuffle(questionlist)  # Randomizing the list
    print(card_input['card_selected'])  #For testing only

    ## Setting number of initial trial to 1
    trial_number = 1

    present_message(
        "start_ready")  # "Einrichtung abgeschlossen. Zum STARTEN: RECHTS"
    waitForKey(variables.ser)
    variables.ser.reset_input_buffer()
    wordlist = []  # flushing wordlist used later for intentionality check
    blockData = []  # For storing data of ALL trials within the block

    ##Setting up a trial-counter and a loop for a block of 10 trials
    while trial_number <= trial_max:
        '''thr1 = Thread(target=readSer)
        thr1.daemon = True
        thr1.start()'''

        ##Defining the word used in the respective trial from the randomized wordlist
        word_number = questionlist[int(
            trial_number - 1
        )]  # Choosing the 1st, 2nd 3rd... word from the (randomized) list. This variable could also be just the variable "word" used below but I defined an extra variable for more clarity.
        word = Questions[int(
            word_number
        )]["text"]  # The translation of the words number into actual text (according to the "Questions" dictionary)
        wordlist.append(word)
        # print(word) #for testing only
        core.wait(0.05)

        tk.setOfflineMode()
        pylink.pumpDelay(50)
        err = tk.startRecording(1, 1, 1, 1)
        pylink.pumpDelay(50)
        eyeTracked = tk.eyeAvailable()
        if eyeTracked == 2:
            eyeTracked = 1

        ##Writing information on the trial into a data dictionary
        trialData = {
            "Subject_No": expInfo['Subject'],
            "BlockNumber": block_number,
            "TrialNumber": trial_number,
            "ITIoffset": round(variables.ITI500, 4),
            "CardNumber": card_number,
            **variables.Mapping  # Adds informations concerning the mapping of the keys for yes/no and the colors of the effects (circles)
        }
        trialData.update(
            trial(word, variables.Mapping["DelayEffectLeft"],
                  variables.Mapping["DelayEffectRight"])
        )  # Calling trial function and appending data from this trial to data dictionary.
        trialData.update(
            {
                "ResponseCorrect":
                checkCorrectness(word_number, card_number, trialData,
                                 variables.Mapping)
            }
        )  # Checking whether response was correct and appending this information (boolean) to data
        trialData.update({
            "TrueResponseWouldBe":
            checkTrueResponse(word_number, card_number, variables.Mapping)
        })
        trialData.update({
            "DelayEffectLeft":
            variables.Mapping["DelayEffectLeft"],
            "DelayEffectRight":
            variables.Mapping["DelayEffectRight"]
        })
        tk.sendCommand('clear_screen 0')
        if not practice:
            #sendBehavData(data)  # Writing data to the behaviour-file and sending it (if not in dummy mode) to the eyetracker.
            blockData.append(trialData)
        tk.stopRecording()
        tk.sendMessage('TRIAL_RESULT')
        pylink.pumpDelay(50)

        trial_number = trial_number + 1

        variables.ser.reset_input_buffer()  # flush the input buffer
        #variables.q.queue.clear()  # flushing the queue

    ## sequence of showing the cards image and asking whether it was the correct one ##
    present_message("card_image")  # Your card was probably:
    core.wait(4)
    image_presented = show_cardimage(card_number,
                                     block_number)  # Present image of card
    core.wait(5)
    if variables.Mapping['KeyYes'] == 'r':
        present_message("card_correct_right")
    else:
        present_message("card_correct_left")
    decision = waitForDecision(variables.ser)
    '''
    ## sequence asking whether there were any unintentional reactions and if so which ##
    if variables.Mapping['KeyYes'] == 'r':
        present_message("unintentional_response_right")
    else:
        present_message("unintentional_response_left")
    core.wait(0.7)
    anyUnintentional = checkMeaning(waitForDecision(variables.ser))
    if anyUnintentional == True:
        #whichUnintentional = "TEST123" # for testing ONLY. Must not be activre oin final experiment
        whichUnintentional = formUnintentional(wordlist)  # Present form asking for which responses were unintentional
    else:
        whichUnintentional = "None"
    '''

    whichTrue = formTrue(
        wordlist)  # Present form asking for which responses were unintentional

    WIN.mouseVisible = False

    if not practice:
        for storedTrialData in blockData:
            storedTrialData.update(
                {
                    "CardImagePresented":
                    image_presented,
                    "CardImageReportedlyCorrect":
                    checkMeaning(decision),
                    # "AnyUnintentionalReactions": anyUnintentional,
                    "whichUnintentionalReactions(form)":
                    whichTrue[blockData.index(storedTrialData)]
                }
            )  # Appending information  to data on the presented card image (int) and whether participant evaluated it to be the correct card (boolean)
            sendBehavData(
                storedTrialData
            )  # Writing data to the behaviour-file and sending it (if not in dummy mode) to the eyetracker.
        #sendBehavData("ENDofBLOCK")
        print(len(blockData))  # For Testing only