def movie_setup(self, window):
    """Prepare the movie stimulus and route EyeLink graphics through the window.

    Parameters:
    window -- an open PsychoPy window on which the movie will be drawn.

    Returns (window, movie, frame_time) where `movie` is a
    visual.MovieStim3 and `frame_time` is the *bound method*
    movie.getCurrentFrameTime (callers must invoke it to get a time).

    Side effects: hides the mouse cursor, stores movie size/centering
    offsets on self, and registers the PsychoPy calibration graphics
    with pylink.
    """
    window.mouseVisible = False
    # Set up the movie stimulus: muted, upright, play once.
    movie = visual.MovieStim3(window, self.video_path, flipVert=False,
                              flipHoriz=False, noAudio=True, loop=False)
    self.movie_x, self.movie_y = movie.size
    # Offsets that center the movie on the screen.
    # TODO(review): confirm screen_width/screen_height hold the actual
    # display resolution (original carried a "GET ACTUAL PARAMETERS" TODO).
    self.align_x = self.screen_width / 2 - self.movie_x / 2
    self.align_y = self.screen_height / 2 - self.movie_y / 2
    # Call custom calibration method to coordinate screens: EyeLink
    # camera/calibration graphics are drawn into this PsychoPy window.
    screen_share = EyeLinkCoreGraphicsPsychoPy(self.tracker, window)
    pylink.openGraphicsEx(screen_share)
    # NOTE(review): this binds the method itself, not a frame-time value;
    # presumably intentional so callers can poll it — confirm.
    frame_time = movie.getCurrentFrameTime
    # color theme of the calibration display
    # pylink.setCalibrationColors((255,255,255), (0,0,0))
    return window, movie, frame_time
def calibrate(self, cnum=13, paval=1000):
    """Run EyeLink calibration with PsychoPy-drawn targets.

    cnum  -- number of calibration points (3, 5, 9 or 13; default 13).
    paval -- calibration pacing, i.e. how long each target must be
             fixated (default 1000).
    """
    # Build the custom calibration display first so the dummy path can
    # use it too.
    display = psychocal.psychocal(self.sres[0], self.sres[1],
                                  self.tracker, self.win)
    # Guard clause: without a real tracker connection, just note the
    # dummy run and bail out.
    if not self.realconnect:
        display.dummynote()
        return
    # Configure point layout and pacing, then hand the display to pylink
    # and enter the tracker's setup/calibration loop.
    self.tracker.setCalibrationType('HV{}'.format(cnum))
    self.tracker.setAutoCalibrationPacing(paval)
    pylink.openGraphicsEx(display)
    self.tracker.doTrackerSetup(self.sres[0], self.sres[1])
def drift_correct(self, pos=None, fix_triggered=False):
    """<DOC>
    Performs drift correction and falls back to the calibration screen if
    necessary

    Keyword arguments:
    pos -- the coordinate (x,y tuple) of the drift correction dot or None
           for the display center (default = None)
    fix_triggered -- a boolean indicating whether drift correction should
                     be fixation triggered, rather than spacebar triggered
                     (default = False)

    Returns:
    True on success, False on failure

    Exceptions:
    Raises an exceptions.runtime_error on error
    </DOC>"""
    # Generate custom calibration stimuli
    genv = eyelink_display.calibration_display(self.sres[0], self.sres[1],
                                               self.tracker, self.win)
    if fix_triggered:
        return self.fix_triggered_drift_correction(pos)
    if pos is None:
        # Default to the display center. (The original referenced the
        # undefined names `w` and `h` here, which raised a NameError.)
        pos = self.sres[0] / 2, self.sres[1] / 2
    pylink.openGraphicsEx(genv)
    # doDriftCorrect returns 0 on success; report a boolean as the
    # docstring promises (the original discarded the result).
    return pylink.getEYELINK().doDriftCorrect(pos[0], pos[1], 0, 1) == 0
def initialize_graphics(self):
    """Register the PsychoPyCustomDisplay with pylink.

    Must be called during the setup phase, before any calibration or
    recording is attempted.
    """
    # The tracker must be idle before the custom display is attached.
    self.set_offline_mode()
    pl.openGraphicsEx(self.genv)
def calibrate():
    """Calibrate the eye-tracker using PsychoPy stimuli.

    Relies on module-level globals: w, h, tracker, win, realconnect,
    cnum and paval (TODO confirm — none are defined in this function).
    """
    # Generate custom calibration stimuli
    genv = eyelink_display.calibration_display(w, h, tracker, win)
    if realconnect:
        # Set calibration type
        pylink.getEYELINK().setCalibrationType('HV%d' % (cnum))
        # Set calibration pacing
        pylink.getEYELINK().setAutoCalibrationPacing(paval)
        # Announce calibration mode on the console; parenthesized so the
        # statement is valid under both Python 2 and Python 3.
        print('*' * 150)
        print('Calibration Mode')
        print('*' * 150)
        # Execute custom calibration display
        pylink.openGraphicsEx(genv)
        # Calibrate
        pylink.getEYELINK().doTrackerSetup(w, h)
    else:
        genv.dummynote()
def calibrate(self, cnum=13, paval=1000):
    """
    Calibrates eye-tracker using psychopy stimuli.

    :param cnum: Number of points to use for calibration. Options are
        3, 5, 9, 13.
    :type cnum: int
    :param paval: Pacing of calibration, i.e. how long you have to
        fixate each target in milliseconds.
    :type paval: int
    """
    # Generate custom calibration stimuli
    genv = eyelink_display.calibration_display(self.sres[0], self.sres[1],
                                               self.tracker, self.win)
    if self.realconnect:
        # Set calibration type
        calst = 'HV{}'.format(cnum)
        self.tracker.setCalibrationType(calst)
        # Set calibration pacing
        self.tracker.setAutoCalibrationPacing(paval)
        # Announce calibration mode on the console; parenthesized so the
        # statement is valid under both Python 2 and Python 3.
        print('*' * 150)
        print('Calibration Mode')
        print('*' * 150)
        # Execute custom calibration display
        pylink.openGraphicsEx(genv)
        # Calibrate
        self.tracker.doTrackerSetup(self.sres[0], self.sres[1])
    else:
        genv.dummynote()
def calibrate(self, cnum=13, paval=1000):
    """
    Calibrates eye-tracker using psychopy stimuli.

    :param cnum: Number of points to use for calibration. Options are
        3, 5, 9, 13.
    :type cnum: int
    :param paval: Pacing of calibration, i.e. how long you have to
        fixate each target.
    :type paval: int
    """
    # The custom calibration display is needed on both branches.
    cal_display = psychocal.psychocal(self.sres[0], self.sres[1],
                                      self.tracker, self.win)
    if not self.realconnect:
        # No live tracker: just record that this was a dummy run.
        cal_display.dummynote()
        return
    # Point layout and per-target pacing, then run the tracker's own
    # setup/calibration routine using our display.
    self.tracker.setCalibrationType('HV{}'.format(cnum))
    self.tracker.setAutoCalibrationPacing(paval)
    pylink.openGraphicsEx(cal_display)
    self.tracker.doTrackerSetup(self.sres[0], self.sres[1])
def _trackerCreate(self):
    """Helper method not directly called in EyeScript scripts in general

    configures the Eyelink eyetracker: connects (or falls back to a
    stub), attaches VisionEgg graphics, applies tracker_* settings from
    this object's mapping interface, and opens the data file.
    """
    # Fall back to a stub when no tracker is reachable (RuntimeError) or
    # pylink lacks EyeLink (AttributeError), so scripts still run.
    try:
        self.tracker = pylink.EyeLink()
    except (RuntimeError,AttributeError):
        self.tracker = EyetrackerStub()
    #This tells the tracker to use VisionEgg to display Eyelink graphics
    #including calibration points, camera images, etc.
    self.eyelinkGraphics = EyeLinkCoreGraphicsVE(self.screen,self.tracker)
    pylink.openGraphicsEx(self.eyelinkGraphics)
    self.eyelinkGraphics.setCalibrationColors(self['color'],self['bgcolor'])
    # NOTE(review): `self` is treated as a mapping here (iteritems,
    # __getitem__); presumably this class subclasses a dict-like config —
    # confirm. Keys of the form "tracker_<method>" invoke
    # self.tracker.<method>(value).
    for key,value in self.iteritems():
        command = key.split('_',1)
        if len(command) > 1 and command[0] == 'tracker':
            getattr(self.tracker,command[1])(value)
    if self['heuristic_filter'] == 'off':
        self.tracker.setHeuristicFilterOff()
    else:
        self.tracker.setHeuristicFilterOn()
    #Set whether beeps should be played during drift correction
    pylink.setDriftCorrectSounds(*self['setDriftCorrectSounds'])
    # open the datafile on the operator pc
    if self['subject'] > 0:
        self.tracker.openDataFile(self.edffile)
    # Tell the host PC the stimulus display's pixel coordinate range.
    self.tracker.sendCommand("screen_pixel_coords = 0 0 %d %d"%(self['screen_size']))
def initTk(expInfo):
    """Open the EDF data file and configure the EyeLink host for recording.

    expInfo -- dict with at least a 'Subject' key used to name the EDF.
    Returns the EDF file name created on the host PC.

    Relies on module-level globals: tk (EyeLink connection), WIN
    (PsychoPy window), edfDataFolder, scnWIDTH, scnHEIGHT — TODO confirm
    they are defined at import time.
    """
    ## Data host ##
    if not os.path.exists(edfDataFolder):
        os.makedirs(edfDataFolder)
    dataFileName = (expInfo['Subject'] + '.EDF')
    tk.openDataFile(dataFileName)
    # add personalized data file header (preamble text)
    tk.sendCommand("add_file_preamble_text 'Psychopy Waaaaazzzzzzooooo'")
    tk.sendMessage('Subject_No %s' % expInfo["Subject"])
    ## Init parameters
    genv = EyeLinkCoreGraphicsPsychoPy(tk, WIN)
    pylink.openGraphicsEx(genv)
    print("pylink initiated")  # For testing
    tk.setOfflineMode()
    tk.sendCommand('sample_rate 500')
    # Report display geometry to the host (0-based inclusive coords).
    tk.sendCommand("screen_pixel_coords = 0 0 %d %d" % (scnWIDTH - 1, scnHEIGHT - 1))
    tk.sendMessage("DISPLAY_COORDS = 0 0 %d %d" % (scnWIDTH - 1, scnHEIGHT - 1))
    #tk.sendCommand("hostVer = HV5")
    tk.sendCommand("recording_parse_type = GAZE")
    # Tracker model: 1=EyeLink I, 2=EyeLink II, 3=newer models.
    eyelinkVer = tk.getTrackerVersion()
    if eyelinkVer >= 2:
        tk.sendCommand('select_parser_configuration 0')
    # Host software version determines whether HTARGET data is available.
    hostVer = 0
    if eyelinkVer == 3:
        tvstr = tk.getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        hostVer = int(float(tvstr[(vindex + len("EYELINK CL")):].strip()))
    # Event/sample filters: what gets written to the EDF and what is
    # streamed over the link.
    tk.sendCommand(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON,INPUT"
    )
    tk.sendCommand(
        "link_event_filter = LEFT,RIGHT,FIXATION,FIXUPDATE,SACCADE,BLINK,BUTTON,INPUT"
    )
    if hostVer >= 4:
        tk.sendCommand(
            "file_sample_data = LEFT,RIGHT,GAZE,GAZERES,PUPIL,HREF,AREA,STATUS,HTARGET,INPUT"
        )
        tk.sendCommand(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,PUPIL,HREF,AREA,STATUS,HTARGET,INPUT"
        )
    else:
        tk.sendCommand(
            "file_sample_data = LEFT,RIGHT,GAZE,GAZERES,PUPIL,HREF,AREA,STATUS,INPUT"
        )
        tk.sendCommand(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,PUPIL,HREF,AREA,STATUS,INPUT"
        )
    return dataFileName
def __init__(self, win, fileName, folderName, dummy=False):
    """Connect to the EyeLink tracker and attach PsychoPy graphics.

    win        -- an open PsychoPy window used for calibration graphics.
    fileName   -- name of the EDF data file to use.
    folderName -- local folder for the retrieved data file.
    dummy      -- when True, open a simulated (offline) connection
                  instead of connecting to the host at 100.1.1.1.
    """
    self.win = win
    self.text = visual.TextStim(self.win, text="hello")
    # `pylink.EyeLink(None)` opens a dummy connection for development
    # without tracker hardware.
    if not dummy:
        self.tk = pylink.EyeLink("100.1.1.1")
    else:
        self.tk = pylink.EyeLink(None)
    self.dataFileName = fileName
    self.dataFolderName = folderName
    # Route EyeLink calibration/camera graphics through the window.
    genv = EyeLinkCoreGraphicsPsychoPy(self.tk, win)
    pylink.openGraphicsEx(genv)
def eyetrackersetup(tk, window):
    """Attach PsychoPy graphics and push the standard tracking config to tk.

    tk     -- a connected pylink.EyeLink instance.
    window -- an open PsychoPy window for calibration graphics.
    Returns tk (configured in place).
    """
    genv = EyeLinkCoreGraphicsPsychoPy(tk, window)
    pylink.openGraphicsEx(genv)
    tk.setOfflineMode()
    tk.sendCommand('sample_rate 500')
    # NOTE(review): 'scnHeigth' looks like a typo, but it must match the
    # attribute name actually defined in the config module — verify there.
    tk.sendCommand("screen_pixel_coords = 0 0 %d %d" % (config.scnWidth-1, config.scnHeigth-1))
    tk.sendMessage("DISPLAY_COORDS = 0 0 %d %d" % (config.scnWidth-1, config.scnHeigth-1))
    # specify the calibration type, H3, HV3, HV5, HV13 (HV = horiztonal/vertical),
    tk.sendCommand("calibration_type = HV9")
    # tk.setCalibrationType('HV9') also works, see the Pylink manual
    # specify the proportion of subject display to calibrate/validate (OPTIONAL, useful for wide screen monitors)
    # tk.sendCommand("calibration_area_proportion 0.85 0.83")
    # tk.sendCommand("validation_area_proportion 0.85 0.83")
    # Using a button from the EyeLink Host PC gamepad to accept calibration/dirft check target (optional)
    # tk.sendCommand("button_function 5 'accept_target_fixation'")
    # the model of the tracker, 1-EyeLink I, 2-EyeLink II, 3-Newer models (100/1000Plus/DUO)
    eyelinkVer = tk.getTrackerVersion()
    # turn off scenelink camera stuff (EyeLink II/I only)
    if eyelinkVer == 2:
        tk.sendCommand("scene_camera_gazemap = NO")
    # Set the tracker to parse Events using "GAZE" (or "HREF") data
    tk.sendCommand("recording_parse_type = GAZE")
    # Online parser configuration: 0-> standard/coginitve, 1-> sensitive/psychophysiological
    # the Parser for EyeLink I is more conservative, see below
    # [see Eyelink User Manual, Section 4.3: EyeLink Parser Configuration]
    if eyelinkVer>=2:
        tk.sendCommand('select_parser_configuration 0')
    # get Host tracking software version
    hostVer = 0
    if eyelinkVer == 3:
        tvstr = tk.getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        hostVer = int(float(tvstr[(vindex + len("EYELINK CL")):].strip()))
    # specify the EVENT and SAMPLE data that are stored in EDF or retrievable from the Link
    # See Section 4 Data Files of the EyeLink user manual
    tk.sendCommand("file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON,INPUT")
    tk.sendCommand("link_event_filter = LEFT,RIGHT,FIXATION,FIXUPDATE,SACCADE,BLINK,BUTTON,INPUT")
    # HTARGET (remote-mode target data) is only available on host
    # software version 4 and newer.
    if hostVer>=4:
        tk.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET,INPUT")
        tk.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET,INPUT")
    else:
        #Add pupil
        tk.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,INPUT")
        tk.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,INPUT")
    return tk
def display_setup(self, window):
    """Hide the cursor and route EyeLink graphics through the window.

    window -- an open PsychoPy window; returned unchanged after the
    EyeLink calibration graphics have been attached to it.
    """
    # TODO(review): original assigned self.screen_width/height to unused
    # locals with a "GET ACTUAL PARAMETERS" note — screen geometry is
    # still not sent to the tracker here; confirm where that happens.
    window.mouseVisible = False
    # EyeLink camera/calibration graphics are drawn into this window.
    screen_share = EyeLinkCoreGraphicsPsychoPy(self.tracker, window)
    pylink.openGraphicsEx(screen_share)
    # pylink.setTargetSize(int(surf.get_rect().w/150), int(surf.get_rect().w/500));
    return window
def _create_display(self):
    """Build and register the custom PsychoPy display for pylink.

    Returns the PsychopyCustomDisplay instance, or None when
    eye-tracking is disabled or pylink is unavailable.
    """
    # Nothing to do without a tracker or without pylink installed.
    if not self.eyetracker_on or not PYLINK_AVAILABLE:
        return None
    custom_display = PsychopyCustomDisplay(self.tracker, self.win, self.settings)
    pylink.openGraphicsEx(custom_display)
    return custom_display
def calibrate(self, beep=True, prompt=True): """Calibrate the eyetracker Parameters ---------- beep : bool If True, beep when calibration begins. prompt : bool If True, a standard screen prompt will be shown. Returns ------- fname : str | None Filename on the Eyelink of the started data file. Will be None if start is None. Notes ----- At the start of this function, the previous Eyelink file will be closed (if one is open), a new file will be opened, and recording will be started. """ # stop recording and close old file (if open), then start new one if self.recording: self.stop() # open file to record *before* running calibration so it gets saved! fname = self._open_file() if prompt: self._ec.screen_prompt('We will now perform a screen calibration.' '<br><br>Press a button to continue.') fname = None logger.info('EyeLink: Entering calibration') self._ec.flush() # enter Eyetracker camera setup mode, calibration and validation self._ec.flip() cal = _Calibrate(self._ec, beep) pylink.openGraphicsEx(cal) cal.setup_event_handlers() cal.play_beep(0) if not (self.dummy_mode or self._fake_calibration): self._eyelink.doTrackerSetup() cal.release_event_handlers() self._ec.flip() logger.info('EyeLink: Completed calibration') self._ec.flush() self._start_recording() return fname
def _setup(self):
    """The EyeLink-specific part of the setup process.

    Attaches the custom display, configures link filters and saccade
    detection thresholds from project params (P), opens the EDF file,
    and enters realtime mode.
    """
    self.version = self.getTrackerVersionString()
    self.__custom_display = ELCustomDisplay()
    openGraphicsEx(self.__custom_display)
    # Drop any stale keypresses and make sure the tracker is idle before
    # sending configuration commands.
    flushGetkeyQueue()
    self.setOfflineMode()
    # Report display geometry (0-based inclusive pixel coordinates).
    self.sendCommand("screen_pixel_coords = 0 0 {0} {1}".format(P.screen_x-1, P.screen_y-1))
    self.setLinkEventFilter("FIXATION,SACCADE,BLINK,LEFT,RIGHT")
    self.setLinkEventData("GAZE, GAZERES, AREA, VELOCITY") # Enables fix/sacc start events
    self.openDataFile(self.edf_filename)
    self.write("DISPLAY_COORDS 0 0 {0} {1}".format(P.screen_x-1, P.screen_y-1))
    # Saccade parser thresholds come from the project's params module.
    self.setSaccadeVelocityThreshold(P.saccadic_velocity_threshold)
    self.setAccelerationThreshold(P.saccadic_acceleration_threshold)
    self.setMotionThreshold(P.saccadic_motion_threshold)
    beginRealTimeMode(10)
def calibrate(self):
    """Calibrate the eyetracker using PsychoPy stimuli.

    Uses self.w/self.h (display size), self.cnum (point count) and
    self.paval (pacing) set elsewhere on this object. Does nothing
    tracker-side when self.realconnect is False.
    """
    # Generate custom calibration stimuli. The original passed a bare
    # `win` here, inconsistent with the self.* attributes used on the
    # same call — using self.win to match the sibling implementations.
    self.genv = eyelink_display.calibration_display(self.w, self.h,
                                                    self.tracker, self.win)
    if self.realconnect:
        # Set calibration type
        pylink.getEYELINK().setCalibrationType('HV%d' % (self.cnum))
        # Set calibration pacing
        pylink.getEYELINK().setAutoCalibrationPacing(self.paval)
        # Execute custom calibration display
        pylink.openGraphicsEx(self.genv)
        # Calibrate
        pylink.getEYELINK().doTrackerSetup(self.w, self.h)
def __init__(self, display, resolution=DISPSIZE, data_file=LOGFILENAME + ".edf",
    fg_color=FGC, bg_color=BGC, eventdetection=EVENTDETECTION,
    saccade_velocity_threshold=35, saccade_acceleration_threshold=9500,
    force_drift_correct=True, pupil_size_mode=EYELINKPUPILSIZEMODE, **args):
    """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker

    Connects to the tracker (once per process, via the module-level
    _eyelink), configures event/sample filters, and opens the EDF file.
    """
    # try to import copy docstring (but ignore it if it fails, as we do
    # not need it for actual functioning of the code)
    try:
        copy_docstr(BaseEyeTracker, libeyelink)
    except:
        # we're not even going to show a warning, since the copied
        # docstring is useful for code editors; these load the docs
        # in a non-verbose manner, so warning messages would be lost
        pass
    global _eyelink
    # Make sure that we have a valid data file. The local_data_file may
    # contain a folder. The eyelink_data_file is only a basename, i.e.
    # without folder. The eyelink_data_file must be at most eight characters
    # and end with a `.edf` extension.
    self.local_data_file = data_file
    self.eyelink_data_file = os.path.basename(data_file)
    stem, ext = os.path.splitext(self.eyelink_data_file)
    if len(stem) > 8 or ext.lower() != '.edf':
        raise Exception(
            "The EyeLink cannot handle filenames longer than eight "
            "characters (excluding '.edf' extension).")
    # properties
    self.display = display
    self.fontsize = 18
    self.scr = Screen(disptype=DISPTYPE, mousevisible=False)
    self.kb = Keyboard(keylist=["escape", "q"], timeout=1)
    self.resolution = resolution
    self.recording = False
    # NOTE: 'treshold' spelling is kept — other pygaze code reads these
    # attribute names.
    self.saccade_velocity_treshold = saccade_velocity_threshold
    self.saccade_acceleration_treshold = saccade_acceleration_threshold
    self.eye_used = None
    self.left_eye = 0
    self.right_eye = 1
    self.binocular = 2
    self.pupil_size_mode = pupil_size_mode
    self.prevsample = (-1, -1)
    self.prevps = -1
    # event detection properties
    # degrees; maximal distance from fixation start (if gaze wanders beyond
    # this, fixation has stopped)
    self.fixtresh = 1.5
    # milliseconds; amount of time gaze has to linger within self.fixtresh
    # to be marked as a fixation
    self.fixtimetresh = 100
    # degrees per second; saccade velocity threshold
    self.spdtresh = self.saccade_velocity_treshold
    # degrees per second**2; saccade acceleration threshold
    self.accthresh = self.saccade_acceleration_treshold
    self.set_detection_type(eventdetection)
    # weighted distance, used for determining whether a movement is due to
    # measurement error (1 is ok, higher is more conservative and will
    # result in only larger saccades to be detected)
    self.weightdist = 10
    # distance between participant and screen in cm
    self.screendist = SCREENDIST
    # distance between participant and screen in cm
    self.screensize = SCREENSIZE
    self.pixpercm = (self.resolution[0]/float(self.screensize[0]) + \
        self.resolution[1]/float(self.screensize[1])) / 2.0
    # only initialize eyelink once
    if _eyelink == None:
        try:
            _eyelink = pylink.EyeLink()
        except:
            raise Exception(
                "Error in libeyelink.libeyelink.__init__(): Failed to "
                "connect to the tracker!")
    # determine software version of tracker
    self.tracker_software_ver = 0
    self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
    if self.eyelink_ver == 3:
        tvstr = pylink.getEYELINK().getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        self.tracker_software_ver = int(float(tvstr[(vindex + \
            len("EYELINK CL")):].strip()))
    if self.eyelink_ver == 1:
        self.eyelink_model = 'EyeLink I'
    elif self.eyelink_ver == 2:
        self.eyelink_model = 'EyeLink II'
    elif self.eyelink_ver == 3:
        self.eyelink_model = 'EyeLink 1000'
    else:
        self.eyelink_model = 'EyeLink (model unknown)'
    # Open graphics
    self.eyelink_graphics = EyelinkGraphics(self, _eyelink)
    pylink.openGraphicsEx(self.eyelink_graphics)
    # Optionally force drift correction. For some reason this must be done
    # as (one of) the first things, otherwise a segmentation fault occurs.
    if force_drift_correct:
        self.send_command('driftcorrect_cr_disable = OFF')
    # Set pupil-size mode
    if self.pupil_size_mode == 'area':
        pylink.getEYELINK().setPupilSizeDiameter(False)
    elif self.pupil_size_mode == 'diameter':
        pylink.getEYELINK().setPupilSizeDiameter(True)
    else:
        raise Exception(
            "pupil_size_mode should be 'area' or 'diameter', not %s" \
            % self.pupil_size_mode)
    pylink.getEYELINK().openDataFile(self.eyelink_data_file)
    pylink.flushGetkeyQueue()
    pylink.getEYELINK().setOfflineMode()
    # notify eyelink of display resolution
    self.send_command("screen_pixel_coords = 0 0 %d %d" % \
        (self.resolution[0], self.resolution[1]))
    # get some configuration stuff
    if self.eyelink_ver >= 2:
        self.send_command("select_parser_configuration 0")
        if self.eyelink_ver == 2:
            # turn off scenelink camera stuff
            self.send_command("scene_camera_gazemap = NO")
    # set EDF file contents (this specifies which data is written to the EDF
    # file)
    self.send_command(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
    else:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # set link data (this specifies which data is sent through the link and
    # thus can be used in gaze contingent displays)
    self.send_command(
        "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
    else:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    # not quite sure what this means (according to Sebastiaan Mathot, it
    # might be the button that is used to end drift correction?)
    self.send_command("button_function 5 'accept_target_fixation'")
    if not self.connected():
        raise Exception(
            "Error in libeyelink.libeyelink.__init__(): Failed to connect "
            "to the eyetracker!")
def eyelink_child_function( qTo , qFrom , window_size = [200,200] ,
    window_position = [0,0] , stim_display_res = [1920,1080] ,
    stim_display_position = [1920,0] , calibration_display_size = [1920,1080] ,
    calibration_dot_size = 10 , eyelink_ip = '100.1.1.1' ,
    edf_file_name = 'temp.edf' , edf_path = './_Data/temp.edf' ,
    saccade_sound_file = '_Stimuli/stop.wav' , blink_sound_file = '_Stimuli/stop.wav' ):
    """Child-process entry point that owns the EyeLink connection.

    Communicates with the parent process over the two queues: qTo
    (commands in) and qFrom (events/images out). Runs forever; exits via
    exit_safely() on a 'quit' message or window close.

    NOTE(review): the list-valued defaults are mutable default
    arguments; harmless here only because they are never mutated —
    confirm before relying on them.
    """
    # Imports are local because this function runs in a spawned child
    # process.
    import sdl2
    import sdl2.ext
    import math
    import OpenGL.GL as gl
    import sdl2.sdlmixer
    import pylink
    import numpy
    import sys
    import shutil
    import subprocess
    import time
    import os
    import array
    from PIL import Image
    from PIL import ImageDraw
    # Keep OS X app-nap from throttling this background process.
    try:
        import appnope
        appnope.nope()
    except:
        pass
    byteify = lambda x, enc: x.encode(enc)
    # Small operator window used for camera-image display.
    sdl2.SDL_Init(sdl2.SDL_INIT_VIDEO)
    window = sdl2.ext.Window("eyelink",size=window_size,position=window_position,flags=sdl2.SDL_WINDOW_SHOWN)
    windowID = sdl2.SDL_GetWindowID(window.window)
    windowSurf = sdl2.SDL_GetWindowSurface(window.window)
    sdl2.ext.fill(windowSurf.contents,sdl2.pixels.SDL_Color(r=0, g=0, b=0, a=255))
    window.refresh()
    for i in range(10):
        sdl2.SDL_PumpEvents() #to show the windows
    sdl2.SDL_Init(sdl2.SDL_INIT_AUDIO)
    sdl2.sdlmixer.Mix_OpenAudio(44100, sdl2.sdlmixer.MIX_DEFAULT_FORMAT, 2, 1024)
    class Sound:
        # Thin SDL_mixer wrapper tracking whether its channel is playing.
        def __init__(self, fileName):
            self.sample = sdl2.sdlmixer.Mix_LoadWAV(sdl2.ext.compat.byteify(fileName, "utf-8"))
            self.started = False
        def play(self):
            self.channel = sdl2.sdlmixer.Mix_PlayChannel(-1, self.sample, 0)
            self.started = True
        def still_playing(self):
            if self.started:
                if sdl2.sdlmixer.Mix_Playing(self.channel):
                    return True
                else:
                    self.started = False
                    return False
            else:
                return False
    saccade_sound = Sound(saccade_sound_file)
    blink_sound = Sound(blink_sound_file)
    def exit_safely():
        # Stop recording, pull the EDF off the host, and terminate.
        # NOTE(review): 'eyelink' is a closure variable; the locals()
        # check only succeeds once it has been bound — confirm intended.
        if 'eyelink' in locals():
            if eyelink.isRecording()==0:
                eyelink.stopRecording()
            eyelink.setOfflineMode()
            eyelink.closeDataFile()
            eyelink.receiveDataFile(edf_file_name,'temp.edf')
            eyelink.close()
        if os.path.isfile('temp.edf'):
            shutil.move('temp.edf', edf_path)
        # if os.path.isfile(edf_path):
        #     subprocess.call('./edf2asc -y ./'+edf_path,shell=True)
        sys.exit() #process gets hung here if called when showing images from eyelink
    edf_path = './_Data/temp.edf' #temporary default location, to be changed later when ID is established
    # Retry the connection until it succeeds, still honoring 'quit'.
    done = False
    while not done:
        try:
            print '\nAttempting to connect to eyelink (check that wifi is off!)'
            eyelink = pylink.EyeLink(eyelink_ip)
            done = True
        except:
            while not qTo.empty():
                message = qTo.get()
                if message=='quit':
                    exit_safely()
                else:
                    qTo.put(message)
    print 'Eyelink connected'
    eyelink.sendCommand('select_parser_configuration 0')# 0--> standard (cognitive); 1--> sensitive (psychophysical)
    eyelink.sendCommand('sample_rate 500')
    eyelink.setLinkEventFilter("SACCADE,BLINK,FIXATION,LEFT,RIGHT")
    eyelink.openDataFile(edf_file_name)
    # Map the calibration area to the center of the stimulus display.
    eyelink.sendCommand("screen_pixel_coords = %d %d %d %d" %(stim_display_res[0]/2 - calibration_display_size[0]/2 , stim_display_res[1]/2 - calibration_display_size[1]/2 , stim_display_res[0]/2 + calibration_display_size[0]/2 , stim_display_res[1]/2 + calibration_display_size[1]/2 ))
    eyelink.sendMessage("DISPLAY_COORDS 0 0 %d %d" %(stim_display_res[0],stim_display_res[1]))
    # eyelink.sendCommand("saccade_velocity_threshold = 60")
    # eyelink.sendCommand("saccade_acceleration_threshold = 19500")
    class EyeLinkCoreGraphicsPySDL2(pylink.EyeLinkCustomDisplay):
        # Custom display that forwards all drawing to the parent process
        # via qFrom instead of drawing locally.
        # NOTE(review): pylink.EyeLinkCustomDisplay.__init__ is never
        # called here — confirm the base class tolerates that.
        def __init__(self):
            self.__target_beep__ = Sound('_Stimuli/type.wav')
            self.__target_beep__done__ = Sound('qbeep.wav')
            self.__target_beep__error__ = Sound('error.wav')
            if sys.byteorder == 'little':
                self.byteorder = 1
            else:
                self.byteorder = 0
            self.imagebuffer = array.array('I')
            self.pal = None
            self.__img__ = None
        def record_abort_hide(self):
            pass
        def play_beep(self,beepid):
            # if beepid == pylink.DC_TARG_BEEP or beepid == pylink.CAL_TARG_BEEP:
            if beepid == pylink.CAL_TARG_BEEP:
                self.__target_beep__.play()
            elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
                self.__target_beep__error__.play()
            else:# CAL_GOOD_BEEP or DC_GOOD_BEEP
                self.__target_beep__done__.play()
        def clear_cal_display(self):
            # print 'clear_cal_display'
            qFrom.put('clear_cal_display')
        def setup_cal_display(self):
            # print 'setup_cal_display'
            qFrom.put('setup_cal_display')
        def exit_cal_display(self):
            # print 'exit_cal_display'
            qFrom.put('exit_cal_display')
        def erase_cal_target(self):
            # print 'erase_cal_target'
            qFrom.put('erase_cal_target')
        def draw_cal_target(self, x, y):
            # print 'draw_cal_target'
            qFrom.put(['draw_cal_target',x,y])
        def setup_image_display(self, width, height):
            # print 'eyelink: setup_image_display'
            self.img_size = (width,height)
            return 0
        def exit_image_display(self):
            # print 'eyelink: exit_image_display'
            pass
        def image_title(self,text):
            # print 'eyelink: image_title'
            pass
        def set_image_palette(self, r,g,b):
            # Build a packed-int palette for the camera image stream.
            # print 'eyelink: set_image_palette'
            self.imagebuffer = array.array('I')
            sz = len(r)
            i = 0
            self.pal = []
            while i < sz:
                rf = int(b[i])
                gf = int(g[i])
                bf = int(r[i])
                if self.byteorder:
                    self.pal.append((rf<<16) | (gf<<8) | (bf))
                else:
                    self.pal.append((bf<<24) | (gf<<16) | (rf<<8)) #for mac
                i = i+1
        def draw_image_line(self, width, line, totlines,buff):
            # Accumulate palette-mapped scanlines; on the last line, ship
            # the assembled camera image to the parent as a numpy array.
            # print 'eyelink: draw_image_line'
            i = 0
            while i < width:
                if buff[i]>=len(self.pal):
                    buff[i] = len(self.pal)-1
                self.imagebuffer.append(self.pal[buff[i]&0x000000FF])
                i = i+1
            if line == totlines:
                img = Image.fromstring('RGBX', (width,totlines), self.imagebuffer.tostring())
                img = img.convert('RGBA')
                self.__img__ = img.copy()
                self.__draw__ = ImageDraw.Draw(self.__img__)
                self.draw_cross_hair() #inherited method, calls draw_line and draw_losenge
                qFrom.put(['image',numpy.array(self.__img__)])
                self.__img__ = None
                self.__draw__ = None
                self.imagebuffer = array.array('I')
        def get_color_from_index(self,colorindex):
            if colorindex == pylink.CR_HAIR_COLOR:
                return (255,255,255,255)
            elif colorindex == pylink.PUPIL_HAIR_COLOR:
                return (255,255,255,255)
            elif colorindex == pylink.PUPIL_BOX_COLOR:
                return (0,255,0,255)
            elif colorindex == pylink.SEARCH_LIMIT_BOX_COLOR:
                return (255,0,0,255)
            elif colorindex == pylink.MOUSE_CURSOR_COLOR:
                return (255,0,0,255)
            else:
                return (0,0,0,0)
        def draw_line(self,x1,y1,x2,y2,colorindex):
            # Clamp to the camera-image bounds, then scale to the actual
            # PIL image size before drawing.
            # print 'eyelink: draw_line'
            if x1<0: x1 = 0
            if x2<0: x2 = 0
            if y1<0: y1 = 0
            if y2<0: y2 = 0
            if x1>self.img_size[0]: x1 = self.img_size[0]
            if x2>self.img_size[0]: x2 = self.img_size[0]
            if y1>self.img_size[1]: y1 = self.img_size[1]
            if y2>self.img_size[1]: y2 = self.img_size[1]
            imr = self.__img__.size
            x1 = int((float(x1)/float(self.img_size[0]))*imr[0])
            x2 = int((float(x2)/float(self.img_size[0]))*imr[0])
            y1 = int((float(y1)/float(self.img_size[1]))*imr[1])
            y2 = int((float(y2)/float(self.img_size[1]))*imr[1])
            color = self.get_color_from_index(colorindex)
            self.__draw__.line( [(x1,y1),(x2,y2)] , fill=color)
        def draw_lozenge(self,x,y,width,height,colorindex):
            # Rounded rectangle: two straight edges plus two arcs on the
            # short sides, scaled like draw_line.
            # print 'eyelink: draw_lozenge'
            color = self.get_color_from_index(colorindex)
            imr = self.__img__.size
            x=int((float(x)/float(self.img_size[0]))*imr[0])
            width=int((float(width)/float(self.img_size[0]))*imr[0])
            y=int((float(y)/float(self.img_size[1]))*imr[1])
            height=int((float(height)/float(self.img_size[1]))*imr[1])
            if width>height:
                rad = height/2
                self.__draw__.line([(x+rad,y),(x+width-rad,y)],fill=color)
                self.__draw__.line([(x+rad,y+height),(x+width-rad,y+height)],fill=color)
                clip = (x,y,x+height,y+height)
                self.__draw__.arc(clip,90,270,fill=color)
                clip = ((x+width-height),y,x+width,y+height)
                self.__draw__.arc(clip,270,90,fill=color)
            else:
                rad = width/2
                self.__draw__.line([(x,y+rad),(x,y+height-rad)],fill=color)
                self.__draw__.line([(x+width,y+rad),(x+width,y+height-rad)],fill=color)
                clip = (x,y,x+width,y+width)
                self.__draw__.arc(clip,180,360,fill=color)
                clip = (x,y+height-width,x+width,y+height)
                self.__draw__.arc(clip,360,180,fill=color)
        def get_mouse_state(self):
            # pos = pygame.mouse.get_pos()
            # state = pygame.mouse.get_pressed()
            # return (pos,state[0])
            pass
        def get_input_key(self):
            # Translate keypress messages from the parent into pylink
            # KeyInput objects for the setup/calibration UI.
            ky=[]
            while not qTo.empty():
                message = qTo.get()
                if message=='quit':
                    print 'received message to exit'
                    exit_safely()
                elif message=='voice':
                    ky.append(pylink.KeyInput(32,0)) #voicekey response translated to space keypress (for drift correct)
                elif message[0]=='keycode':
                    keysym = message[1]
                    keycode = keysym.sym
                    if keycode == sdl2.SDLK_F1: keycode = pylink.F1_KEY
                    elif keycode == sdl2.SDLK_F2: keycode = pylink.F2_KEY
                    elif keycode == sdl2.SDLK_F3: keycode = pylink.F3_KEY
                    elif keycode == sdl2.SDLK_F4: keycode = pylink.F4_KEY
                    elif keycode == sdl2.SDLK_F5: keycode = pylink.F5_KEY
                    elif keycode == sdl2.SDLK_F6: keycode = pylink.F6_KEY
                    elif keycode == sdl2.SDLK_F7: keycode = pylink.F7_KEY
                    elif keycode == sdl2.SDLK_F8: keycode = pylink.F8_KEY
                    elif keycode == sdl2.SDLK_F9: keycode = pylink.F9_KEY
                    elif keycode == sdl2.SDLK_F10: keycode = pylink.F10_KEY
                    elif keycode == sdl2.SDLK_PAGEUP: keycode = pylink.PAGE_UP
                    elif keycode == sdl2.SDLK_PAGEDOWN: keycode = pylink.PAGE_DOWN
                    elif keycode == sdl2.SDLK_UP: keycode = pylink.CURS_UP
                    elif keycode == sdl2.SDLK_DOWN: keycode = pylink.CURS_DOWN
                    elif keycode == sdl2.SDLK_LEFT: keycode = pylink.CURS_LEFT
                    elif keycode == sdl2.SDLK_RIGHT: keycode = pylink.CURS_RIGHT
                    elif keycode == sdl2.SDLK_BACKSPACE: keycode = ord('\b')
                    elif keycode == sdl2.SDLK_RETURN: keycode = pylink.ENTER_KEY
                    elif keycode == sdl2.SDLK_ESCAPE: keycode = pylink.ESC_KEY
                    elif keycode == sdl2.SDLK_TAB: keycode = ord('\t')
                    elif keycode == pylink.JUNK_KEY: keycode = 0
                    ky.append(pylink.KeyInput(keycode,keysym.mod))
            return ky
    custom_display = EyeLinkCoreGraphicsPySDL2()
    pylink.openGraphicsEx(custom_display)
    # Gaze-target bookkeeping for the main service loop.
    new_gaze_target = False
    gaze_target = numpy.array(calibration_display_size)/2.0
    real_gaze_target = gaze_target * 2.0
    gaze_target_criterion = calibration_dot_size
    do_sounds = False
    report_saccades = False
    report_blinks = False
    last_message_time = time.time()
    last_start_blink_time = time.time()
    # Main service loop: handle SDL events, parent commands, and link data.
    while True:
        sdl2.SDL_PumpEvents()
        for event in sdl2.ext.get_events():
            if event.type==sdl2.SDL_WINDOWEVENT:
                if (event.window.event==sdl2.SDL_WINDOWEVENT_CLOSE):
                    exit_safely()
        if not qTo.empty():
            message = qTo.get()
            if message=='quit':
                exit_safely()
            elif message[0]=='edf_path':
                edf_path = message[1]
            elif message[0]=='do_sounds':
                do_sounds = message[1]
            elif message[0]=='report_saccades':
                report_saccades = message[1]
            elif message[0]=='report_blinks':
                report_blinks = message[1]
            elif message[0]=='send_message':
                eyelink.sendMessage(message[1])
            elif message=='do_drift_correct':
                if eyelink.isRecording()==0:
                    eyelink.stopRecording()
                try:
                    error = eyelink.doDriftCorrect(stim_display_res[0]/2,stim_display_res[1]/2,0,1)
                    # print error
                    if error != 27:
                        qFrom.put('drift_correct_complete')
                        eyelink.startRecording(1,1,1,1) #this retuns immediately takes 10-30ms to actually kick in on the tracker
                    else:
                        qFrom.put('do_calibration')
                except:
                    qFrom.put('do_calibration')
            elif message[0]=='new_gaze_target':
                # print message
                new_gaze_target = True
                gaze_target = numpy.array(message[1])
                gaze_target_criterion = numpy.array(message[2])
                # print message
                # print 'waiting for gaze confirmation'
            elif message[0]=='accept_trigger':
                eyelink.accept_trigger()
            elif message=='do_calibration':
                do_sounds = False
                if eyelink.isRecording()==0:
                    eyelink.stopRecording()
                eyelink.doTrackerSetup()
                qFrom.put('calibration_complete')
        if eyelink.isRecording()==0: #stupid, I know, but eyelink.isRecording() returns 0 if it *is* indeed recording!
            eye_data = eyelink.getNextData()
            # if eye_data==pylink.SAMPLE_TYPE:
            #     eye_sample = eyelink.getFloatData()
            #     gaze = None
            #     if eye_sample.isRightSample():
            #         gaze = eye_sample.getRightEye().getGaze()
            #     elif eye_sample.isLeftSample():
            #         gaze = eye_sample.getLeftEye().getGaze()
            #     if gaze!=None:
            #         if gaze[0]!=-32768.0:
            #             gaze_dist_from_gaze_target = numpy.linalg.norm(numpy.array(gaze)-gaze_target)
            #             if new_gaze_target:
            #                 if gaze_dist_from_gaze_target<gaze_target_criterion:
            #                     print ['gaze_target_met',gaze,gaze_target_criterion,gaze_target,gaze_dist_from_gaze_target]
            #                     qFrom.put(['gaze_target_met',gaze_target])
            #                     new_gaze_target = False
            #             else:
            #                 qFrom.put(['gaze_targetNotMet',gaze_target])
            #                 print ['gaze_targetNotMet',gaze,gaze_target,gaze_dist_from_gaze_target,gaze_target_criterion]
            if eye_data==pylink.ENDSACC:
                eye_sample = eyelink.getFloatData()
                gaze_start = eye_sample.getStartGaze()
                gaze_end = eye_sample.getEndGaze()
                print ['eyelink: saccade',gaze_start,gaze_end,gaze_target]
                # -32768.0 marks missing gaze data on either endpoint.
                if (gaze_start[0]!=-32768.0) & (gaze_end[0]!=-32768.0):
                    gaze_dist_from_gaze_target = numpy.linalg.norm(numpy.array(gaze_end)-gaze_target)
                    real_gaze_dist_from_gaze_target = numpy.linalg.norm(numpy.array(gaze_end)-real_gaze_target)
                    print ['real distance', real_gaze_dist_from_gaze_target]
                    if gaze_dist_from_gaze_target<1000:
                        if new_gaze_target:
                            if gaze_dist_from_gaze_target<gaze_target_criterion:
                                # print ['gaze_target_met',gaze_end,gaze_target_criterion,gaze_target,gaze_dist_from_gaze_target]
                                qFrom.put(['gaze_target_met',gaze_target])
                                new_gaze_target = False
                            elif gaze_dist_from_gaze_target>gaze_target_criterion:
                                if report_saccades:
                                    qFrom.put('gaze_target_lost')
                                    print('gaze target lost')
                                if (not saccade_sound.still_playing()) and (not blink_sound.still_playing()):
                                    if do_sounds:
                                        saccade_sound.play()
                        else:
                            if report_saccades:
                                qFrom.put(['smaller_saccade',gaze_dist_from_gaze_target,])
            elif eye_data==pylink.STARTBLINK:
                last_start_blink_time = time.time()
            elif eye_data==pylink.ENDBLINK:
                # Ignore very short blink artifacts (< 0.1 s).
                if (time.time()-last_start_blink_time)>.1:
                    if report_blinks:
                        qFrom.put('blink')
                        # print 'eyelink: blink'
                    if (not saccade_sound.still_playing()) and (not blink_sound.still_playing()):
                        if do_sounds:
                            blink_sound.play()
def doSim(self, trial, road, duration, tau, doEyetrack):
    """Run one steering-simulation trial.

    Sets up (optionally) the EyeLink tracker, builds the road/sky/wheel
    scene, waits for a keypress, runs the driving presentation for
    `duration` seconds, then (optionally) stops recording and transfers
    the EDF file back from the tracker host.

    Parameters:
        trial      -- trial number; used in the EDF filename and output filename
        road       -- road index; selects 'road<road>.txt' waypoint file
        duration   -- go-loop duration in seconds
        tau        -- steering delay in seconds; converted to a ring-buffer length
        doEyetrack -- boolean; whether to connect to and record from the EyeLink
    """
    # Measure sample rate in order to calculate delay buffer
    sample_rate = self.screen.measure_refresh_rate(2.0)
    print "Sample rate: " + str(sample_rate)
    #sample_rate = 60
    self.doEyetrack = doEyetrack
    # Ring buffer holds tau seconds' worth of wheel positions (delayed steering).
    self.pos_ring = RingBuffer(self.center, int(math.floor(tau * sample_rate)) + 1)
    print("Ring Buffer:: size: " + str(self.pos_ring.size))
    if doEyetrack:
        import pylink
        from EyeLinkCoreGraphicsVE import EyeLinkCoreGraphicsVE
        self.tracker = pylink.EyeLink()
        if self.tracker == None:
            print "Error: Eyelink is not connected"
            sys.exit()
        genv = EyeLinkCoreGraphicsVE(self.screen, self.tracker)
        pylink.openGraphicsEx(genv)
        # Opens the EDF file on the tracker host.
        edfFileName = "TRIAL" + str(trial) + ".EDF"
        self.tracker.openDataFile(edfFileName)
        pylink.flushGetkeyQueue()
        self.tracker.sendCommand("screen_pixel_coords = 0 0 %d %d" %
                                 (VisionEgg.config.VISIONEGG_SCREEN_W,
                                  VisionEgg.config.VISIONEGG_SCREEN_H))
        # Parse the tracker-software version out of the version string;
        # it decides which sample-data fields are available (HTARGET needs >= 4).
        tracker_software_ver = 0
        eyelink_ver = self.tracker.getTrackerVersion()
        if eyelink_ver == 3:
            tvstr = self.tracker.getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            tracker_software_ver = int(
                float(tvstr[(vindex + len("EYELINK CL")):].strip()))
        if eyelink_ver >= 2:
            self.tracker.sendCommand("select_parser_configuration 0")
            if eyelink_ver == 2:  # turn off scenelink camera stuff
                self.tracker.sendCommand("scene_camera_gazemap = NO")
        else:
            self.tracker.sendCommand("saccade_velocity_threshold = 35")
            self.tracker.sendCommand("saccade_acceleration_threshold = 9500")
        # set EDF file contents
        self.tracker.sendCommand(
            "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
        )
        if tracker_software_ver >= 4:
            self.tracker.sendCommand(
                "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
            )
        else:
            self.tracker.sendCommand(
                "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        # set link data (used for gaze cursor)
        self.tracker.sendCommand(
            "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if tracker_software_ver >= 4:
            self.tracker.sendCommand(
                "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
            )
        else:
            self.tracker.sendCommand(
                "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
        # Full setup/calibration only once per session; afterwards just drift-correct.
        if not self.doneSetup:
            self.tracker.doTrackerSetup()
            self.doneSetup = True
        else:
            while 1:
                try:
                    error = self.tracker.doDriftCorrect(
                        self.screen.size[0] / 2, self.screen.size[1] / 2, 1, 1)
                    if error != 27:  # ?? from example (27 = ESC pressed, redo setup)
                        break
                    else:
                        self.tracker.doTrackerSetup()
                except:
                    break
    self.screen.parameters.bgcolor = 106.0 / 255.0, 147.0 / 255.0, 0.0
    # Load road data from file and create an image
    roadArray = numpy.loadtxt('road' + str(road) + '.txt')
    # Convert to a Path
    roadPath = ImagePath.Path(
        map(lambda xy: (xy[0], xy[1]), roadArray.tolist()))
    # Use Path to create a plot of the road
    im = Image.new("RGB", (2000, 100), (50, 50, 50))
    draw = ImageDraw.Draw(im)
    # draw each side of the road separately
    # NOTE(review): assumes the waypoint file holds 4000 left-edge points
    # followed by the right-edge points — confirm against the road files.
    draw.line(roadPath[:4000], fill=(200, 200, 200))
    draw.line(roadPath[4000:], fill=(200, 200, 200))
    del draw
    # Lay out a road texture in the x-z plane
    roadTexture = Texture(im)
    del im
    eye_height = 2.5
    vertices = [(-10, -eye_height, 0), (-10, -eye_height, -1000),
                (10, -eye_height, 0), (10, -eye_height, -1000)]
    rect = TextureStimulus3D(texture=roadTexture,
                             lowerleft=vertices[0],
                             lowerright=vertices[1],
                             upperleft=vertices[2],
                             upperright=vertices[3])
    # We will use these later for our camera transforms
    self.camera_matrix = ModelView()
    self.frame_timer = FrameTimer()
    self.outf = open(
        'steersim-' + str(trial) + '-' + str(road) + '-out.txt', 'wb')
    # Viewport for the road
    viewport3D = Viewport(
        screen=self.screen,
        projection=SimplePerspectiveProjection(fov_x=75.2),
        camera_matrix=self.camera_matrix,
        stimuli=[rect])
    # Construct a sky (upper half of the screen)
    sky_l = 0
    sky_r = self.screen.size[0]
    sky_t = self.screen.size[1]
    sky_b = self.screen.size[1] / 2
    sky_vertices = [(sky_l, sky_t, 0), (sky_r, sky_t, 0), (sky_r, sky_b, 0),
                    (sky_l, sky_b, 0)]
    sky = Rectangle3D(color=(144.0 / 255.0, 190.0 / 255.0, 1.0),
                      vertex1=sky_vertices[0],
                      vertex2=sky_vertices[1],
                      vertex3=sky_vertices[2],
                      vertex4=sky_vertices[3])
    wheelTexture = Texture('wheel.png')
    self.wheel = TextureStimulus(texture=wheelTexture,
                                 internal_format=gl.GL_RGBA,
                                 position=(self.center, -75),
                                 anchor='center')
    # display the sky in its own viewport
    viewport2D = Viewport(screen=self.screen)
    viewport2D.parameters.stimuli = [sky, self.wheel]
    self.init_state()
    # Splash screen: wait for a keypress before starting the trial.
    askText = Text(text='Press a key to start',
                   anchor='center',
                   position=(self.center, self.screen.size[1] / 2))
    splash = Viewport(screen=self.screen)
    splash.parameters.stimuli = [askText]
    self.askForNext = Presentation(go_duration=(0.5, 'seconds'),
                                   viewports=[splash])
    self.askForNext.add_controller(
        None, None, FunctionController(during_go_func=self.wait_for_key))
    self.askForNext.parameters.enter_go_loop = True
    self.askForNext.run_forever()
    self.simPres = Presentation(go_duration=(duration, 'seconds'),
                                viewports=[viewport3D, viewport2D],
                                handle_event_callbacks=[
                                    (pygame.KEYDOWN, self.check_keypress)
                                ])
    self.simPres.add_controller(
        None, None, FunctionController(during_go_func=self.update))
    if doEyetrack:
        # NOTE(review): SYNCTIME is computed from two back-to-back
        # currentTime() calls, so the message offset is ~0 — confirm intent.
        startTime = pylink.currentTime()
        self.tracker.sendMessage("SYNCTIME %d" %
                                 (pylink.currentTime() - startTime))
        error = self.tracker.startRecording(1, 1, 1, 1)
        self.tracker.sendMessage("PRES %d START" % (trial))
    self.simPres.go()
    if doEyetrack:
        self.tracker.sendMessage("PRES %d END" % (trial))
        self.tracker.stopRecording()
        # File transfer and cleanup!
        self.tracker.setOfflineMode()
        pylink.msecDelay(500)
        # Close the file and transfer it to Display PC
        self.tracker.closeDataFile()
        self.tracker.receiveDataFile(edfFileName, edfFileName)
    self.outf.close()
    if self.quit:
        raise SystemExit
def __init__(self, win, clock, sj="TEST", autoCalibration=True,
             saccadeSensitivity=HIGH, calibrationType='HV9',
             calibrationTargetColor=WHITE,
             calibrationBgColor=BLACK, CalibrationSounds=False):
    '''Connect to the EyeLink, open the EDF file and run tracker setup.

    win: psychopy visual window used for the experiment
    clock: psychopy time clock recording time for whole experiment
    sj: Subject identifier string (affects EDF filename)
    autoCalibration:
      True: enable auto-pacing during calibration
    saccadeSensitivity:
      HIGH: Pursuit and neurological work
      LOW:  Cognitive research
    calibrationType:
      H3: Horizontal 3-point
      HV3: 3-point calibration, poor linearization
      HV5: 5-point calibration, poor at corners
      HV9: 9-point calibration, best overall
    calibrationTargetColor and calibrationBgColor:
      RGB tuple, i.e., (255,0,0) for Red
      One of: BLACK, WHITE, GRAY
    CalibrationSounds:
      True: enable feedback sounds when calibrating
    '''
    self.edfFileName = str(sj) + ".EDF"
    print(self.edfFileName)
    # Query window size / units / monitor from psychopy's runtime info.
    inf = info.RunTimeInfo("J", "1", win, refreshTest=None,
                           userProcsDetailed=False)
    self.screenSize = inf['windowSize_pix']
    self.units = inf['windowUnits']
    self.monitorName = inf['windowMonitor.name']
    monitor = monitors.Monitor(self.monitorName)
    print("Connecting to eyetracker.")
    self.tracker = pylink.EyeLink()
    # Offset between the experiment clock and the tracker clock, so
    # tracker timestamps can be mapped into experiment time.
    self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
    print("Loading custom graphics")
    genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, self.screenSize)
    self.tracker.openDataFile(self.edfFileName)
    pylink.flushGetkeyQueue()
    self.tracker.setOfflineMode()
    self.tracker.sendCommand("screen_pixel_coords = 0 0 %d %d" %
                             (tuple(self.screenSize)))
    self.tracker.setCalibrationType(calibrationType)
    self.tracker.sendMessage("DISPLAY_COORDS 0 0 %d %d" %
                             (tuple(self.screenSize)))
    # Tracker-software version decides which sample fields exist (HTARGET >= 4).
    eyelink_ver = self.tracker.getTrackerVersion()
    if eyelink_ver == 3:
        tvstr = self.tracker.getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        tracker_software_ver = int(
            float(tvstr[(vindex + len("EYELINK CL")):].strip()))
    else:
        tracker_software_ver = 0
    if eyelink_ver >= 2:
        self.tracker.sendCommand("select_parser_configuration %d" %
                                 saccadeSensitivity)
    else:
        # Older trackers take explicit thresholds instead of a parser profile.
        if saccadeSensitivity == HIGH:
            svt, sat = 22, 5000
        else:
            svt, sat = 30, 9500
        self.tracker.sendCommand("saccade_velocity_threshold = %d" % svt)
        self.tracker.sendCommand("saccade_acceleration_threshold = %d" % sat)
    if eyelink_ver == 2:  # turn off scenelink camera stuff
        self.tracker.sendCommand("scene_camera_gazemap = NO")
    # set EDF file contents
    self.tracker.sendCommand(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
    if tracker_software_ver >= 4:
        self.tracker.sendCommand(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
    else:
        self.tracker.sendCommand(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # set link data (used for gaze cursor)
    self.tracker.sendCommand(
        "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if tracker_software_ver >= 4:
        self.tracker.sendCommand(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
    else:
        self.tracker.sendCommand(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    # Set the calibration settings:
    pylink.setCalibrationColors(calibrationTargetColor, calibrationBgColor)
    if CalibrationSounds:
        pylink.setCalibrationSounds("", "", "")
        pylink.setDriftCorrectSounds("", "off", "off")
    else:
        pylink.setCalibrationSounds("off", "off", "off")
        pylink.setDriftCorrectSounds("off", "off", "off")
    # BUG FIX: these are method calls — the original accessed the bound
    # methods without calling them, so auto-calibration pacing was never set.
    if autoCalibration:
        self.tracker.enableAutoCalibration()
    else:
        self.tracker.disableAutoCalibration()
    win.flip()
    print("Opening graphics")
    pylink.openGraphicsEx(genv)
    print("Begining tracker setup")
    self.tracker.doTrackerSetup()
    win.flip()
def eyelinkChildFunction(qTo, qFrom, windowSize=[200, 200],
                         windowPosition=[0, 0], stimDisplayRes=[1920, 1080],
                         calibrationDisplaySize=[1920, 1080],
                         calibrationDotSize=10, eyelinkIp='100.1.1.1',
                         edfFileName='temp.edf', edfPath='./_Data/temp.edf',
                         saccadeSoundFile='_Stimuli/stop.wav',
                         blinkSoundFile='_Stimuli/stop.wav'):
    """Child-process worker that owns the EyeLink connection.

    Communicates with the parent process over two queues: commands arrive
    on `qTo` ('quit', 'startRecording', 'doCalibration', ['doDriftCorrect',
    (x, y)], ['newGazeTarget', pos, criterion], ...) and results/events
    ('blink', ['gazeTargetMet', ...], camera images, calibration-display
    requests) are pushed back on `qFrom`.

    Opens a small SDL2 window, connects to the tracker at `eyelinkIp`,
    installs a custom pylink display (EyeLinkCoreGraphicsPySDL2), then
    loops forever servicing queue commands and online saccade/blink events.
    Never returns; exits the process via exitSafely().
    """
    import sdl2
    import sdl2.ext
    import math
    import OpenGL.GL as gl
    import sdl2.sdlmixer
    import pylink
    import numpy
    import sys
    import shutil
    import subprocess
    import time
    import os
    import array
    from PIL import Image
    from PIL import ImageDraw
    try:
        # Keep OS X from app-napping this background process; best-effort.
        import appnope
        appnope.nope()
    except:
        pass
    byteify = lambda x, enc: x.encode(enc)
    sdl2.SDL_Init(sdl2.SDL_INIT_VIDEO)
    window = sdl2.ext.Window("eyelink", size=windowSize,
                             position=windowPosition,
                             flags=sdl2.SDL_WINDOW_SHOWN)
    windowID = sdl2.SDL_GetWindowID(window.window)
    windowSurf = sdl2.SDL_GetWindowSurface(window.window)
    sdl2.ext.fill(windowSurf.contents,
                  sdl2.pixels.SDL_Color(r=0, g=0, b=0, a=255))
    window.refresh()
    for i in range(10):
        sdl2.SDL_PumpEvents()  # to show the windows
    sdl2.SDL_Init(sdl2.SDL_INIT_AUDIO)
    sdl2.sdlmixer.Mix_OpenAudio(44100, sdl2.sdlmixer.MIX_DEFAULT_FORMAT, 2,
                                1024)

    class Sound:
        # Thin wrapper over SDL_mixer: load a WAV once, play on any free
        # channel, and track whether the last playback is still audible.
        def __init__(self, fileName):
            self.sample = sdl2.sdlmixer.Mix_LoadWAV(
                sdl2.ext.compat.byteify(fileName, "utf-8"))
            self.started = False

        def play(self):
            self.channel = sdl2.sdlmixer.Mix_PlayChannel(-1, self.sample, 0)
            self.started = True

        def stillPlaying(self):
            if self.started:
                if sdl2.sdlmixer.Mix_Playing(self.channel):
                    return True
                else:
                    self.started = False
                    return False
            else:
                return False

    saccadeSound = Sound(saccadeSoundFile)
    blinkSound = Sound(blinkSoundFile)

    def exitSafely():
        # NOTE(review): 'eyelink' in locals() checks THIS function's locals,
        # where `eyelink` is a closure variable, so the cleanup branch may
        # never run — verify on the target Python version.
        if 'eyelink' in locals():
            if eyelink.isRecording() == 0:
                eyelink.stopRecording()
            eyelink.setOfflineMode()
            eyelink.closeDataFile()
            eyelink.receiveDataFile(edfFileName, 'temp.edf')
            eyelink.close()
            if os.path.isfile('temp.edf'):
                shutil.move('temp.edf', edfPath)
        sys.exit()  # process gets hung here if called when showing images from eyelink

    pylink.setDriftCorrectSounds('off', 'off', 'off')
    pylink.setCalibrationSounds('off', 'off', 'off')
    edfPath = './_Data/temp.edf'  # temporary default location, to be changed later when ID is established
    # Retry the connection until it succeeds, while still honoring 'quit'.
    done = False
    while not done:
        try:
            eyelink = pylink.EyeLink(eyelinkIp)
            done = True
        except:
            while not qTo.empty():
                message = qTo.get()
                if message == 'quit':
                    exitSafely()
                else:
                    qTo.put(message)
    eyelink.sendCommand(
        'select_parser_configuration 0'
    )  # 0--> standard (cognitive); 1--> sensitive (psychophysical)
    eyelink.setLinkEventFilter("SACCADE,BLINK,FIXATION,LEFT,RIGHT")
    eyelink.openDataFile(edfFileName)
    # Center the calibration area within the stimulus display.
    eyelink.sendCommand(
        "screen_pixel_coords = %d %d %d %d" %
        (stimDisplayRes[0] / 2 - calibrationDisplaySize[0] / 2,
         stimDisplayRes[1] / 2 - calibrationDisplaySize[1] / 2,
         stimDisplayRes[0] / 2 + calibrationDisplaySize[0] / 2,
         stimDisplayRes[1] / 2 + calibrationDisplaySize[1] / 2))
    eyelink.sendMessage("DISPLAY_COORDS 0 0 %d %d" %
                        (stimDisplayRes[0], stimDisplayRes[1]))
    eyelink.sendCommand("saccade_velocity_threshold = 60")
    eyelink.sendCommand("saccade_acceleration_threshold = 19500")

    class EyeLinkCoreGraphicsPySDL2(pylink.EyeLinkCustomDisplay):
        # Custom pylink display: instead of drawing itself, it forwards
        # calibration-display requests to the parent process via qFrom and
        # receives keypresses back via qTo (see get_input_key).
        def __init__(self):
            if sys.byteorder == 'little':
                self.byteorder = 1
            else:
                self.byteorder = 0
            self.imagebuffer = array.array('I')
            self.pal = None
            self.__img__ = None

        def record_abort_hide(self):
            pass

        def play_beep(self, beepid):
            # Feedback sounds intentionally disabled.
            pass

        def clear_cal_display(self):
            qFrom.put('clearCalDisplay')

        def setup_cal_display(self):
            qFrom.put('setupCalDisplay')

        def exit_cal_display(self):
            qFrom.put('exitCalDisplay')

        def erase_cal_target(self):
            qFrom.put('eraseCalTarget')

        def draw_cal_target(self, x, y):
            qFrom.put(['drawCalTarget', x, y])

        def setup_image_display(self, width, height):
            self.img_size = (width, height)
            return (0)

        def exit_image_display(self):
            pass

        def image_title(self, text):
            pass

        def set_image_palette(self, r, g, b):
            # Build a packed-int palette for the camera image; channel order
            # depends on host byte order (the reversed branch is for mac).
            self.imagebuffer = array.array('I')
            sz = len(r)
            i = 0
            self.pal = []
            while i < sz:
                rf = int(b[i])
                gf = int(g[i])
                bf = int(r[i])
                if self.byteorder:
                    self.pal.append((rf << 16) | (gf << 8) | (bf))
                else:
                    self.pal.append((bf << 24) | (gf << 16) | (rf << 8))  # for mac
                i = i + 1

        def draw_image_line(self, width, line, totlines, buff):
            # Accumulate one scanline; on the last line, build the full
            # camera image, overlay the crosshair, and ship a 4x-upscaled
            # numpy copy to the parent for display.
            i = 0
            while i < width:
                if buff[i] >= len(self.pal):
                    buff[i] = len(self.pal) - 1
                self.imagebuffer.append(self.pal[buff[i] & 0x000000FF])
                i = i + 1
            if line == totlines:
                img = Image.fromstring('RGBX', (width, totlines),
                                       self.imagebuffer.tostring())
                img = img.convert('RGBA')
                self.__img__ = img.copy()
                self.__draw__ = ImageDraw.Draw(self.__img__)
                self.draw_cross_hair()  # inherited method, calls draw_line and draw_losenge
                qFrom.put([
                    'image',
                    numpy.array(
                        self.__img__.resize([
                            self.__img__.size[0] * 4, self.__img__.size[1] * 4
                        ], Image.BICUBIC))
                ])
                self.__img__ = None
                self.__draw__ = None
                self.imagebuffer = array.array('I')

        def getColorFromIndex(self, colorindex):
            # Map pylink's crosshair color indices to RGBA tuples.
            if colorindex == pylink.CR_HAIR_COLOR:
                return (255, 255, 255, 255)
            elif colorindex == pylink.PUPIL_HAIR_COLOR:
                return (255, 255, 255, 255)
            elif colorindex == pylink.PUPIL_BOX_COLOR:
                return (0, 255, 0, 255)
            elif colorindex == pylink.SEARCH_LIMIT_BOX_COLOR:
                return (255, 0, 0, 255)
            elif colorindex == pylink.MOUSE_CURSOR_COLOR:
                return (255, 0, 0, 255)
            else:
                return (0, 0, 0, 0)

        def draw_line(self, x1, y1, x2, y2, colorindex):
            # Clamp to the camera-image bounds, then scale from camera
            # coordinates to the (possibly resized) overlay image.
            if x1 < 0:
                x1 = 0
            if x2 < 0:
                x2 = 0
            if y1 < 0:
                y1 = 0
            if y2 < 0:
                y2 = 0
            if x1 > self.img_size[0]:
                x1 = self.img_size[0]
            if x2 > self.img_size[0]:
                x2 = self.img_size[0]
            if y1 > self.img_size[1]:
                y1 = self.img_size[1]
            if y2 > self.img_size[1]:
                y2 = self.img_size[1]
            imr = self.__img__.size
            x1 = int((float(x1) / float(self.img_size[0])) * imr[0])
            x2 = int((float(x2) / float(self.img_size[0])) * imr[0])
            y1 = int((float(y1) / float(self.img_size[1])) * imr[1])
            y2 = int((float(y2) / float(self.img_size[1])) * imr[1])
            color = self.getColorFromIndex(colorindex)
            self.__draw__.line([(x1, y1), (x2, y2)], fill=color)
            return 0

        def draw_lozenge(self, x, y, width, height, colorindex):
            # Stadium/lozenge shape: two straight edges joined by arcs,
            # oriented by whichever dimension is longer.
            color = self.getColorFromIndex(colorindex)
            imr = self.__img__.size
            x = int((float(x) / float(self.img_size[0])) * imr[0])
            width = int((float(width) / float(self.img_size[0])) * imr[0])
            y = int((float(y) / float(self.img_size[1])) * imr[1])
            height = int((float(height) / float(self.img_size[1])) * imr[1])
            if width > height:
                rad = height / 2
                self.__draw__.line([(x + rad, y), (x + width - rad, y)],
                                   fill=color)
                self.__draw__.line([(x + rad, y + height),
                                    (x + width - rad, y + height)],
                                   fill=color)
                clip = (x, y, x + height, y + height)
                self.__draw__.arc(clip, 90, 270, fill=color)
                clip = ((x + width - height), y, x + width, y + height)
                self.__draw__.arc(clip, 270, 90, fill=color)
            else:
                rad = width / 2
                self.__draw__.line([(x, y + rad), (x, y + height - rad)],
                                   fill=color)
                self.__draw__.line([(x + width, y + rad),
                                    (x + width, y + height - rad)],
                                   fill=color)
                clip = (x, y, x + width, y + width)
                self.__draw__.arc(clip, 180, 360, fill=color)
                clip = (x, y + height - width, x + width, y + height)
                self.__draw__.arc(clip, 360, 180, fill=color)
            return 0

        def get_mouse_state(self):
            pass

        def get_input_key(self):
            # Drain the command queue, translating parent-process key events
            # into pylink KeyInput objects for the setup/calibration screens.
            ky = []
            while not qTo.empty():
                message = qTo.get()
                if message == 'button':
                    ky.append(
                        pylink.KeyInput(32, 0)
                    )  # button translated to space keypress (for drift correct)
                elif message[0] == 'keycode':
                    keysym = message[1]
                    keycode = keysym.sym
                    if keycode == sdl2.SDLK_F1:
                        keycode = pylink.F1_KEY
                    elif keycode == sdl2.SDLK_F2:
                        keycode = pylink.F2_KEY
                    elif keycode == sdl2.SDLK_F3:
                        keycode = pylink.F3_KEY
                    elif keycode == sdl2.SDLK_F4:
                        keycode = pylink.F4_KEY
                    elif keycode == sdl2.SDLK_F5:
                        keycode = pylink.F5_KEY
                    elif keycode == sdl2.SDLK_F6:
                        keycode = pylink.F6_KEY
                    elif keycode == sdl2.SDLK_F7:
                        keycode = pylink.F7_KEY
                    elif keycode == sdl2.SDLK_F8:
                        keycode = pylink.F8_KEY
                    elif keycode == sdl2.SDLK_F9:
                        keycode = pylink.F9_KEY
                    elif keycode == sdl2.SDLK_F10:
                        keycode = pylink.F10_KEY
                    elif keycode == sdl2.SDLK_PAGEUP:
                        keycode = pylink.PAGE_UP
                    elif keycode == sdl2.SDLK_PAGEDOWN:
                        keycode = pylink.PAGE_DOWN
                    elif keycode == sdl2.SDLK_UP:
                        keycode = pylink.CURS_UP
                    elif keycode == sdl2.SDLK_DOWN:
                        keycode = pylink.CURS_DOWN
                    elif keycode == sdl2.SDLK_LEFT:
                        keycode = pylink.CURS_LEFT
                    elif keycode == sdl2.SDLK_RIGHT:
                        keycode = pylink.CURS_RIGHT
                    elif keycode == sdl2.SDLK_BACKSPACE:
                        keycode = ord('\b')
                    elif keycode == sdl2.SDLK_RETURN:
                        keycode = pylink.ENTER_KEY
                    elif keycode == sdl2.SDLK_ESCAPE:
                        keycode = pylink.ESC_KEY
                    elif keycode == sdl2.SDLK_TAB:
                        keycode = ord('\t')
                    elif keycode == pylink.JUNK_KEY:
                        keycode = 0
                    ky.append(pylink.KeyInput(keycode, keysym.mod))
            return ky

    customDisplay = EyeLinkCoreGraphicsPySDL2()
    pylink.openGraphicsEx(customDisplay)
    # Gaze-target state: the parent can request notification when a saccade
    # lands within `gazeTargetCriterion` px of `gazeTarget`.
    newGazeTarget = False
    gazeTarget = numpy.array(calibrationDisplaySize) / 2.0
    gazeTargetCriterion = calibrationDotSize
    doSounds = False
    reportSaccades = False
    reportBlinks = False
    lastMessageTime = time.time()
    lastStartBlinkTime = time.time()
    # Main service loop: SDL events, then queue commands, then tracker events.
    while True:
        sdl2.SDL_PumpEvents()
        for event in sdl2.ext.get_events():
            if event.type == sdl2.SDL_WINDOWEVENT:
                if (event.window.event == sdl2.SDL_WINDOWEVENT_CLOSE):
                    exitSafely()
        if not qTo.empty():
            message = qTo.get()
            if message == 'quit':
                exitSafely()
            elif message[0] == 'edfPath':
                edfPath = message[1]
            elif message[0] == 'doSounds':
                doSounds = message[1]
            elif message[0] == 'reportSaccades':
                reportSaccades = message[1]
            elif message[0] == 'reportBlinks':
                reportBlinks = message[1]
            elif message[0] == 'sendMessage':
                eyelink.sendMessage(message[1])
            elif message[0] == 'doDriftCorrect':
                if eyelink.isRecording() == 0:
                    eyelink.stopRecording()
                try:
                    location = message[1]
                    error = eyelink.doDriftCorrect(location[0], location[1],
                                                   0, 1)
                    if error != 27:  # 27 = ESC pressed; fall back to calibration
                        qFrom.put('driftCorrectComplete')
                    else:
                        qFrom.put('doCalibration')
                except:
                    qFrom.put('doCalibration')
            elif message == 'startRecording':
                eyelink.startRecording(
                    1, 1, 1, 1
                )  # this retuns immediately takes 10-30ms to actually kick in on the tracker
                while not (eyelink.isRecording() == 0):
                    pass
                qFrom.put('recordingStarted')
            elif message[0] == 'newGazeTarget':
                newGazeTarget = True
                gazeTarget = numpy.array(message[1])
                gazeTargetCriterion = numpy.array(message[2])
            elif message[0] == 'acceptTrigger':
                eyelink.accept_trigger()
            elif message == 'doCalibration':
                doSounds = False
                if eyelink.isRecording() == 0:
                    eyelink.stopRecording()
                eyelink.doTrackerSetup()
                qFrom.put('calibrationComplete')
        if eyelink.isRecording(
        ) == 0:  # stupid, I know, but eyelink.isRecording() returns 0 if it *is* indeed recording!
            eyeData = eyelink.getNextData()
            if eyeData == pylink.ENDSACC:
                eyeSample = eyelink.getFloatData()
                gazeStartTime = eyeSample.getStartTime()
                gazeStart = eyeSample.getStartGaze()
                gazeEnd = eyeSample.getEndGaze()
                # -32768.0 is the tracker's missing-data sentinel.
                if (gazeStart[0] != -32768.0) & (gazeEnd[0] != -32768.0):
                    gazeDistFromGazeTarget = numpy.linalg.norm(
                        numpy.array(gazeEnd) - gazeTarget)
                    if gazeDistFromGazeTarget < 1000:
                        if newGazeTarget:
                            if gazeDistFromGazeTarget < gazeTargetCriterion:
                                qFrom.put([
                                    'gazeTargetMet', gazeTarget, gazeStartTime
                                ])
                                newGazeTarget = False
                        elif gazeDistFromGazeTarget > gazeTargetCriterion:
                            if reportSaccades:
                                qFrom.put(['gazeTargetLost', gazeTarget])
                            if (not saccadeSound.stillPlaying()) and (
                                    not blinkSound.stillPlaying()):
                                if doSounds:
                                    saccadeSound.play()
            elif eyeData == pylink.STARTBLINK:
                if reportBlinks:
                    qFrom.put('blink')
                if (not saccadeSound.stillPlaying()) and (
                        not blinkSound.stillPlaying()):
                    if doSounds:
                        # NOTE(review): the blink sound is disabled; a second
                        # 'blink' message is queued instead — confirm intent.
                        qFrom.put('blink')
def EyelinkStart(dispsize, Name, win, bits=32, dummy=False,
                 colors=((0, 0, 0), (192, 192, 192))):
    """
    Performs startup routines for the EyeLink 1000 Plus eyetracker.

    **Author** : Wanja Mössing, WWU Münster | [email protected] \n
    *July 2017*

    Parameters:
    -----------
    dispsize : tuple
        two-item tuple width & height in px
    Name : string
        filename for the edf. Doesn't have to, but can, end on '.edf'
        Maximum length is 8 (without '.edf').
        Possible alphanumeric input: 'a-z', 'A-Z', '0-9', '-' & '_'
    win : window object
        You necessarily need to open a psychopy window first!
    bits : integer
        color-depth, defaults to 32
        (currently unused in the body — kept for interface compatibility)
    dummy : boolean
        Run tracker in dummy mode?
    colors : Tuple, Optional.
        Tuple with two RGB triplets
        (currently unused in the body — kept for interface compatibility)

    Returns
    -------
    'el' the tracker object.
        This can be passed to other functions,
        although they can use pylink.getEYELINK() to find it automatically.
    """
    print('. ')
    # get filename: append '.edf' if missing; enforce the 8-char host limit.
    if '.edf' not in Name.lower():
        if len(Name) > 8:
            print('EDF filename too long! (1-8 characters/letters)')
            raise SystemExit
        else:
            Name += '.edf'
    elif '.edf' in Name.lower():
        if len(Name) > 12:
            print('EDF filename too long! (1-8 characters/letters)')
            raise SystemExit
    print('. ')
    # initialize tracker object (None = dummy/simulation mode)
    if dummy:
        el = pylink.EyeLink(None)
    else:
        el = pylink.EyeLink("100.1.1.1")
    print('. ')
    # Open EDF file on host
    el.openDataFile(Name)
    print('. ')
    # set file preamble (records the experiment directory in the EDF header)
    currentdir = path.basename(getcwd())
    FilePreamble = "add_file_preamble_text \'"
    FilePreamble += "Eyetracking Dataset AE Busch WWU Muenster Experiment: "
    FilePreamble += currentdir + "\'"
    el.sendCommand(FilePreamble)
    print('. ')
    # this function calls the custom calibration routine
    # "EyeLinkCoreGraphicsPsychopy.py"
    genv = EyeLinkCoreGraphicsPsychoPy(el, win)
    pylink.openGraphicsEx(genv)
    print('. ')
    # set tracker offline to change configuration
    el.setOfflineMode()
    print('. ')
    # flush old keys
    pylink.flushGetkeyQueue()
    print('. ')
    # set sampling rate
    el.sendCommand('sample_rate 1000')
    print('. ')
    # Sets the display coordinate system and sends mesage to that
    # effect to EDF file;
    el.sendCommand("screen_pixel_coords = 0 0 %d %d" %
                   (dispsize[0] - 1, dispsize[1] - 1))
    el.sendMessage("DISPLAY_COORDS 0 0 %d %d" %
                   (dispsize[0] - 1, dispsize[1] - 1))
    print('. ')
    # select parser configuration for online saccade etc detection
    ELversion = el.getTrackerVersion()
    ELsoftVer = 0
    if ELversion == 3:
        tmp = el.getTrackerVersionString()
        tmpidx = tmp.find('EYELINK CL')
        ELsoftVer = int(float(tmp[(tmpidx + len("EYELINK CL")):].strip()))
    if ELversion >= 2:
        el.sendCommand("select_parser_configuration 0")
        if ELversion == 2:
            # turn off scenelink stuff (that's an EL2 front-cam addon...)
            el.sendCommand("scene_camera_gazemap = NO")
    else:
        el.sendCommand("saccade_velocity_threshold = 35")
        el.sendCommand("saccade_acceleration_threshold = 9500")
    print('. ')
    # set EDF file contents AREA
    el.sendCommand("file_event_filter = LEFT,RIGHT,FIXATION,"
                   "SACCADE,BLINK,MESSAGE,BUTTON,INPUT")
    if ELsoftVer >= 4:
        el.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,HREF,"
                       "AREA,HTARGET,GAZERES,STATUS,INPUT")
    else:
        el.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,HREF,"
                       "AREA,GAZERES,STATUS,INPUT")
    print('. ')
    # set link data (online interaction)AREA
    el.sendCommand("link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,"
                   "BLINK,MESSAGE,BUTTON,INPUT")
    if ELsoftVer >= 4:
        el.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,"
                       "HTARGET,STATUS,INPUT")
    else:
        el.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,"
                       "STATUS,INPUT")
    print('. ')
    # run initial calibration
    # 13-Pt Grid calibration
    el.sendCommand('calibration_type = HV13')
    EyelinkCalibrate(dispsize, el)
    print('. ')
    # put tracker in idle mode and wait 500ms, then really start it.
    el.sendMessage('SETUP_FINISHED')
    el.setOfflineMode()
    pylink.msecDelay(500)
    print('. ')
    # set to realtime mode
    pylink.beginRealTimeMode(200)
    # start recording
    # note: sending everything over the link *potentially* causes buffer
    # overflow. However, with modern PCs and EL1000+ this shouldn't be a real
    # problem
    el.startRecording(1, 1, 1, 1)
    # Parallel-port button readout on the tracker host, configured over the
    # link so FINAL.INI on the host PC does not need editing.
    # NOTE(review): the original comment said "uncomment these lines" but the
    # straight-through-cable commands below are live code.
    # tyical settings for straight-through TTL cable (data pins -> data pins)
    el.sendCommand('write_ioport 0xA 0x20')
    el.sendCommand('create_button 1 8 0x01 0')
    el.sendCommand('create_button 2 8 0x02 0')
    el.sendCommand('create_button 3 8 0x04 0')
    el.sendCommand('create_button 4 8 0x08 0')
    el.sendCommand('create_button 5 8 0x10 0')
    el.sendCommand('create_button 6 8 0x20 0')
    el.sendCommand('create_button 7 8 0x40 0')
    el.sendCommand('create_button 8 8 0x80 0')
    el.sendCommand('input_data_ports = 8')
    el.sendCommand('input_data_masks = 0xFF')
    # tyical settings for crossover TTL cable (data pins -> status pins)
    # el.sendCommand('write_ioport 0xA 0x0')
    # el.sendCommand('create_button 1 9 0x20 1')
    # el.sendCommand('create_button 2 9 0x40 1')
    # el.sendCommand('create_button 3 9 0x08 1')
    # el.sendCommand('create_button 4 9 0x10 1')
    # el.sendCommand('create_button 5 9 0x80 0')
    # el.sendCommand('input_data_ports = 9')
    # el.sendCommand('input_data_masks = 0xFF')
    # mark end of Eyelinkstart in .edf
    el.sendMessage('>EndOfEyeLinkStart')
    # return Eyelink object
    return el
def initialize_graphics(self):
    """Install the custom calibration display as pylink's graphics environment.

    Takes the tracker offline first (configuration changes require idle
    mode), then registers ``self.genv`` via ``pl.openGraphicsEx`` so that
    subsequent calibration/setup screens are drawn through it.
    """
    self.set_offline_mode()
    pl.openGraphicsEx(self.genv)
def __init__(self, resolution, data_file="default.edf",
             fg_color=(255, 255, 255), bg_color=(0, 0, 0),
             saccade_velocity_threshold=35,
             saccade_acceleration_threshold=9500):
    """
    Initializes the connection to the Eyelink

    Parameters:
    resolution -- (width, height) tuple
    data_file -- the name of the EDF file
    fg_color, bg_color -- display colors (stored by callers; unused here)
    saccade_velocity_threshold -- parser velocity threshold (deg/s)
    saccade_acceleration_threshold -- parser acceleration threshold (deg/s^2)

    Raises:
    exceptions.runtime_error on connection failure
    """
    global _eyelink

    self.data_file = data_file
    self.resolution = resolution
    self.recording = False
    # BUG FIX: the thresholds were previously stored only under misspelled
    # names ('treshold'), while the eyelink_ver < 2 branch below read the
    # correctly spelled attributes -> AttributeError. Store the correct
    # names, and keep the misspelled ones as backward-compatible aliases.
    self.saccade_velocity_threshold = saccade_velocity_threshold
    self.saccade_acceleration_threshold = saccade_acceleration_threshold
    self.saccade_velocity_treshold = saccade_velocity_threshold
    self.saccade_acceleration_treshold = saccade_acceleration_threshold
    self.eye_used = None
    self.left_eye = 0
    self.right_eye = 1
    self.binocular = 2

    # Only initialize the eyelink once
    if _eyelink is None:
        try:
            _eyelink = pylink.EyeLink()
        except Exception as e:
            raise exceptions.runtime_error(
                "Failed to connect to the tracker: %s" % e)
        graphics_env = eyelink_graphics(_eyelink)
        pylink.openGraphicsEx(graphics_env)

    pylink.getEYELINK().openDataFile(self.data_file)
    pylink.flushGetkeyQueue()
    pylink.getEYELINK().setOfflineMode()

    # Notify the eyelink of the display resolution
    self.send_command("screen_pixel_coords = 0 0 %d %d" %
                      (self.resolution[0], self.resolution[1]))

    # Determine the software version of the tracker
    # (HTARGET sample data requires software version >= 4)
    self.tracker_software_ver = 0
    self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
    if self.eyelink_ver == 3:
        tvstr = pylink.getEYELINK().getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        self.tracker_software_ver = int(
            float(tvstr[(vindex + len("EYELINK CL")):].strip()))

    # Set some configuration stuff (not sure what the parser and gazemap mean)
    if self.eyelink_ver >= 2:
        self.send_command("select_parser_configuration 0")
        if self.eyelink_ver == 2:  # turn off scenelink camera stuff
            self.send_command("scene_camera_gazemap = NO")
    else:
        # Older trackers take explicit thresholds instead of a parser profile.
        self.send_command("saccade_velocity_threshold = %d" %
                          self.saccade_velocity_threshold)
        self.send_command("saccade_acceleration_threshold = %d" %
                          self.saccade_acceleration_threshold)

    # Set EDF file contents. This specifies which data is written to the EDF file.
    self.send_command(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
    else:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")

    # Set link data. This specifies which data is sent through the link and thus can
    # be used in gaze contingent displays
    self.send_command(
        "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
    else:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")

    # Not sure what this means. Maybe the button that is used to end drift correction?
    self.send_command("button_function 5 'accept_target_fixation'")

    if not self.connected():
        raise exceptions.runtime_error("Failed to connect to the eyetracker")
# Calibration type, H3, HV3, HV5, HV13 (HV = horiztonal/vertical) tk.sendCommand("calibration_type = HV9") # Step 4: Open a window for graphics and calibration # always create a monitor object before you run the script customMon = monitors.Monitor('demoMon', width=35, distance=65) customMon.setSizePix((SCN_WIDTH, SCN_HEIGHT)) # Open a window win = visual.Window((SCN_WIDTH, SCN_HEIGHT), fullscr=False, monitor=customMon, units='pix', allowStencil=True) # Require Pylink to use the window we just opened for calibration graphics = EyeLinkCoreGraphicsPsychoPy(tk, win) pylink.openGraphicsEx(graphics) # Step 5: Calibrate the tracker, and run through all the trials calib_prompt = "Press ENTER twice to calibrate the tracker" calib_msg = visual.TextStim( win, text=calib_prompt, color='white', ) calib_msg.draw() win.flip() event.waitKeys() # Calibrate the tracker tk.doTrackerSetup()
self.image = None self.update() def draw_cross_hair(self, surf): print("draw_cross_hair: {}".format(surf)) def alert_printf(self, msg): print("alert: {}".format(msg)) def image_title(self, text): # LEFT, HEAD, RIGHT print("title: {}".format(text)) if __name__ == "__main__": app = QtGui.QApplication(sys.argv) print("Connecting to eyetracker.") tracker = pylink.EyeLink("100.1.1.1") # tracker = pylink.EyeLink(None) w = QEyelink(tracker) app.lastWindowClosed.connect(QtCore.QCoreApplication.instance().quit) w.show() # tracker.setCalibrationType("HV9") # pylink.flushGetkeyQueue() pylink.openGraphicsEx(w) tracker.doTrackerSetup() sys.exit(app.exec_())
def __init__(self, resolution, data_file="default.edf",
             fg_color=(255, 255, 255), bg_color=(0, 0, 0),
             saccade_velocity_threshold=35,
             saccade_acceleration_threshold=9500):
    """
    Initializes the connection to the Eyelink.

    Parameters:
    resolution -- a (width, height) tuple in pixels
    data_file -- the name of the EDF file on the tracker host
    fg_color / bg_color -- calibration screen colors (not used directly here)
    saccade_velocity_threshold -- velocity threshold for saccade detection
        (only sent to EyeLink I trackers)
    saccade_acceleration_threshold -- acceleration threshold for saccade
        detection (only sent to EyeLink I trackers)

    Returns:
    True on connection success and False on connection failure
    """
    global _eyelink
    self.data_file = data_file
    self.resolution = resolution
    self.recording = False
    # NOTE: the historical 'treshold' spelling is kept because other code
    # in this module reads the attributes under that name.
    self.saccade_velocity_treshold = saccade_velocity_threshold
    self.saccade_acceleration_treshold = saccade_acceleration_threshold
    self.eye_used = None
    self.left_eye = 0
    self.right_eye = 1
    self.binocular = 2
    # Only initialize the eyelink once (module-level singleton)
    if _eyelink is None:
        try:
            _eyelink = pylink.EyeLink()
        except Exception as e:
            raise exceptions.runtime_error(
                "Failed to connect to the tracker: %s" % e)
        graphics_env = eyelink_graphics(_eyelink)
        pylink.openGraphicsEx(graphics_env)
    pylink.getEYELINK().openDataFile(self.data_file)
    pylink.flushGetkeyQueue()
    pylink.getEYELINK().setOfflineMode()
    # Notify the eyelink of the display resolution
    self.send_command("screen_pixel_coords = 0 0 %d %d" %
                      (self.resolution[0], self.resolution[1]))
    # Determine the software version of the tracker
    self.tracker_software_ver = 0
    self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
    if self.eyelink_ver == 3:
        tvstr = pylink.getEYELINK().getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        self.tracker_software_ver = int(
            float(tvstr[(vindex + len("EYELINK CL")):].strip()))
    # Set some configuration stuff (not sure what the parser and gazemap mean)
    if self.eyelink_ver >= 2:
        self.send_command("select_parser_configuration 0")
        if self.eyelink_ver == 2:  # turn off scenelink camera stuff
            self.send_command("scene_camera_gazemap = NO")
    else:
        # BUGFIX: these previously read self.saccade_velocity_threshold /
        # self.saccade_acceleration_threshold, which are never assigned
        # (the attributes are stored under the 'treshold' spelling above),
        # so EyeLink I trackers raised an AttributeError here.
        self.send_command("saccade_velocity_threshold = %d" %
                          self.saccade_velocity_treshold)
        self.send_command("saccade_acceleration_threshold = %s" %
                          self.saccade_acceleration_treshold)
    # Set EDF file contents. This specifies which data is written to the
    # EDF file.
    self.send_command(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
    else:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # Set link data. This specifies which data is sent through the link and
    # thus can be used in gaze contingent displays.
    self.send_command(
        "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
    else:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    # Not sure what this means. Maybe the button that is used to end drift
    # correction?
    self.send_command("button_function 5 'accept_target_fixation'")
    if not self.connected():
        raise exceptions.runtime_error("Failed to connect to the eyetracker")
def __init__(self, win, clock, sj = "TEST", saccadeSensitivity = HIGH,
             calibrationType = 'HV9', calibrationTargetColor = WHITE,
             calibrationBgColor = BLACK, CalibrationSounds = False,
             screen=(1024,768)):
    '''Connect to an EyeLink tracker and configure it for recording.

    win: psychopy visual window used for the experiment
    clock: psychopy time clock recording time for whole experiment
    sj: Subject identifier string (affects EDF filename)
    saccadeSensitivity:
        HIGH: Pursuit and neurological work
        LOW:  Cognitive research
    calibrationType:
        H3: Horizontal 3-point
        HV3: 3-point calibration, poor linearization
        HV5: 5-point calibration, poor at corners
        HV9: 9-point calibration, best overall
    calibrationTargetColor and calibrationBgColor:
        RGB tuple, i.e., (255,0,0) for Red
        One of: BLACK, WHITE, GRAY
    calibrationSounds:
        True: enable feedback sounds when calibrating'''
    # Subject name: the host can only handle 8 characters in an EDF name
    self.edfFileName = str(sj)+".EDF"
    print("Connecting to eyetracker.")
    self.tracker = pylink.EyeLink()
    # Offset between the experiment clock and the tracker clock, used to
    # convert tracker timestamps into experiment time.
    self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
    print("Loading custom graphics")
    # Initializes Experiment Graphics (custom calibration display)
    genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, screen)
    pylink.openGraphicsEx(genv)
    # open data file on the tracker host
    self.tracker.openDataFile(self.edfFileName)
    # EyeLink Tracker Configuration
    # Initializes the key queue used by getkey(). It may be called at any
    # time to get rid of any old keys from the queue.
    pylink.flushGetkeyQueue();
    # Places EyeLink tracker in off-line (idle) mode. Wait till the
    # tracker has finished the mode transition.
    self.tracker.setOfflineMode();
    self.tracker.sendCommand("screen_pixel_coords = 0 0 %d %d"%( tuple(screen) ))
    self.tracker.setCalibrationType(calibrationType)
    # CF - OFF: turns on drift CORRECT; AUTO: Turns on drift CHECK;
    # ON: Turns off both
    self.tracker.sendCommand("driftcorrect_cr_disable=OFF")
    #self.tracker.sendCommand("generate_default_targets = NO")
    #self.tracker.sendCommand("calibration_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")
    #self.tracker.sendCommand("validation_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")
    # record the display geometry in the EDF file
    self.tracker.sendMessage("DISPLAY_COORDS 0 0 %d %d"%( tuple(screen) ))
    eyelink_ver = self.tracker.getTrackerVersion()
    if eyelink_ver == 3:
        # e.g. "EYELINK CL 4.48" -> 4
        tvstr = self.tracker.getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        tracker_software_ver = int(float(tvstr[(vindex + len("EYELINK CL")):].strip()))
    else:
        tracker_software_ver = 0
    if eyelink_ver>=2:
        self.tracker.sendCommand("select_parser_configuration %d" %saccadeSensitivity)
    else:
        # EyeLink I has no parser presets: set thresholds manually
        if saccadeSensitivity == HIGH:
            svt, sat = 22, 5000
        else:
            svt, sat = 30, 9500
        self.tracker.sendCommand("saccade_velocity_threshold = %d" %svt)
        self.tracker.sendCommand("saccade_acceleration_threshold = %d" %sat)
    if eyelink_ver == 2: #turn off scenelink camera stuff
        self.tracker.sendCommand("scene_camera_gazemap = NO")
    # set EDF file contents (what gets written to the data file)
    self.tracker.setFileEventFilter("LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
    if tracker_software_ver>=4:
        self.tracker.setFileSampleFilter("LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
    else:
        self.tracker.setFileSampleFilter("LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # set link data (used for gaze cursor)
    self.tracker.setLinkEventFilter("LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if tracker_software_ver>=4:
        self.tracker.setLinkSampleFilter("LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
    else:
        self.tracker.setLinkSampleFilter("LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    #self.tracker.setAcceptTargetFixationButton(1) # This programs a specific button for use in drift correction.
    #Set the calibration settings:
    #pylink.setCalibrationColors(WHITE, BLACK) # Sets the calibration target and background color(foreground_color, background_color)
    if CalibrationSounds:
        pylink.setCalibrationSounds("", "", "")
        pylink.setDriftCorrectSounds("", "off", "off")
    else:
        pylink.setCalibrationSounds("off", "off", "off")
        pylink.setDriftCorrectSounds("off", "off", "off")
    print("Beginning tracker setup")
    self.tracker.doTrackerSetup()
def __init__(self, experiment, resolution, data_file=u'default.edf',
             fg_color=(255, 255, 255), bg_color=(0, 0, 0),
             saccade_velocity_threshold=35,
             saccade_acceleration_threshold=9500,
             force_drift_correct=False):
    """<DOC>
    Constructor. Initializes the connection to the Eyelink.

    Arguments:
    experiment -- The experiment object.
    resolution -- A (width, height) tuple.

    Keyword arguments:
    data_file -- The name of the EDF file. (default=u'default.edf')
    fg_color -- The foreground color for the calibration screen.
                (default=(255,255,255))
    bg_color -- The background color for the calibration screen.
                (default=(0,0,0))
    saccade_velocity_threshold -- The velocity threshold used for saccade
                                  detection. (default=35)
    saccade_acceleration_threshold -- The acceleration threshold used for
                                      saccade detection. (default=9500)
    force_drift_correct -- Indicates whether drift correction should be
                           enabled. This is useful only for Eyelink 1000
                           models, for which drift correction is disabled
                           by default. (default=False)

    Returns:
    True on connection success and False on connection failure.
    </DOC>"""
    global _eyelink
    # The tracker host limits EDF names to 8 characters plus extension.
    stem, ext = os.path.splitext(data_file)
    if len(stem) > 8 or len(ext) > 4:
        raise exceptions.runtime_error(
            u'The Eyelink cannot handle filenames longer than 8 characters (plus .EDF extension)')
    self.experiment = experiment
    self.data_file = data_file
    self.resolution = resolution
    self.recording = False
    self.cal_beep = True
    self.cal_target_size = 16
    self.experiment.eyelink_esc_pressed = False
    # NOTE: the historical 'treshold' spelling is kept because other code
    # reads these attributes under that name.
    self.saccade_velocity_treshold = saccade_velocity_threshold
    self.saccade_acceleration_treshold = saccade_acceleration_threshold
    self.eye_used = None
    self.left_eye = 0
    self.right_eye = 1
    self.binocular = 2
    # Only initialize the eyelink once (module-level singleton)
    if _eyelink is None:
        try:
            _eyelink = pylink.EyeLink()
        except Exception as e:
            raise exceptions.runtime_error(
                u'Failed to connect to the tracker: %s' % e)
        graphics_env = eyelink_graphics(self.experiment, _eyelink)
        pylink.openGraphicsEx(graphics_env)
    # Optionally force drift correction. For some reason this must be done
    # as (one of) the first things, otherwise a segmentation fault occurs.
    if force_drift_correct:
        self.send_command('driftcorrect_cr_disable = OFF')
    pylink.getEYELINK().openDataFile(self.data_file)
    pylink.flushGetkeyQueue()
    pylink.getEYELINK().setOfflineMode()
    # Notify the eyelink of the display resolution
    self.send_command('screen_pixel_coords = 0 0 %d %d' % (
        self.resolution[0], self.resolution[1]))
    # Determine the software version of the tracker
    self.tracker_software_ver = 0
    self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
    if self.eyelink_ver == 3:
        tvstr = pylink.getEYELINK().getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        self.tracker_software_ver = int(float(
            tvstr[(vindex + len("EYELINK CL")):].strip()))
    # Some configuration stuff (not sure what the parser and gazemap mean)
    if self.eyelink_ver >= 2:
        self.send_command("select_parser_configuration 0")
        if self.eyelink_ver == 2:  # turn off scenelink camera stuff
            self.send_command("scene_camera_gazemap = NO")
    else:
        # BUGFIX: these previously referenced self.saccade_velocity_threshold
        # and self.saccade_acceleration_threshold, which are never assigned
        # (only the 'treshold'-spelled attributes above exist), so EyeLink I
        # trackers raised an AttributeError here.
        self.send_command("saccade_velocity_threshold = %d" %
                          self.saccade_velocity_treshold)
        self.send_command("saccade_acceleration_threshold = %s" %
                          self.saccade_acceleration_treshold)
    # Set EDF file contents
    self.send_command(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
    else:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # Set link data. This specifies which data is sent through the link and
    # thus can be used in gaze contingent displays.
    self.send_command(
        "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    self.send_command(
        "link_event_data = GAZE,GAZERES,HREF,AREA,VELOCITY,STATUS")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
    else:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    # Not sure what this means. Maybe the button that is used to end drift
    # correction?
    self.send_command("button_function 5 'accept_target_fixation'")
    # Make sure that we are connected to the eyelink before we start
    # further communication
    if not self.connected():
        raise exceptions.runtime_error(
            "Failed to connect to the eyetracker")
def EyelinkStart(dispsize, Name, win, bits=32, dummy=False,
                 colors=((0, 0, 0), (192, 192, 192))):
    """
    Performs startup routines for the EyeLink 1000 Plus eyetracker.

    **Author** : Wanja Mössing, WWU Münster | [email protected]
    *July 2017*

    Parameters:
    -----------
    dispsize : tuple
        two-item tuple width & height in px
    Name : string
        filename for the edf. Doesn't have to, but can, end on '.edf'
        Maximum length is 8 (without '.edf').
        Possible alphanumeric input: 'a-z', 'A-Z', '0-9', '-' & '_'
    win : window object
        You necessarily need to open a psychopy window first!
    bits : integer
        color-depth, defaults to 32
    dummy : boolean
        Run tracker in dummy mode?
    colors : Tuple, Optional.
        Tuple with two RGB triplets

    Returns
    -------
    'el' the tracker object. This can be passed to other functions,
    although they can use pylink.getEYELINK() to find it automatically.
    """
    print('. ')
    # get filename: append '.edf' if missing; enforce the host's
    # 8-character limit on the basename
    if '.edf' not in Name.lower():
        if len(Name) > 8:
            print('EDF filename too long! (1-8 characters/letters)')
            raise SystemExit
        else:
            Name += '.edf'
    elif '.edf' in Name.lower():
        if len(Name) > 12:
            print('EDF filename too long! (1-8 characters/letters)')
            raise SystemExit
    print('. ')
    # initialize tracker object (None = dummy mode, IP = real tracker)
    if dummy:
        el = pylink.EyeLink(None)
    else:
        el = pylink.EyeLink("100.1.1.1")
    print('. ')
    # Open EDF file on host
    el.openDataFile(Name)
    print('. ')
    # set file preamble: tags the EDF header with the current working
    # directory name as the experiment identifier
    currentdir = path.basename(getcwd())
    FilePreamble = "add_file_preamble_text \'"
    FilePreamble += "Eyetracking Dataset AE Busch WWU Muenster Experiment: "
    FilePreamble += currentdir + "\'"
    el.sendCommand(FilePreamble)
    print('. ')
    # this function calls the custom calibration routine
    # "EyeLinkCoreGraphicsPsychopy.py"
    genv = EyeLinkCoreGraphicsPsychoPy(el, win)
    pylink.openGraphicsEx(genv)
    print('. ')
    # set tracker offline to change configuration
    el.setOfflineMode()
    print('. ')
    # flush old keys
    pylink.flushGetkeyQueue()
    print('. ')
    # set sampling rate
    el.sendCommand('sample_rate 1000')
    print('. ')
    # Sets the display coordinate system and sends message to that
    # effect to EDF file;
    el.sendCommand("screen_pixel_coords = 0 0 %d %d" %
                   (dispsize[0] - 1, dispsize[1] - 1))
    el.sendMessage("DISPLAY_COORDS 0 0 %d %d" %
                   (dispsize[0] - 1, dispsize[1] - 1))
    print('. ')
    # select parser configuration for online saccade etc detection
    ELversion = el.getTrackerVersion()
    ELsoftVer = 0
    if ELversion == 3:
        tmp = el.getTrackerVersionString()
        tmpidx = tmp.find('EYELINK CL')
        ELsoftVer = int(float(tmp[(tmpidx + len("EYELINK CL")):].strip()))
    if ELversion >= 2:
        el.sendCommand("select_parser_configuration 0")
        if ELversion == 2:
            # turn off scenelink stuff (that's an EL2 front-cam addon...)
            el.sendCommand("scene_camera_gazemap = NO")
    else:
        el.sendCommand("saccade_velocity_threshold = 35")
        el.sendCommand("saccade_acceleration_threshold = 9500")
    print('. ')
    # set EDF file contents AREA
    el.sendCommand("file_event_filter = LEFT,RIGHT,FIXATION,"
                   "SACCADE,BLINK,MESSAGE,BUTTON,INPUT")
    if ELsoftVer >= 4:
        el.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,HREF,"
                       "AREA,HTARGET,GAZERES,STATUS,INPUT")
    else:
        el.sendCommand("file_sample_data = LEFT,RIGHT,GAZE,HREF,"
                       "AREA,GAZERES,STATUS,INPUT")
    print('. ')
    # set link data (online interaction) AREA
    el.sendCommand("link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,"
                   "BLINK,MESSAGE,BUTTON,INPUT")
    if ELsoftVer >= 4:
        el.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,"
                       "HTARGET,STATUS,INPUT")
    else:
        el.sendCommand("link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,"
                       "STATUS,INPUT")
    print('. ')
    # run initial calibration
    # 13-Pt Grid calibration
    el.sendCommand('calibration_type = HV13')
    el.doTrackerSetup(dispsize[0], dispsize[1])
    # put tracker in idle mode and wait 500 ms, then really start it.
    el.sendMessage('SETUP_FINISHED')
    el.setOfflineMode()
    pylink.msecDelay(500)
    # set to realtime mode
    pylink.beginRealTimeMode(200)
    # start recording
    # note: sending everything over the link *potentially* causes buffer
    # overflow. However, with modern PCs and EL1000+ this shouldn't be a
    # real problem
    el.startRecording(1, 1, 1, 1)
    # to activate parallel port readout without modifying the FINAL.INI on
    # the eyelink host pc, uncomment these lines
    # typical settings for straight-through TTL cable (data pins -> data pins)
    el.sendCommand('write_ioport 0xA 0x20')
    el.sendCommand('create_button 1 8 0x01 0')
    el.sendCommand('create_button 2 8 0x02 0')
    el.sendCommand('create_button 3 8 0x04 0')
    el.sendCommand('create_button 4 8 0x08 0')
    el.sendCommand('create_button 5 8 0x10 0')
    el.sendCommand('create_button 6 8 0x20 0')
    el.sendCommand('create_button 7 8 0x40 0')
    el.sendCommand('create_button 8 8 0x80 0')
    el.sendCommand('input_data_ports = 8')
    el.sendCommand('input_data_masks = 0xFF')
    # typical settings for crossover TTL cable (data pins -> status pins)
    # el.sendCommand('write_ioport 0xA 0x0')
    # el.sendCommand('create_button 1 9 0x20 1')
    # el.sendCommand('create_button 2 9 0x40 1')
    # el.sendCommand('create_button 3 9 0x08 1')
    # el.sendCommand('create_button 4 9 0x10 1')
    # el.sendCommand('create_button 5 9 0x80 0')
    # el.sendCommand('input_data_ports = 9')
    # el.sendCommand('input_data_masks = 0xFF')
    # mark end of Eyelinkstart in .edf
    el.sendMessage('>EndOfEyeLinkStart')
    # return Eyelink object
    return el
def __init__(self, display, resolution=settings.DISPSIZE,
             data_file=settings.LOGFILENAME + ".edf", fg_color=settings.FGC,
             bg_color=settings.BGC, eventdetection=settings.EVENTDETECTION,
             saccade_velocity_threshold=35,
             saccade_acceleration_threshold=9500,
             blink_threshold=settings.BLINKTHRESH,
             force_drift_correct=True,
             pupil_size_mode=settings.EYELINKPUPILSIZEMODE, **args):
    """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""
    # try to copy docstring (but ignore it if it fails, as we do
    # not need it for actual functioning of the code)
    try:
        copy_docstr(BaseEyeTracker, libeyelink)
    except:
        # we're not even going to show a warning, since the copied
        # docstring is useful for code editors; these load the docs
        # in a non-verbose manner, so warning messages would be lost
        pass
    global _eyelink
    # Make sure that we have a valid data file. The local_data_file may
    # contain a folder. The eyelink_data_file is only a basename, i.e.
    # without folder. The eyelink_data_file must be at most eight characters
    # and end with a `.edf` extension.
    self.local_data_file = data_file
    self.eyelink_data_file = os.path.basename(data_file)
    stem, ext = os.path.splitext(self.eyelink_data_file)
    if len(stem) > 8 or ext.lower() != '.edf':
        raise Exception(
            "The EyeLink cannot handle filenames longer than eight "
            "characters (excluding '.edf' extension).")
    # properties
    self.display = display
    self.fontsize = 18
    self.scr = Screen(disptype=settings.DISPTYPE, mousevisible=False)
    self.kb = Keyboard(keylist=["escape", "q"], timeout=1)
    self.resolution = resolution
    self.recording = False
    # NOTE: 'treshold' spelling is historical but used consistently below
    self.saccade_velocity_treshold = saccade_velocity_threshold
    self.saccade_acceleration_treshold = saccade_acceleration_threshold
    self.blink_threshold = blink_threshold
    self.eye_used = None
    self.left_eye = 0
    self.right_eye = 1
    self.binocular = 2
    self.pupil_size_mode = pupil_size_mode
    self.prevsample = (-1, -1)
    self.prevps = -1
    # event detection properties
    # degrees; maximal distance from fixation start (if gaze wanders beyond
    # this, fixation has stopped)
    self.fixtresh = 1.5
    # milliseconds; amount of time gaze has to linger within self.fixtresh
    # to be marked as a fixation
    self.fixtimetresh = 100
    # degrees per second; saccade velocity threshold
    self.spdtresh = self.saccade_velocity_treshold
    # degrees per second**2; saccade acceleration threshold
    self.accthresh = self.saccade_acceleration_treshold
    self.set_detection_type(eventdetection)
    # weighted distance, used for determining whether a movement is due to
    # measurement error (1 is ok, higher is more conservative and will
    # result in only larger saccades to be detected)
    self.weightdist = 10
    # distance between participant and screen in cm
    self.screendist = settings.SCREENDIST
    # physical (width, height) of the screen in cm
    self.screensize = settings.SCREENSIZE
    self.pixpercm = (self.resolution[0]/float(self.screensize[0]) + \
        self.resolution[1]/float(self.screensize[1])) / 2.0
    # only initialize eyelink once (module-level singleton)
    if _eyelink == None:
        try:
            _eyelink = pylink.EyeLink()
        except:
            raise Exception(
                "Error in libeyelink.libeyelink.__init__(): Failed to "
                "connect to the tracker!")
    # determine software version of tracker
    self.tracker_software_ver = 0
    self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
    if self.eyelink_ver == 3:
        tvstr = pylink.getEYELINK().getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        self.tracker_software_ver = int(float(tvstr[(vindex + \
            len("EYELINK CL")):].strip()))
    if self.eyelink_ver == 1:
        self.eyelink_model = 'EyeLink I'
    elif self.eyelink_ver == 2:
        self.eyelink_model = 'EyeLink II'
    elif self.eyelink_ver == 3:
        self.eyelink_model = 'EyeLink 1000'
    else:
        self.eyelink_model = 'EyeLink (model unknown)'
    # Open graphics
    self.eyelink_graphics = EyelinkGraphics(self, _eyelink)
    pylink.openGraphicsEx(self.eyelink_graphics)
    # Optionally force drift correction. For some reason this must be done
    # as (one of) the first things, otherwise a segmentation fault occurs.
    if force_drift_correct:
        try:
            self.send_command('driftcorrect_cr_disable = OFF')
        except:
            print('Failed to force drift correction (EyeLink 1000 only)')
    # Set pupil-size mode
    if self.pupil_size_mode == 'area':
        pylink.getEYELINK().setPupilSizeDiameter(False)
    elif self.pupil_size_mode == 'diameter':
        pylink.getEYELINK().setPupilSizeDiameter(True)
    else:
        raise Exception(
            "pupil_size_mode should be 'area' or 'diameter', not %s" \
            % self.pupil_size_mode)
    pylink.getEYELINK().openDataFile(self.eyelink_data_file)
    pylink.flushGetkeyQueue()
    pylink.getEYELINK().setOfflineMode()
    # notify eyelink of display resolution
    self.send_command("screen_pixel_coords = 0 0 %d %d" % \
        (self.resolution[0], self.resolution[1]))
    # get some configuration stuff
    if self.eyelink_ver >= 2:
        self.send_command("select_parser_configuration 0")
        if self.eyelink_ver == 2:
            # turn off scenelink camera stuff
            self.send_command("scene_camera_gazemap = NO")
    # set EDF file contents (this specifies which data is written to the EDF
    # file)
    self.send_command(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
    )
    if self.tracker_software_ver >= 4:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
        )
    else:
        self.send_command(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # set link data (this specifies which data is sent through the link and
    # thus can be used in gaze contingent displays)
    self.send_command(
        "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if self.tracker_software_ver >= 4:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
        )
    else:
        self.send_command(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    # not quite sure what this means (according to Sebastiaan Mathot, it
    # might be the button that is used to end drift correction?)
    self.send_command("button_function 5 'accept_target_fixation'")
    if not self.connected():
        raise Exception(
            "Error in libeyelink.libeyelink.__init__(): Failed to connect "
            "to the eyetracker!")
def __init__(self, win, clock, sj="TEST", autoCalibration=True,
             saccadeSensitivity=HIGH, calibrationType='HV9',
             calibrationTargetColor=WHITE, calibrationBgColor=BLACK,
             CalibrationSounds=False):
    '''Connect to an EyeLink tracker and configure it for recording.

    win: psychopy visual window used for the experiment
    clock: psychopy time clock recording time for whole experiment
    sj: Subject identifier string (affects EDF filename)
    autoCalibration:
        True: enable auto-pacing during calibration
    saccadeSensitivity:
        HIGH: Pursuit and neurological work
        LOW:  Cognitive research
    calibrationType:
        H3: Horizontal 3-point
        HV3: 3-point calibration, poor linearization
        HV5: 5-point calibration, poor at corners
        HV9: 9-point calibration, best overall
    calibrationTargetColor and calibrationBgColor:
        RGB tuple, i.e., (255,0,0) for Red
        One of: BLACK, WHITE, GRAY
    calibrationSounds:
        True: enable feedback sounds when calibrating
    '''
    self.edfFileName = str(sj) + ".EDF"
    print(self.edfFileName)
    # Query the open window for its size, units, and monitor name
    inf = info.RunTimeInfo("J", "1", win, refreshTest=None,
                           userProcsDetailed=False)
    self.screenSize = inf['windowSize_pix']
    self.units = inf['windowUnits']
    self.monitorName = inf['windowMonitor.name']
    monitor = monitors.Monitor(self.monitorName)
    print("Connecting to eyetracker.")
    self.tracker = pylink.EyeLink()
    # Offset between the experiment clock and the tracker clock, used to
    # convert tracker timestamps into experiment time.
    self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
    print("Loading custom graphics")
    genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, self.screenSize)
    self.tracker.openDataFile(self.edfFileName)
    pylink.flushGetkeyQueue()
    self.tracker.setOfflineMode()
    self.tracker.sendCommand("screen_pixel_coords = 0 0 %d %d" %
                             (tuple(self.screenSize)))
    self.tracker.setCalibrationType(calibrationType)
    self.tracker.sendMessage("DISPLAY_COORDS 0 0 %d %d" %
                             (tuple(self.screenSize)))
    eyelink_ver = self.tracker.getTrackerVersion()
    if eyelink_ver == 3:
        tvstr = self.tracker.getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        tracker_software_ver = int(
            float(tvstr[(vindex + len("EYELINK CL")):].strip()))
    else:
        tracker_software_ver = 0
    if eyelink_ver >= 2:
        self.tracker.sendCommand("select_parser_configuration %d" %
                                 saccadeSensitivity)
    else:
        # EyeLink I has no parser presets: set thresholds manually
        if saccadeSensitivity == HIGH:
            svt, sat = 22, 5000
        else:
            svt, sat = 30, 9500
        self.tracker.sendCommand("saccade_velocity_threshold = %d" % svt)
        self.tracker.sendCommand("saccade_acceleration_threshold = %d" % sat)
    if eyelink_ver == 2:  # turn off scenelink camera stuff
        self.tracker.sendCommand("scene_camera_gazemap = NO")
    # set EDF file contents
    self.tracker.sendCommand(
        "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
    )
    if tracker_software_ver >= 4:
        self.tracker.sendCommand(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
        )
    else:
        self.tracker.sendCommand(
            "file_sample_data = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # set link data (used for gaze cursor)
    self.tracker.sendCommand(
        "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if tracker_software_ver >= 4:
        self.tracker.sendCommand(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
        )
    else:
        self.tracker.sendCommand(
            "link_sample_data = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    # Set the calibration settings:
    pylink.setCalibrationColors(calibrationTargetColor, calibrationBgColor)
    if CalibrationSounds:
        pylink.setCalibrationSounds("", "", "")
        pylink.setDriftCorrectSounds("", "off", "off")
    else:
        pylink.setCalibrationSounds("off", "off", "off")
        pylink.setDriftCorrectSounds("off", "off", "off")
    # BUGFIX: these were bare attribute references without call parentheses
    # (`self.tracker.enableAutoCalibration`), so the auto-calibration
    # setting was silently never applied.
    if autoCalibration:
        self.tracker.enableAutoCalibration()
    else:
        self.tracker.disableAutoCalibration()
    win.flip()
    print("Opening graphics")
    pylink.openGraphicsEx(genv)
    print("Begining tracker setup")
    self.tracker.doTrackerSetup()
    win.flip()
# always create a monitor object; PsychoPy uses it for unit conversions
mon = monitors.Monitor('myMac15', width=53.0, distance=70.0)
mon.setSizePix((SCN_WIDTH, SCN_HEIGHT))
win = visual.Window((SCN_WIDTH, SCN_HEIGHT), fullscr=False,
                    monitor=mon, units='pix')

# instantiate a custom CoreGraphics environment in PsychoPy
genv = EyeLinkCoreGraphicsPsychoPy(tk, win)

# Configure the calibration target
# To use a dynamic spiral as the calibration target; otherwise,
# calTarget could be a "circle", a "picture", or a "movie" clip.
# To use a picture or movie clip as the calibration target,
# you need to provide movieTargetFile or pictureTargetFile
genv.calTarget = 'spiral'
# provide a movie clip if genv.calTarget == 'movie'
# genv.movieTargetFile = 'starjumps100.avi'
genv.targetSize = 32

# Open the calibration window
pylink.openGraphicsEx(genv)

# Set up the camera and calibrate the tracker
tk.doTrackerSetup()

# Close the connection and close the PsychoPy window
tk.close()
win.close()
core.quit()
def calibrate(tracker, reward, cnum=13, paval=1000, target_color=1,
              target_size=1.0, target_image=None, use_gabor=False,
              pulse_dot=False, manual_calibration=False, movie_stim=None):
    """
    Calibrates eye-tracker using psychopy stimuli.

    :param tracker: Tracker object to communicate with eyelink
    :param reward: Reward object to disperse liquid reward through a serial
        port
    :param cnum: Number of points to use for calibration.
        Options are 3, 5, 9, 13.
    :type cnum: int
    :param paval: Pacing of calibration, i.e. how long you have to fixate
        each target in milliseconds. Only used for automatic calibration.
    :type paval: int
    :param target_color: Color of calibration target
    :type target_color: int or (float, float, float)
    :param target_size: Radius of calibration target in pixels
    :type target_size: float
    :param target_image: Optional image used as the calibration target
    :param use_gabor: If True, use a gabor patch as the calibration target
    :type use_gabor: bool
    :param pulse_dot: Accepted for API compatibility; not referenced in
        this function body.
    :type pulse_dot: bool
    :param manual_calibration: If True, enable remote calibration so
        targets are advanced manually with the number keys ('y' completes)
        instead of auto-pacing.
    :type manual_calibration: bool
    :param movie_stim: Optional movie stimulus for the calibration target
    """
    # Generate custom calibration stimuli
    genv = Calibration(tracker.sres[0], tracker.sres[1], tracker.tracker,
                       tracker.win, reward, target_color, target_size,
                       target_image, use_gabor, movie_stim)
    if tracker.realconnect:
        # Set calibration type
        calst = 'HV{}'.format(cnum)
        tracker.tracker.setCalibrationType(calst)
        # Set calibration pacing
        if manual_calibration:
            print("Using manual calibration")
            # Remote (manual) calibration: number keys jump to targets,
            # 'y' completes the calibration.
            tracker.send_command("remote_cal_enable = 1")
            tracker.send_command("key_function 1 'remote_cal_target 1'")
            tracker.send_command("key_function 2 'remote_cal_target 2'")
            tracker.send_command("key_function 3 'remote_cal_target 3'")
            tracker.send_command("key_function 4 'remote_cal_target 4'")
            tracker.send_command("key_function 5 'remote_cal_target 5'")
            tracker.send_command("key_function 6 'remote_cal_target 6'")
            tracker.send_command("key_function 7 'remote_cal_target 7'")
            tracker.send_command("key_function 8 'remote_cal_target 8'")
            tracker.send_command("key_function 9 'remote_cal_target 9'")
            tracker.send_command("key_function y 'remote_cal_complete'")
        else:
            # BUGFIX: corrected misspelled status message ("autmoatic")
            print("Using automatic calibration")
            tracker.send_command("remote_cal_enable = 0")
            tracker.tracker.setAutoCalibrationPacing(paval)
        # Execute custom calibration display
        print('*' * 150)
        print('Calibration Mode')
        print('*' * 150)
        pylink.openGraphicsEx(genv)
        # Calibrate
        # TODO: We only exit from this mode by pressing "ESC" on the keyboard
        tracker.tracker.doTrackerSetup(tracker.sres[0], tracker.sres[1])
    else:
        genv.dummynote()
def __init__(self, win, clock, sj="TEST", saccadeSensitivity=HIGH,
             calibrationType='HV9', calibrationTargetColor=WHITE,
             calibrationBgColor=BLACK, CalibrationSounds=False,
             screen=(1024, 768)):
    '''Connect to an EyeLink tracker and configure it for recording.

    win: psychopy visual window used for the experiment
    clock: psychopy time clock recording time for whole experiment
    sj: Subject identifier string (affects EDF filename)
    saccadeSensitivity:
        HIGH: Pursuit and neurological work
        LOW:  Cognitive research
    calibrationType:
        H3: Horizontal 3-point
        HV3: 3-point calibration, poor linearization
        HV5: 5-point calibration, poor at corners
        HV9: 9-point calibration, best overall
    calibrationTargetColor and calibrationBgColor:
        RGB tuple, i.e., (255,0,0) for Red
        One of: BLACK, WHITE, GRAY
    calibrationSounds:
        True: enable feedback sounds when calibrating'''
    # Subject name only can put 8 characters (EDF host limit)
    self.edfFileName = str(
        sj) + ".EDF"
    print("Connecting to eyetracker.")
    self.tracker = pylink.EyeLink()
    # Offset between the experiment clock and the tracker clock, used to
    # convert tracker timestamps into experiment time.
    self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
    print("Loading custom graphics")
    # Initializes Experiment Graphics (custom calibration display)
    genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, screen)
    pylink.openGraphicsEx(genv)
    # open data file on the tracker host
    self.tracker.openDataFile(self.edfFileName)
    # EyeLink Tracker Configuration
    # Initializes the key queue used by getkey(). It may be called at any
    # time to get rid of any old keys from the queue.
    pylink.flushGetkeyQueue()
    # Places EyeLink tracker in off-line (idle) mode. Wait till the
    # tracker has finished the mode transition.
    self.tracker.setOfflineMode()
    self.tracker.sendCommand("screen_pixel_coords = 0 0 %d %d" %
                             (tuple(screen)))
    self.tracker.setCalibrationType(calibrationType)
    # CF - OFF: turns on drift CORRECT; AUTO: Turns on drift CHECK;
    # ON: Turns off both
    self.tracker.sendCommand(
        "driftcorrect_cr_disable=OFF"
    )
    #self.tracker.sendCommand("generate_default_targets = NO")
    #self.tracker.sendCommand("calibration_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")
    #self.tracker.sendCommand("validation_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")
    # record the display geometry in the EDF file
    self.tracker.sendMessage("DISPLAY_COORDS 0 0 %d %d" % (tuple(screen)))
    eyelink_ver = self.tracker.getTrackerVersion()
    if eyelink_ver == 3:
        # e.g. "EYELINK CL 4.48" -> 4
        tvstr = self.tracker.getTrackerVersionString()
        vindex = tvstr.find("EYELINK CL")
        tracker_software_ver = int(
            float(tvstr[(vindex + len("EYELINK CL")):].strip()))
    else:
        tracker_software_ver = 0
    if eyelink_ver >= 2:
        self.tracker.sendCommand("select_parser_configuration %d" %
                                 saccadeSensitivity)
    else:
        # EyeLink I has no parser presets: set thresholds manually
        if saccadeSensitivity == HIGH:
            svt, sat = 22, 5000
        else:
            svt, sat = 30, 9500
        self.tracker.sendCommand("saccade_velocity_threshold = %d" % svt)
        self.tracker.sendCommand("saccade_acceleration_threshold = %d" % sat)
    if eyelink_ver == 2:  #turn off scenelink camera stuff
        self.tracker.sendCommand("scene_camera_gazemap = NO")
    # set EDF file contents (what gets written to the data file)
    self.tracker.setFileEventFilter(
        "LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
    if tracker_software_ver >= 4:
        self.tracker.setFileSampleFilter(
            "LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
    else:
        self.tracker.setFileSampleFilter(
            "LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
    # set link data (used for gaze cursor)
    self.tracker.setLinkEventFilter(
        "LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
    if tracker_software_ver >= 4:
        self.tracker.setLinkSampleFilter(
            "LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
    else:
        self.tracker.setLinkSampleFilter(
            "LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
    #self.tracker.setAcceptTargetFixationButton(1) # This programs a specific button for use in drift correction.
    #Set the calibration settings:
    #pylink.setCalibrationColors(WHITE, BLACK) # Sets the calibration target and background color(foreground_color, background_color)
    if CalibrationSounds:
        pylink.setCalibrationSounds("", "", "")
        pylink.setDriftCorrectSounds("", "off", "off")
    else:
        pylink.setCalibrationSounds("off", "off", "off")
        pylink.setDriftCorrectSounds("off", "off", "off")
    print("Beginning tracker setup")
    self.tracker.doTrackerSetup()
# set up the window rectXY = (1920, 1080) surf = visual.Window(size=rectXY, fullscr=True, winType='pygame') #surf.setMouseVisible(False) #surf.waitBlanking = False #load images memory_image = visual.SimpleImageStim( surf, image=path_to_fixdur_code + 'images/memory.png') #,pos=(rectXY[0]/2,rectXY[1]/2)) fix_cross = visual.SimpleImageStim(surf, image=path_to_fixdur_code + 'images/fixationcross.png') # set up eyetracker #pylink.openGraphics() openGraphicsEx(EyeLinkCoreGraphicsOpenGL(surf)) rand_filename = ''.join( random.choice(string.ascii_uppercase + string.digits) for x in range(8)) print rand_filename #el = tracker.Tracker(surf,rand_filename+'.EDF') #track = PsychoTracker(surf, rand_filename) #track.set_calibration(13) #el.setup() #[] = 2 #start slide show tools.slideshow( surf, np.sort(glob.glob(path_to_fixdur_code + 'images/instructions/intro*.png'))) ##el.setup() # show fixation cross