def __init__(self, display): """Initiates an eyetracker dummy object, that simulates gaze position using the mouse arguments display -- a pygaze display.Display instance keyword arguments None """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, Dummy) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass self.recording = False self.blinking = False self.bbpos = (settings.DISPSIZE[0]/2, settings.DISPSIZE[1]/2) self.resolution = settings.DISPSIZE[:] self.simulator = Mouse(disptype=settings.DISPTYPE, mousebuttonlist=None, timeout=2, visible=False) self.kb = Keyboard(disptype=settings.DISPTYPE, keylist=None, timeout=None) self.angrybeep = Sound(osc='saw',freq=100, length=100, attack=0, decay=0, soundfile=None) self.display = display self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False)
class SMItracker(BaseEyeTracker): """A class for SMI eye tracker objects""" def __init__(self, display, ip='127.0.0.1', sendport=4444, receiveport=5555, logfile=settings.LOGFILE, eventdetection=settings.EVENTDETECTION, saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, **args): """Initializes the SMItracker object arguments display -- a pygaze.display.Display instance keyword arguments ip -- internal ip address for iViewX (default = '127.0.0.1') sendport -- port number for iViewX sending (default = 4444) receiveport -- port number for iViewX receiving (default = 5555) logfile -- logfile name (string value); note that this is the name for the SMI logfile, NOT the .idf file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, SMITracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = settings.DISPSIZE # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # output file properties self.outputfile = logfile self.description = "experiment" # TODO: EXPERIMENT NAME self.participant = "participant" # TODO: PP NAME # eye tracker properties self.connected = False self.recording = False self.eye_used = 0 # 0=left, 1=right, 2=binocular self.left_eye = 0 self.right_eye = 1 self.binocular = 2 self.errdist = 2 # degrees; maximal error for drift correction self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1, -1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # set logger res = iViewXAPI.iV_SetLogger(c_int(1), c_char_p(logfile + '_SMILOG.txt')) if res != 1: err = errorstring(res) raise Exception( "Error in libsmi.SMItracker.__init__: failed to set logger; %s" % err) # first logger argument is for logging type (I'm guessing these are decimal bit codes) # LOG status bitcode # 1 = LOG_LEVEL_BUG 00001 # 2 = LOG_LEVEL_iV_FCT 00010 # 4 = LOG_LEVEL_ETCOM 00100 # 8 = LOG_LEVEL_ALL 01000 # 16 = LOG_LEVEL_IV_COMMAND 10000 # these can be used together, using a bitwise or, e.g.: 1|2|4 (bitcode 00111) # connect to iViewX res = iViewXAPI.iV_Connect(c_char_p(ip), c_int(sendport), c_char_p(ip), c_int(receiveport)) if res == 1: res = iViewXAPI.iV_GetSystemInfo(byref(systemData)) self.samplerate = systemData.samplerate self.sampletime = 1000.0 / self.samplerate if res != 1: err = 
errorstring(res) raise Exception( "Error in libsmi.SMItracker.__init__: failed to get system information; %s" % err) # handle connection errors else: self.connected = False err = errorstring(res) raise Exception( "Error in libsmi.SMItracker.__init__: establishing connection failed; %s" % err) # initiation report self.log("pygaze initiation report start") self.log("experiment: %s" % self.description) self.log("participant: %s" % self.participant) self.log("display resolution: %sx%s" % (self.dispsize[0], self.dispsize[1])) self.log("display size in cm: %sx%s" % (self.screensize[0], self.screensize[1])) self.log("samplerate: %s Hz" % self.samplerate) self.log("sampletime: %s ms" % self.sampletime) self.log("fixation threshold: %s degrees" % self.fixtresh) self.log("speed threshold: %s degrees/second" % self.spdtresh) self.log("acceleration threshold: %s degrees/second**2" % self.accthresh) self.log("pygaze initiation report end") def calibrate(self, calibrate=True, validate=True): """Calibrates the eye tracking system arguments None keyword arguments calibrate -- Boolean indicating if calibration should be performed (default = True) validate -- Boolean indicating if validation should be performed (default = True) returns success -- returns True if calibration succeeded, or False if not; in addition a calibration log is added to the log file and some properties are updated (i.e. the thresholds for detection algorithms) """ # TODO: # add feedback for calibration (e.g. with iV_GetAccuracyImage (struct ImageStruct * imageData) for accuracy and iV_GetEyeImage for cool eye pictures) # example: res = iViewXAPI.iV_GetEyeImage(byref(imageData)) # ImageStruct has four data fields: # imageHeight -- int vertical size (px) # imageWidth -- int horizontal size (px) # imageSize -- int image data size (byte) # imageBuffer -- pointer to image data (I have NO idea what format this is in) # configure calibration (NOT starting it) calibrationData = CCalibration( 9, 1, 0, 1, 1, 0, 127, 1, 15, b"" ) # (method (i.e.: number of points), visualization, display, speed, auto, fg, bg, shape, size, filename) # setup calibration res = iViewXAPI.iV_SetupCalibration(byref(calibrationData)) if res != 1: err = errorstring(res) raise Exception( "Error in libsmi.SMItracker.calibrate: failed to setup calibration; %s" % err) # calibrate cres = iViewXAPI.iV_Calibrate() # validate if calibration returns succes if cres == 1: cerr = None vres = iViewXAPI.iV_Validate() # handle validation errors if vres != 1: verr = errorstring(vres) else: verr = None ## # TEST # ## res = iViewXAPI.iV_GetAccuracyImage(byref(imageData)) ## self.log("IMAGEBUFFERSTART") ## self.log(imageData.imageBuffer) ## self.log("IMAGEBUFFERSTOP") ## print("Image height: %s, image width: %s, image size: %s" % (imageData.imageHeight,imageData.imageWidth, imageData.imageSize)) ## print imageData.imageBuffer ## ######## # handle calibration errors else: cerr = errorstring(cres) # return succes if cerr == None: print("libsmi.SMItracker.calibrate: calibration was succesful") if verr == None: print("libsmi.SMItracker.calibrate: validation was succesful") # present instructions self.disp.fill() # clear display self.screen.draw_text( text= "Noise calibration: please look at the dot\n\n(press space to start)", pos=(self.dispsize[0] / 2, int(self.dispsize[1] * 0.2)), center=True) self.screen.draw_fixation(fixtype='dot') self.disp.fill(self.screen) self.disp.show() self.screen.clear() # clear screen again # wait for spacepress self.kb.get_key(keylist=['space'], timeout=None) # 
# show fixation
self.disp.fill()
self.screen.draw_fixation(fixtype='dot')
self.disp.fill(self.screen)
self.disp.show()
self.screen.clear()
# wait for a bit, to allow the participant to fixate
clock.pause(500)
# get samples
# samplelist, prefilled with 1 sample to prevent sl[-1] from producing an
# error; the first sample is ignored in the RMS calculation
sl = [self.sample()]
t0 = clock.get_time() # starting time
while clock.get_time() - t0 < 1000:
    s = self.sample() # sample
    if s != sl[-1] and s != (-1, -1) and s != (0, 0):
        sl.append(s)
# calculate RMS noise
Xvar = []
Yvar = []
for i in range(2, len(sl)):
    Xvar.append((sl[i][0] - sl[i - 1][0]) ** 2)
    Yvar.append((sl[i][1] - sl[i - 1][1]) ** 2)
XRMS = (sum(Xvar) / len(Xvar)) ** 0.5
YRMS = (sum(Yvar) / len(Yvar)) ** 0.5
self.pxdsttresh = (XRMS, YRMS)
# calculate pixels per cm
pixpercm = (self.dispsize[0] / float(self.screensize[0]) +
    self.dispsize[1] / float(self.screensize[1])) / 2
# get accuracy
res = 0
i = 0
while res != 1 and i < self.maxtries:
    # multiple tries, in case no (valid) sample is available
    res = iViewXAPI.iV_GetAccuracy(byref(accuracyData), 0) # 0 is for 'no visualization'
    i += 1
    clock.pause(int(self.sampletime)) # wait for sampletime
if res == 1:
    # accuracy = (left tuple, right tuple); tuple = (horizontal deviation,
    # vertical deviation) in degrees of visual angle; the right-eye fields
    # (deviationRX/RY) are assumed to mirror the left-eye fields
    self.accuracy = (
        (accuracyData.deviationLX, accuracyData.deviationLY),
        (accuracyData.deviationRX, accuracyData.deviationRY))
else:
    err = errorstring(res)
    print("WARNING libsmi.SMItracker.calibrate: failed to obtain accuracy data; %s" % err)
    self.accuracy = ((2, 2), (2, 2))
    print("libsmi.SMItracker.calibrate: As an estimate, the intersample distance threshold was set to its default value of 2 degrees")
# get distance from screen to eyes (information from tracker)
res = 0
i = 0
while res != 1 and i < self.maxtries:
    # multiple tries, in case no (valid) sample is available
    res = iViewXAPI.iV_GetSample(byref(sampleData))
    i += 1
    clock.pause(int(self.sampletime)) # wait for sampletime
if res == 1:
    # eyePositionZ is in mm; screendist is in cm
    screendist = sampleData.leftEye.eyePositionZ / 10.0
else:
    err = errorstring(res)
    print("WARNING libsmi.SMItracker.calibrate: failed to obtain screen distance; %s" % err)
    screendist = settings.SCREENDIST
    print("libsmi.SMItracker.calibrate: As an estimate, the screen distance was set to its default value of 57 cm")
# calculate thresholds based on tracker settings
self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm)
self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
self.pxaccuracy = (
    (deg2pix(screendist, self.accuracy[0][0], pixpercm),
     deg2pix(screendist, self.accuracy[0][1], pixpercm)),
    (deg2pix(screendist, self.accuracy[1][0], pixpercm),
     deg2pix(screendist, self.accuracy[1][1], pixpercm)))
self.pxspdtresh = deg2pix(screendist, self.spdtresh / 1000.0, pixpercm) # in pixels per millisecond
self.pxacctresh = deg2pix(screendist, self.accthresh / 1000.0, pixpercm) # in pixels per millisecond**2
# calibration report
self.log("pygaze calibration report start")
self.log("accuracy (degrees): LX=%s, LY=%s, RX=%s, RY=%s" % (
    self.accuracy[0][0], self.accuracy[0][1],
    self.accuracy[1][0], self.accuracy[1][1]))
self.log("accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s" % (
    self.pxaccuracy[0][0], self.pxaccuracy[0][1],
    self.pxaccuracy[1][0], self.pxaccuracy[1][1]))
self.log("precision (RMS noise in pixels): X=%s, Y=%s" % (
    self.pxdsttresh[0], self.pxdsttresh[1]))
self.log("distance between participant and display: %s cm" % screendist)
self.log("fixation threshold: %s pixels" % self.pxfixtresh) self.log("speed threshold: %s pixels/ms" % self.pxspdtresh) self.log("acceleration threshold: %s pixels/ms**2" % self.pxacctresh) self.log("pygaze calibration report end") return True # validation error else: print( "WARNING libsmi.SMItracker.calibrate: validation was unsuccesful %s" % verr) return False # calibration error else: print( "WARNING libsmi.SMItracker.calibrate: calibration was unsuccesful; %s" % cerr) return False def close(self): """Neatly close connection to tracker arguments None returns Nothing -- saves data and sets self.connected to False """ # save data res = iViewXAPI.iV_SaveData(str(self.outputfile), str(self.description), str(self.participant), 1) if res != 1: err = errorstring(res) raise Exception( "Error in libsmi.SMItracker.close: failed to save data; %s" % err) # close connection iViewXAPI.iV_Disconnect() self.connected = False def connected(self): """Checks if the tracker is connected arguments None returns connected -- True if connection is established, False if not; sets self.connected to the same value """ res = iViewXAPI.iV_IsConnected() if res == 1: self.connected = True else: self.connected = False return self.connected def drift_correction(self, pos=None, fix_triggered=False): """Performs a drift check arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) fix_triggered -- Boolean indicating if drift check should be performed based on gaze position (fix_triggered = True) or on spacepress (fix_triggered = False) (default = False) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if fix_triggered: return self.fix_triggered_drift_correction(pos) if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 pressed = False while not pressed: pressed, presstime = self.kb.get_key() if pressed: if pressed == 'escape' or pressed == 'q': print( "libsmi.SMItracker.drift_correction: 'q' or 'escape' pressed" ) return self.calibrate(calibrate=True, validate=True) gazepos = self.sample() if ((gazepos[0] - pos[0])**2 + (gazepos[1] - pos[1])**2)**0.5 < self.pxerrdist: return True else: self.errorbeep.play() return False def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30): """Performs a fixation triggered drift correction by collecting a number of samples and calculating the average distance from the fixation position arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) min_samples -- minimal amount of samples after which an average deviation is calculated (default = 10) max_dev -- maximal deviation from fixation in pixels (default = 60) reset_threshold -- if the horizontal or vertical distance in pixels between two consecutive samples is larger than this threshold, the sample collection is reset (default = 30) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 # loop until we have sufficient samples lx = [] ly = [] while len(lx) < min_samples: # pressing escape enters the calibration screen if self.kb.get_key()[0] in ['escape', 'q']: print( "libsmi.SMItracker.fix_triggered_drift_correction: 'q' or 'escape' pressed" ) return self.calibrate(calibrate=True, validate=True) # collect a 
sample x, y = self.sample() if len(lx) == 0 or x != lx[-1] or y != ly[-1]: # if present sample deviates too much from previous sample, reset counting if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold): lx = [] ly = [] # collect samples else: lx.append(x) ly.append(y) if len(lx) == min_samples: avg_x = sum(lx) / len(lx) avg_y = sum(ly) / len(ly) d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5 if d < max_dev: return True else: lx = [] ly = [] def get_eyetracker_clock_async(self): """Not supported for SMItracker (yet)""" print("function not supported yet") def log(self, msg): """Writes a message to the log file arguments ms -- a string to include in the log file returns Nothing -- uses native log function of iViewX to include a line in the log file """ res = iViewXAPI.iV_Log(c_char_p(msg)) if res != 1: err = errorstring(res) print( "WARNING libsmi.SMItracker.log: failed to log message '%s'; %s" % (msg, err)) def log_var(self, var, val): """Writes a variable to the log file arguments var -- variable name val -- variable value returns Nothing -- uses native log function of iViewX to include a line in the log file in a "var NAME VALUE" layout """ msg = "var %s %s" % (var, val) res = iViewXAPI.iV_Log(c_char_p(msg)) if res != 1: err = errorstring(res) print( "WARNING libsmi.SMItracker.log_var: failed to log variable '%s' with value '%s'; %s" % (var, val, err)) def prepare_backdrop(self): """Not supported for SMItracker (yet)""" print("function not supported yet") def prepare_drift_correction(self, pos): """Not supported for SMItracker (yet)""" print("function not supported yet") def pupil_size(self): """Return pupil size arguments None returns pupil size -- returns pupil diameter for the eye that is currently being tracked (as specified by self.eye_used) or -1 when no data is obtainable """ res = iViewXAPI.iV_GetSample(byref(sampleData)) # if a new sample exists if res == 1: # left eye if self.eye_used == self.left_eye: ps = sampleData.leftEye.diam # right eye else: ps = sampleData.rightEye.diam # set prvious pupil size to newest pupil size self.prevps = ps return ps # no new sample available elif res == 2: return self.prevps # invalid data else: # print warning to interpreter err = errorstring(res) print( "WARNING libsmi.SMItracker.pupil_size: failed to obtain sample; %s" % err) return -1 def sample(self): """Returns newest available gaze position arguments None returns sample -- an (x,y) tuple or a (-1,-1) on an error """ res = iViewXAPI.iV_GetSample(byref(sampleData)) if self.eye_used == self.right_eye: newsample = sampleData.rightEye.gazeX, sampleData.rightEye.gazeY else: newsample = sampleData.leftEye.gazeX, sampleData.leftEye.gazeY if res == 1: self.prevsample = newsample[:] return newsample elif res == 2: return self.prevsample else: err = errorstring(res) print( "WARNING libsmi.SMItracker.sample: failed to obtain sample; %s" % err) return (-1, -1) def send_command(self, cmd): """Sends a command to the eye tracker arguments cmd -- the command (a string value) to be sent to iViewX returns Nothing """ try: iViewXAPI.iV_SendCommand(c_char_p(cmd)) except: raise Exception( "Error in libsmi.SMItracker.send_command: failed to send remote command to iViewX (iV_SendCommand might be deprecated)" ) def set_backdrop(self): """Not supported for SMItracker (yet)""" print("function not supported yet") def set_eye_used(self): """Logs the eye_used variable, based on which eye was specified (if both eyes are being tracked, the left eye is used) arguments None 
returns Nothing -- logs which eye is used by calling self.log_var, e.g. self.log_var("eye_used", "right") """ if self.eye_used == self.right_eye: self.log_var("eye_used", "right") else: self.log_var("eye_used", "left") def start_recording(self): """Starts recording eye position arguments None returns Nothing -- sets self.recording to True when recording is successfully started """ res = 0 i = 0 while res != 1 and i < self.maxtries: res = iViewXAPI.iV_StartRecording() i += 1 if res == 1: self.recording = True else: self.recording = False err = errorstring(res) raise Exception("Error in libsmi.SMItracker.start_recording: %s" % err) def status_msg(self, msg): """Not supported for SMItracker (yet)""" print("function not supported yet") def stop_recording(self): """Stop recording eye position arguments None returns Nothing -- sets self.recording to False when recording is successfully started """ res = 0 i = 0 while res != 1 and i < self.maxtries: res = iViewXAPI.iV_StopRecording() i += 1 if res == 1: self.recording = False else: self.recording = False err = errorstring(res) raise Exception("Error in libsmi.SMItracker.stop_recording: %s" % err) def set_detection_type(self, eventdetection): """Set the event detection type to either PyGaze algorithms, or native algorithms as provided by the manufacturer (only if available: detection type will default to PyGaze if no native functions are available) arguments eventdetection -- a string indicating which detection type should be employed: either 'pygaze' for PyGaze event detection algorithms or 'native' for manufacturers algorithms (only if available; will default to 'pygaze' if no native event detection is available) returns -- detection type for saccades, fixations and blinks in a tuple, e.g. ('pygaze','native','native') when 'native' was passed, but native detection was not available for saccade detection """ if eventdetection in ['pygaze', 'native']: self.eventdetection = eventdetection return ('pygaze', 'native', 'pygaze') def wait_for_event(self, event): """Waits for event arguments event -- an integer event code, one of the following: 3 = STARTBLINK 4 = ENDBLINK 5 = STARTSACC 6 = ENDSACC 7 = STARTFIX 8 = ENDFIX returns outcome -- a self.wait_for_* method is called, depending on the specified event; the return values of corresponding method are returned """ if event == 5: outcome = self.wait_for_saccade_start() elif event == 6: outcome = self.wait_for_saccade_end() elif event == 7: outcome = self.wait_for_fixation_start() elif event == 8: outcome = self.wait_for_fixation_end() elif event == 3: outcome = self.wait_for_blink_start() elif event == 4: outcome = self.wait_for_blink_end() else: raise Exception( "Error in libsmi.SMItracker.wait_for_event: eventcode %s is not supported" % event) return outcome def wait_for_blink_end(self): """Waits for a blink end and returns the blink ending time arguments None returns timestamp -- blink ending time in milliseconds, as measured from experiment begin time """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, but SMI does not offer blink detection; PyGaze algorithm will be used")

        # # # # #
        # PyGaze method

        blinking = True

        # loop while there is a blink
        while blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's valid
            if self.is_valid_sample(gazepos):
                # if it is a valid sample, blinking has stopped
                blinking = False

        # return timestamp of blink end
        return clock.get_time()

    def wait_for_blink_start(self):

        """Waits for a blink start and returns the blink starting time

        arguments
        None

        returns
        timestamp -- blink starting time in milliseconds, as measured from
                   experiment begin time
        """

        # # # # #
        # SMI method

        if self.eventdetection == 'native':
            # print warning, since SMI does not have blink detection
            # built into their API
            print("WARNING! 'native' event detection has been selected, "
                "but SMI does not offer blink detection; PyGaze algorithm "
                "will be used")

        # # # # #
        # PyGaze method

        blinking = False

        # loop until there is a blink
        while not blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's a valid sample
            if not self.is_valid_sample(gazepos):
                # get timestamp for possible blink start
                t0 = clock.get_time()
                # loop until a blink is determined, or a valid sample occurs
                while not self.is_valid_sample(self.sample()):
                    # check if time has surpassed 150 ms
                    if clock.get_time() - t0 >= 150:
                        # return timestamp of blink start
                        return t0

    def wait_for_fixation_end(self):

        """Returns time and gaze position when a fixation has ended;
        function assumes that a 'fixation' has ended when a deviation of
        more than self.pxfixtresh from the initial fixation position has
        been detected (self.pxfixtresh is created in self.calibration,
        based on self.fixtresh, a property defined in self.__init__)

        arguments
        None

        returns
        time, gazepos -- time is the fixation ending time in milliseconds
                   (from expstart), gazepos is an (x,y) gaze position tuple
                   of the position from which the fixation was initiated
        """

        # # # # #
        # SMI method

        if self.eventdetection == 'native':

            moving = True
            while moving:
                # get newest event
                res = 0
                while res != 1:
                    res = iViewXAPI.iV_GetEvent(byref(eventData))
                    stime = clock.get_time()
                # check if event is a fixation (SMI only supports
                # fixations at the moment)
                if eventData.eventType == 'F':
                    # get ending time and position (event times are in
                    # microseconds)
                    timediff = stime - (int(eventData.startTime) / 1000.0)
                    etime = timediff + (int(eventData.endTime) / 1000.0)
                    fixpos = (eventData.positionX, eventData.positionY)
                    # return ending time and position
                    return etime, fixpos

        # # # # #
        # PyGaze method

        else:
            # function assumes that a 'fixation' has ended when a deviation
            # of more than fixtresh from the initial 'fixation' position has
            # been detected

            # get starting time and position
            stime, spos = self.wait_for_fixation_start()

            # loop until fixation has ended
            while True:
                # get newest sample
                npos = self.sample()
                # check if sample is valid
                if self.is_valid_sample(npos):
                    # check if sample deviates too much from starting position
                    if (npos[0] - spos[0]) ** 2 + (npos[1] - spos[1]) ** 2 > self.pxfixtresh ** 2: # Pythagoras
                        # break loop if deviation is too high
                        break

            return clock.get_time(), spos

    def wait_for_fixation_start(self):

        """Returns starting time and position when a fixation is started;
        function assumes a 'fixation' has started when gaze position
        remains reasonably stable (i.e.
when most deviant samples are within self.pxfixtresh) for five samples in a row (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a fixation start # detection built into their API (only ending) print("WARNING! 'native' event detection has been selected, \ but SMI does not offer fixation START detection (only \ fixation ENDING; PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes a 'fixation' has started when gaze position # remains reasonably stable for self.fixtimetresh # get starting position spos = self.sample() while not self.is_valid_sample(spos): spos = self.sample() # get starting time t0 = clock.get_time() # wait for reasonably stable position moving = True while moving: # get new sample npos = self.sample() # check if sample is valid if self.is_valid_sample(npos): # check if new sample is too far from starting position if (npos[0] - spos[0])**2 + ( npos[1] - spos[1])**2 > self.pxfixtresh**2: # Pythagoras # if not, reset starting position and time spos = copy.copy(npos) t0 = clock.get_time() # if new sample is close to starting sample else: # get timestamp t1 = clock.get_time() # check if fixation time threshold has been surpassed if t1 - t0 >= self.fixtimetresh: # return time and starting position return t1, spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a saccade is ended; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos, endpos -- endtime in milliseconds (from expbegintime); startpos and endpos are (x,y) gaze position tuples """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but SMI does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) t0, spos = self.wait_for_saccade_start() # get valid sample prevpos = self.sample() while not self.is_valid_sample(prevpos): prevpos = self.sample() # get starting time, intersample distance, and velocity t1 = clock.get_time() s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])** 2)**0.5 # = intersample distance = speed in px/sample v0 = s / (t1 - t0) # run until velocity and acceleration go below threshold saccadic = True while saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # calculate distance s = ((newpos[0] - prevpos[0])**2 + (newpos[1] - prevpos[1])** 2)**0.5 # = speed in pixels/sample # calculate velocity v1 = s / (t1 - t0) # calculate acceleration a = (v1 - v0) / ( t1 - t0 ) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample) # check if velocity and acceleration are below threshold if v1 < self.pxspdtresh and (a > -1 * self.pxacctresh and a < 0): saccadic = False epos = newpos[:] etime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return etime, spos, epos def wait_for_saccade_start(self): """Returns starting time and starting position when a saccade is started; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos -- endtime in milliseconds (from expbegintime); startpos is an (x,y) gaze position tuple """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but SMI does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) newpos = self.sample() while not self.is_valid_sample(newpos): newpos = self.sample() # get starting time, position, intersampledistance, and velocity t0 = clock.get_time() prevpos = newpos[:] s = 0 v0 = 0 # get samples saccadic = False while not saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # check if distance is larger than precision error sx = newpos[0] - prevpos[0] sy = newpos[1] - prevpos[1] if (sx / self.pxdsttresh[0])**2 + ( sy / self.pxdsttresh[1] )**2 > self.weightdist: # weigthed distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise # calculate distance s = ((sx)**2 + (sy)** 2)**0.5 # intersampledistance = speed in pixels/ms # calculate velocity v1 = s / (t1 - t0) # calculate acceleration a = (v1 - v0) / (t1 - t0) # acceleration in pixels/ms**2 # check if either velocity or acceleration are above threshold values if v1 > self.pxspdtresh or a > self.pxacctresh: saccadic = True spos = prevpos[:] stime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return stime, spos def is_valid_sample(self, gazepos): """Checks if the sample provided is valid, based on SMI specific criteria (for internal use) arguments gazepos -- a (x,y) gaze position tuple, as returned by self.sample() returns valid -- a Boolean: True on a valid sample, False on an invalid sample """ # return False if a sample is invalid if gazepos == (-1, -1): return False # sometimes, on SMI devices, invalid samples can actually contain # numbers; these do elif sum(gazepos) < 10 and 0.0 in gazepos: return False # in any other case, the sample is valid return True
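# ---------------------------------------------------------------------------
# Usage sketch (not part of the library): a minimal example of how an
# experiment script would typically drive this back-end through PyGaze's
# generic front-end. It assumes pygaze.display.Display and
# pygaze.eyetracker.EyeTracker with the project constants configured for the
# SMI back-end; only methods defined on the tracker class above are called.
# ---------------------------------------------------------------------------
import time

from pygaze.display import Display
from pygaze.eyetracker import EyeTracker

disp = Display()
tracker = EyeTracker(disp)          # dispatches to SMItracker when configured for SMI

tracker.calibrate()                 # calibration, validation and noise estimation
tracker.drift_correction()          # spacebar-triggered drift check at screen centre
tracker.start_recording()
tracker.log("trial 1 start")        # written to the iViewX log file

t0 = time.time()
while time.time() - t0 < 2.0:       # poll gaze for two seconds
    x, y = tracker.sample()         # newest gaze position, (-1, -1) on error

tracker.log("trial 1 end")
tracker.stop_recording()
tracker.close()                     # saves the data file and disconnects
disp.close()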
class Dummy(DumbDummy): """A dummy class to run experiments in dummy mode, where eye movements are simulated by the mouse""" def __init__(self, display): """Initiates an eyetracker dummy object, that simulates gaze position using the mouse arguments display -- a pygaze display.Display instance keyword arguments None """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, Dummy) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass self.recording = False self.blinking = False self.bbpos = (settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2) self.resolution = settings.DISPSIZE[:] self.simulator = Mouse(disptype=settings.DISPTYPE, mousebuttonlist=None, timeout=2, visible=False) self.kb = Keyboard(disptype=settings.DISPTYPE, keylist=None, timeout=None) self.angrybeep = Sound(osc='saw', freq=100, length=100, attack=0, decay=0, soundfile=None) self.display = display self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False) def calibrate(self): """Dummy calibration""" print("Calibration would now take place") clock.pause(1000) def drift_correction(self, pos=None, fix_triggered=False): """Dummy drift correction""" print("Drift correction would now take place") if fix_triggered: return self.fix_triggered_drift_correction(pos) if pos == None: pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2 # show mouse self.simulator.set_visible(visible=True) # show fixation dot self.draw_drift_correction_target(pos[0], pos[1]) # perform drift check errdist = 60 # pixels (on a 1024x768px and 39.9x29.9cm monitor at 67 cm, this is about 2 degrees of visual angle) pressed = None while True: # check for keyboard input pressed, presstime = self.kb.get_key( keylist=['q', 'escape', 'space'], timeout=1) # quit key if pressed in ['q', 'escape']: # hide mouse self.simulator.set_visible(visible=False) return False # space bar elif pressed == 'space': # get sample gazepos = self.sample() # sample is close enough to fixation dot if ((gazepos[0] - pos[0])**2 + (gazepos[1] - pos[1])**2)**0.5 < errdist: # hide mouse self.simulator.set_visible(visible=False) return True # sample is NOT close enough to fixation dot else: # show discontent self.angrybeep.play() def fix_triggered_drift_correction(self, pos=None, min_samples=30, max_dev=60, reset_threshold=10): """Dummy drift correction (fixation triggered)""" print("Drift correction (fixation triggered) would now take place") if pos == None: pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2 # show mouse self.simulator.set_visible(visible=True) # show fixation dot self.draw_drift_correction_target(pos[0], pos[1]) while True: # loop until we have sufficient samples lx = [] ly = [] while len(lx) < min_samples: # pressing escape enters the calibration screen if self.kb.get_key(keylist=["escape", "q"], timeout=0)[0] != None: self.recording = False print( "libeyetracker.libeyetracker.fix_triggered_drift_correction(): 'q' pressed" ) self.simulator.set_visible(visible=False) return False # collect a sample x, y = self.sample() if len(lx) == 0 or x != lx[-1] or y != ly[-1]: # if present sample deviates too much from previous sample, reset counting if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold): lx = [] ly = [] # collect samples else: lx.append(x) ly.append(y) # check if samples are within max. 
deviation if len(lx) == min_samples: avg_x = sum(lx) / len(lx) avg_y = sum(ly) / len(ly) d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5 if d < max_dev: self.simulator.set_visible(visible=False) return True else: lx = [] ly = [] def start_recording(self): """Dummy for starting recording, prints what would have been the recording start""" self.simulator.set_visible(visible=True) dumrectime = clock.get_time() self.recording = True print("Recording would have started at: " + str(dumrectime)) def stop_recording(self): """Dummy for stopping recording, prints what would have been the recording end""" self.simulator.set_visible(visible=False) dumrectime = clock.get_time() self.recording = False print("Recording would have stopped at: " + str(dumrectime)) def close(self): """Dummy for closing connection with eyetracker, prints what would have been connection closing time""" if self.recording: self.stop_recording() closetime = clock.get_time() print("eyetracker connection would have closed at: " + str(closetime)) def pupil_size(self): """Returns dummy pupil size""" return 19 def sample(self): """Returns simulated gaze position (=mouse position)""" if self.blinking: if self.simulator.get_pressed()[2]: # buttondown self.simulator.set_pos(pos=( self.bbpos[0], self.resolution[1])) # set position to blinking position elif not self.simulator.get_pressed()[2]: # buttonup self.simulator.set_pos( pos=self.bbpos) # set position to position before blinking self.blinking = False # 'blink' stopped elif not self.blinking: if self.simulator.get_pressed()[2]: # buttondown self.blinking = True # 'blink' started self.bbpos = self.simulator.get_pos( ) # position before blinking self.simulator.set_pos(pos=( self.bbpos[0], self.resolution[1])) # set position to blinking position return self.simulator.get_pos() def wait_for_saccade_start(self): """Returns starting time and starting position when a simulated saccade is started""" # function assumes that a 'saccade' has been started when a deviation of more than # maxerr from the initial 'gaze' position has been detected (using Pythagoras, ofcourse) spos = self.sample() # starting position maxerr = 3 # pixels while True: npos = self.sample() # get newest sample if ((spos[0] - npos[0])**2 + (spos[1] - npos[1])**2)**0.5 > maxerr: # Pythagoras break return clock.get_time(), spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a simulated saccade is ended""" # function assumes that a 'saccade' has ended when 'gaze' position remains reasonably # (i.e.: within maxerr) stable for five samples # for saccade start algorithm, see wait_for_fixation_start stime, spos = self.wait_for_saccade_start() maxerr = 3 # pixels # wait for reasonably stable position xl = [] # list for last five samples (x coordinate) yl = [] # list for last five samples (y coordinate) moving = True while moving: # check positions npos = self.sample() xl.append(npos[0]) # add newest sample yl.append(npos[1]) # add newest sample if len(xl) == 5: # check if deviation is small enough if max(xl) - min(xl) < maxerr and max(yl) - min(yl) < maxerr: moving = False # remove oldest sample xl.pop(0) yl.pop(0) # wait for a bit, to avoid immediately returning (runs go faster than mouse moves) clock.pause(10) return clock.get_time(), spos, (xl[len(xl) - 1], yl[len(yl) - 1]) def wait_for_fixation_start(self): """Returns starting time and position when a simulated fixation is started""" # function assumes a 'fixation' has started when 'gaze' position remains reasonably # stable for 
five samples in a row (same as saccade end) maxerr = 3 # pixels # wait for reasonably stable position xl = [] # list for last five samples (x coordinate) yl = [] # list for last five samples (y coordinate) moving = True while moving: npos = self.sample() xl.append(npos[0]) # add newest sample yl.append(npos[1]) # add newest sample if len(xl) == 5: # check if deviation is small enough if max(xl) - min(xl) < maxerr and max(yl) - min(yl) < maxerr: moving = False # remove oldest sample xl.pop(0) yl.pop(0) # wait for a bit, to avoid immediately returning (runs go faster than mouse moves) clock.pause(10) return clock.get_time(), (xl[len(xl) - 1], yl[len(yl) - 1]) def wait_for_fixation_end(self): """Returns time and gaze position when a simulated fixation is ended""" # function assumes that a 'fixation' has ended when a deviation of more than maxerr # from the initial 'fixation' position has been detected (using Pythagoras, ofcourse) stime, spos = self.wait_for_fixation_start() maxerr = 3 # pixels while True: npos = self.sample() # get newest sample if ((spos[0] - npos[0])**2 + (spos[1] - npos[1])**2)**0.5 > maxerr: # Pythagoras break return clock.get_time(), spos def wait_for_blink_start(self): """Returns starting time and position of a simulated blink (mousebuttondown)""" # blinks are simulated with mouseclicks: a right mouseclick simulates the closing # of the eyes, a mousebuttonup the opening. while not self.blinking: pos = self.sample() return clock.get_time(), pos def wait_for_blink_end(self): """Returns ending time and position of a simulated blink (mousebuttonup)""" # blinks are simulated with mouseclicks: a right mouseclick simulates the closing # of the eyes, a mousebuttonup the opening. # wait for blink start while not self.blinking: spos = self.sample() # wait for blink end while self.blinking: epos = self.sample() return clock.get_time(), epos def set_draw_drift_correction_target_func(self, func): """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker""" self.draw_drift_correction_target = func # *** # # Internal functions below # # *** def draw_drift_correction_target(self, x, y): """ Draws the drift-correction target. arguments x -- The X coordinate y -- The Y coordinate """ self.screen.clear() self.screen.draw_fixation(fixtype='dot', colour=settings.FGC, \ pos=(x,y), pw=0, diameter=12) self.display.fill(self.screen) self.display.show()
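# ---------------------------------------------------------------------------
# Usage sketch (not part of the library): the Dummy class mirrors the tracker
# interface, so the same experiment code can run without eye-tracking
# hardware. This is a hypothetical example; it assumes the generic
# pygaze.eyetracker.EyeTracker front-end accepts trackertype='dummy' to
# select this class. The mouse position stands in for gaze, and holding the
# right mouse button simulates a blink.
# ---------------------------------------------------------------------------
from pygaze.display import Display
from pygaze.eyetracker import EyeTracker

disp = Display()
tracker = EyeTracker(disp, trackertype='dummy')

tracker.calibrate()            # only prints a message and pauses 1000 ms
tracker.start_recording()      # makes the simulated gaze (mouse) visible

# gaze position is simply the current mouse position
print("simulated gaze: %s, %s" % tracker.sample())

# a right-button press/release pair is reported as a blink
t_start, pos = tracker.wait_for_blink_start()
t_end, pos = tracker.wait_for_blink_end()

tracker.stop_recording()
tracker.close()
disp.close()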
def __init__(self, libeyelink, tracker): """ Constructor. Arguments: libeyelink -- A libeyelink object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.libeyelink = libeyelink self.display = libeyelink.display self.screen = Screen(disptype=DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=0) self.mouse = Mouse(timeout=0) if DISPTYPE == 'pygame': self.kb.set_timeout(timeout=0.001) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. #if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), '__eyelink__.jpg') # drawing properties self.xc = self.display.dispsize[0]/2 self.yc = self.display.dispsize[1]/2 self.extra_info = True self.ld = 40 # line distance self.fontsize = libeyelink.fontsize self.title = "" self.display_open = True # menu self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False) self.menuscreen.draw_text(text="Eyelink calibration menu", pos=(self.xc,self.yc-6*self.ld), center=True, font='mono', fontsize=int(2*self.fontsize), antialias=True) self.menuscreen.draw_text(text="%s (pygaze %s, pylink %s)" \ % (libeyelink.eyelink_model, pygaze.version, pylink.__version__), pos=(self.xc,self.yc-5*self.ld), center=True, font='mono', fontsize=int(.8*self.fontsize), antialias=True) self.menuscreen.draw_text(text="Press C to calibrate", pos=(self.xc, self.yc-3*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press V to validate", pos=(self.xc, self.yc-2*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press A to auto-threshold", pos=(self.xc,self.yc-1*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press I to toggle extra info in camera image", pos=(self.xc,self.yc-0*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press Enter to show camera image", pos=(self.xc,self.yc+1*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text( text="(then change between images using the arrow keys)", pos=(self.xc, self.yc+2*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press Escape to abort experiment", pos=(self.xc, self.yc+4*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press Q to exit menu", pos=(self.xc, self.yc+5*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) # beeps self.__target_beep__ = Sound(osc='sine', freq=440, length=50, attack=0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200, attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200, attack=0, decay=0, soundfile=None) # Colors self.color = { pylink.CR_HAIR_COLOR: pygame.Color('white'), pylink.PUPIL_HAIR_COLOR: pygame.Color('white'), pylink.PUPIL_BOX_COLOR: pygame.Color('green'), pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color('red'), pylink.MOUSE_CURSOR_COLOR: pygame.Color('red'), 'font': pygame.Color('white'), } # Font pygame.font.init() self.font = pygame.font.SysFont('Courier New', 11) # further properties self.state = None self.pal = None self.size = (0,0) self.set_tracker(tracker) 
self.last_mouse_state = -1 self.bit64 = '64bit' in platform.architecture() self.imagebuffer = self.new_array()
def __init__(self, display, logfile=settings.LOGFILE, \ eventdetection=settings.EVENTDETECTION, \ saccade_velocity_threshold=35, \ saccade_acceleration_threshold=9500, \ blink_threshold=settings.BLINKTHRESH, \ **args): # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, EyeLogicTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass self.disp = display self.screen = Screen() self.dispsize = self.disp.dispsize # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # show a message self.screen.clear() self.screen.draw_text( text="Initialising the eye tracker, please wait...", fontsize=20) self.disp.fill(self.screen) self.disp.show() # output file properties self.logfile = logfile # eye tracker properties self._recording = Event() self._recording.clear() self._calibrated = Event() self._calibrated.clear() self.eye_used = 2 # 0=left, 1=right, 2=binocular self.sampleLock = Lock() self.lastSample = None self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) # event detection properties self.pxfixtresh = 50; self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method self.eventdetection = eventdetection self._log_vars = [ \ "timestampMicroSec", \ "index", \ "porFilteredX", \ "porFilteredY", \ "porLeftX", \ "porLeftY", \ "pupilRadiusLeft", \ "porRightX", \ "porRightY", \ "pupilRadiusRight", \ ] # Open a new log file. dir_name = os.path.dirname(logfile) file_name = os.path.basename(logfile) name, ext = os.path.splitext(file_name) self._data_file_path = os.path.join(dir_name, name+".eyelogic.csv") self._log_file = open(self._data_file_path, "w") # Write a header to the log. header = ["TYPE"] header.extend(self._log_vars) self._sep = ";" self._log_file.write("Sep="+self._sep+"\n") self._log_file.write(self._sep.join(map(str, header))) # Create a lock to prevent simultaneous access to the log file. 
self._logging_queue = Queue() self._logging_queue_empty = Event() self._logging_queue_empty.set() self._connected = Event() self._connected.set() self._log_counter = 0 self._log_consolidation_freq = 60 self._logging_thread = Thread( target=self.loggingThread, \ name='PyGaze_EyeLogic_Logging', args=[]) global g_api g_api = self # log self.log("pygaze initiation") #self.log("experiment = {}".format(self.description)) #self.log("participant = {}".format(self.participant)) self.log("display resolution = {}x{}".format(self.dispsize[0], \ self.dispsize[1])) self.log("display size in cm = {}x{}".format(self.screensize[0], \ self.screensize[1])) self.log("fixation threshold = {} degrees".format(self.fixtresh)) self.log("speed threshold = {} degrees/second".format(self.spdtresh)) self.log("acceleration threshold = {} degrees/second**2".format( \ self.accthresh)) # connect self.api = ELApi( "PyGaze" ) self.api.registerGazeSampleCallback( gazeSampleCallback ) self.api.registerEventCallback( eventCallback ) resultConnect = self.api.connect() if (resultConnect != ELApi.ReturnConnect.SUCCESS): self._connected.clear() raise Exception("Cannot connect to EyeLogic server = {}".format(errorstringConnect(resultConnect))) self._connected.set() screenConfig = self.api.getScreenConfig() self.log("eye tracker is mounted on screen {}".format(screenConfig.id)) self.rawResolution = (screenConfig.resolutionX, screenConfig.resolutionY) self.log("raw screen resolution = {}x{}".format( self.rawResolution[0], self.rawResolution[1])) self.log("end pygaze initiation") deviceConfig = self.api.getDeviceConfig() if (deviceConfig.deviceSerial == 0): raise Exception("no eye tracking device connected") if (len(deviceConfig.frameRates) == 0): raise Exception("failed to read out device configuration") g_api.sampleRate = deviceConfig.frameRates[0] g_api.sampleTime = 1000.0 / g_api.sampleRate g_api.log("samplerate = {} Hz".format(g_api.sampleRate)) g_api.log("sampletime = {} ms".format(g_api.sampleTime)) self._logging_thread.start() self.screen.clear() self.disp.fill(self.screen) self.disp.show()
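# ---------------------------------------------------------------------------
# Pattern sketch (not part of the library): the constructor above hands log
# rows to a Queue and a dedicated Thread so that the gaze callback never
# blocks on file I/O. The loggingThread body itself is not shown in this
# excerpt; the following is a generic, self-contained illustration of that
# producer/consumer pattern with hypothetical names, not the actual EyeLogic
# implementation.
# ---------------------------------------------------------------------------
from queue import Empty, Queue
from threading import Event, Thread

log_queue = Queue()
stop_flag = Event()

def logging_worker(path, sep=";"):
    # drain the queue to disk until a stop is requested and the queue is empty
    with open(path, "w") as f:
        while not (stop_flag.is_set() and log_queue.empty()):
            try:
                row = log_queue.get(timeout=0.1)
            except Empty:
                continue
            f.write(sep.join(map(str, row)) + "\n")
            log_queue.task_done()

worker = Thread(target=logging_worker, args=("gaze_log.csv",), daemon=True)
worker.start()

# a sample callback only needs to do this (fast, never touches the disk):
log_queue.put(["SAMPLE", 123456, 512.0, 384.0])

# at shutdown: wait for the queue to drain, then let the worker exit
log_queue.join()
stop_flag.set()
worker.join()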
def __init__(self, libeyelink, tracker): """ Constructor. Arguments: libeyelink -- A libeyelink object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.libeyelink = libeyelink self.display = libeyelink.display self.screen = Screen(disptype=DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=0) self.mouse = Mouse(timeout=0) if DISPTYPE == "pygame": self.kb.set_timeout(timeout=0.001) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. # if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), "__eyelink__.jpg") # drawing properties self.xc = self.display.dispsize[0] / 2 self.yc = self.display.dispsize[1] / 2 self.extra_info = True self.ld = 40 # line distance self.fontsize = libeyelink.fontsize self.title = "" self.display_open = True # menu self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False) self.menuscreen.draw_text( text="Eyelink calibration menu", pos=(self.xc, self.yc - 6 * self.ld), center=True, font="mono", fontsize=int(2 * self.fontsize), antialias=True, ) self.menuscreen.draw_text( text="%s (pygaze %s, pylink %s)" % (libeyelink.eyelink_model, pygaze.version, pylink.__version__), pos=(self.xc, self.yc - 5 * self.ld), center=True, font="mono", fontsize=int(0.8 * self.fontsize), antialias=True, ) self.menuscreen.draw_text( text="Press C to calibrate", pos=(self.xc, self.yc - 3 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press V to validate", pos=(self.xc, self.yc - 2 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press A to auto-threshold", pos=(self.xc, self.yc - 1 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press I to toggle extra info in camera image", pos=(self.xc, self.yc - 0 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press Enter to show camera image", pos=(self.xc, self.yc + 1 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="(then change between images using the arrow keys)", pos=(self.xc, self.yc + 2 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press Escape to abort experiment", pos=(self.xc, self.yc + 4 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press Q to exit menu", pos=(self.xc, self.yc + 5 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) # beeps self.__target_beep__ = Sound(osc="sine", freq=440, length=50, attack=0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc="sine", freq=880, length=200, attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc="sine", freq=220, length=200, attack=0, decay=0, soundfile=None) # Colors self.color = { pylink.CR_HAIR_COLOR: pygame.Color("white"), pylink.PUPIL_HAIR_COLOR: pygame.Color("white"), pylink.PUPIL_BOX_COLOR: pygame.Color("green"), pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color("red"), pylink.MOUSE_CURSOR_COLOR: pygame.Color("red"), "font": pygame.Color("white"), } # Font pygame.font.init() self.font = pygame.font.SysFont("Courier New", 11) # further properties 
self.state = None self.pal = None self.size = (0, 0) self.set_tracker(tracker) self.last_mouse_state = -1 self.bit64 = "64bit" in platform.architecture() self.imagebuffer = self.new_array()
def __init__(self, display, logfile=LOGFILE, eventdetection=EVENTDETECTION, \ saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, \ **args): """Initializes the EyeTribeTracker object arguments display -- a pygaze.display.Display instance keyword arguments logfile -- logfile name (string value); note that this is the name for the eye data log file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, EyeTribeTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = DISPSIZE # display size in pixels self.screensize = SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw',freq=100, length=100) # output file properties self.outputfile = logfile # eye tracker properties self.connected = False self.recording = False self.errdist = 2 # degrees; maximal error for drift correction self.pxerrdist = 30 # initial error in pixels self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1,-1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # connect to the tracker self.eyetribe = EyeTribe(logfilename=logfile) # get info on the sample rate self.samplerate = self.eyetribe._samplefreq self.sampletime = 1000.0 * self.eyetribe._intsampletime # initiation report self.log("pygaze initiation report start") self.log("display resolution: %sx%s" % (self.dispsize[0],self.dispsize[1])) self.log("display size in cm: %sx%s" % (self.screensize[0],self.screensize[1])) self.log("samplerate: %.2f Hz" % self.samplerate) self.log("sampletime: %.2f ms" % self.sampletime) self.log("fixation threshold: %s degrees" % self.fixtresh) self.log("speed threshold: %s degrees/second" % self.spdtresh) self.log("acceleration threshold: %s degrees/second**2" % self.accthresh) self.log("pygaze initiation report end")
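The constructor above only prepares the connection, sample-rate bookkeeping and detection thresholds; from an experiment script it is normally driven through the generic pygaze front-end. A minimal sketch, assuming a constants file that selects the EyeTribe tracker (TRACKERTYPE = 'eyetribe') and a running EyeTribe server; the log message is a placeholder:

from pygaze.display import Display
from pygaze.eyetracker import EyeTracker

disp = Display()
tracker = EyeTracker(disp)      # dispatches to EyeTribeTracker via TRACKERTYPE
tracker.calibrate()
tracker.start_recording()
tracker.log("trial 1 start")    # written to the eye-data log file
x, y = tracker.sample()         # newest gaze position in display pixels
tracker.stop_recording()
tracker.close()
disp.close()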
win.flip() waitKeys() win.flip() wait(2) ## open log for participant data storage log = open(str(pp) + "_data.txt", 'w') log.write("pp\ttrial\tid\toutcome\tdif\tinterim_ability\n") for j in range(trials): ## find the item whose difficulty is closest to the current ability estimate diflist = abs(ability - dat['difficulty']) index = min((diflist[diflist.index[i]], i) for i in range(len(diflist))) ind = index[1] ## creating sounds snd1 = Sound(osc='sine', freq=dat['sound1'][dat.index[ind]], length=500) snd2 = Sound(osc='sine', freq=dat['sound2'][dat.index[ind]], length=500) win.flip() snd1.play() wait(1) snd2.play() ## show the response options stim.draw() win.flip() keys = waitKeys(keyList=['q', 'z', 'slash']) press = keys[0] ## scoring: 'z' is correct when the two tones are identical and 'slash' is correct when they differ; any other combination is incorrect if press == 'q': break elif dat['sound1'][dat.index[ind]] == dat['sound2'][dat.index[ind]]: if press == 'z':
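The item-selection line above (index = min(...)) packs an argmin over the difficulty differences into a generator expression. Assuming dat is a pandas DataFrame, the same row can be picked more directly; the helper below is hypothetical and not part of the original script:

def closest_item(dat, ability):
    # dat is assumed to be a pandas DataFrame with a 'difficulty' column;
    # return the index label of the row whose difficulty is closest to the
    # current ability estimate (same row as the min(...) construction above)
    return (dat['difficulty'] - ability).abs().idxmin()

# e.g. ind = closest_item(dat, ability)
#      freq1 = dat.loc[ind, 'sound1']
#      freq2 = dat.loc[ind, 'sound2']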
# create instances # initialize the display disp = Display() # initialize a screen scr = Screen() # initialize a keyboard kb = Keyboard(keylist=['space'], timeout=None) # initialize a mouse mouse = Mouse(mousebuttonlist=None, timeout=None) # initialize a sound snd = Sound(osc='sine', freq=4400, length=3000) sounds = { 'a sine wave (slightly oscillating)': Sound(osc='sine', freq=440, length=5000, attack=1000, decay=1000), 'a saw wave': Sound(osc='saw', freq=880, length=5000, attack=0, decay=0), 'a square wave': Sound(osc='square', freq=1760, length=5000, attack=0, decay=0), 'white noise': Sound(osc='whitenoise'), 'soundfile': Sound(soundfile=soundfile) } # initialize a Timer timer = Time()
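The sounds dictionary above maps a short description to each Sound instance. One way the objects created here might be exercised together (a sketch, not part of the original test script):

for description, sound in sounds.items():
    # announce which sound is about to be played
    scr.clear()
    scr.draw_text(text="Now playing: %s\n\n(press space for the next sound)" \
        % description)
    disp.fill(scr)
    disp.show()
    sound.play()
    # wait for a keypress, then stop the sound before moving on
    kb.get_key(keylist=['space'], timeout=None)
    sound.stop()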
# create instances # initialize the display disp = Display() # initialize a screen scr = Screen() # initialize an EyeTracker tracker = EyeTracker(disp) # initialize a keyboard kb = Keyboard(keylist=['space'],timeout=None) # initialize a sound snd = Sound(soundfile=soundfile) # initialize a Timer timer = Time() # create a new logfile log = Logfile(filename="test") log.write(["test", "time"]) # # # # # # welcome scr.draw_text("Welcome to the PyGaze Supertest!\n\nYou're going to be testing \ your PyGaze installation today, using this interactive tool. Press Space \ to start!\n\n\nP.S. If you see this, the following functions work: \
class Dummy(DumbDummy): """A dummy class to run experiments in dummy mode, where eye movements are simulated by the mouse""" def __init__(self, display): """Initiates an eyetracker dummy object, that simulates gaze position using the mouse arguments display -- a pygaze display.Display instance keyword arguments None """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, Dummy) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass self.recording = False self.blinking = False self.bbpos = (settings.DISPSIZE[0]/2, settings.DISPSIZE[1]/2) self.resolution = settings.DISPSIZE[:] self.simulator = Mouse(disptype=settings.DISPTYPE, mousebuttonlist=None, timeout=2, visible=False) self.kb = Keyboard(disptype=settings.DISPTYPE, keylist=None, timeout=None) self.angrybeep = Sound(osc='saw',freq=100, length=100, attack=0, decay=0, soundfile=None) self.display = display self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False) def calibrate(self): """Dummy calibration""" print("Calibration would now take place") clock.pause(1000) def drift_correction(self, pos=None, fix_triggered=False): """Dummy drift correction""" print("Drift correction would now take place") if fix_triggered: return self.fix_triggered_drift_correction(pos) if pos == None: pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2 # show mouse self.simulator.set_visible(visible=True) # show fixation dot self.draw_drift_correction_target(pos[0], pos[1]) # perform drift check errdist = 60 # pixels (on a 1024x768px and 39.9x29.9cm monitor at 67 cm, this is about 2 degrees of visual angle) pressed = None while True: # check for keyboard input pressed, presstime = self.kb.get_key(keylist=['q','escape','space'], timeout=1) # quit key if pressed in ['q','escape']: # hide mouse self.simulator.set_visible(visible=False) return False # space bar elif pressed == 'space': # get sample gazepos = self.sample() # sample is close enough to fixation dot if ((gazepos[0]-pos[0])**2 + (gazepos[1]-pos[1])**2)**0.5 < errdist: # hide mouse self.simulator.set_visible(visible=False) return True # sample is NOT close enough to fixation dot else: # show discontent self.angrybeep.play() def fix_triggered_drift_correction(self, pos=None, min_samples=30, max_dev=60, reset_threshold=10): """Dummy drift correction (fixation triggered)""" print("Drift correction (fixation triggered) would now take place") if pos == None: pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2 # show mouse self.simulator.set_visible(visible=True) # show fixation dot self.draw_drift_correction_target(pos[0], pos[1]) while True: # loop until we have sufficient samples lx = [] ly = [] while len(lx) < min_samples: # pressing escape enters the calibration screen if self.kb.get_key(keylist=["escape", "q"], timeout=0)[0] != None: self.recording = False print("libeyetracker.libeyetracker.fix_triggered_drift_correction(): 'q' pressed") self.simulator.set_visible(visible=False) return False # collect a sample x, y = self.sample() if len(lx) == 0 or x != lx[-1] or y != ly[-1]: # if present sample deviates too much from previous sample, reset counting if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold): lx = [] ly = [] # collect samples else: lx.append(x) ly.append(y) # check if samples are within max. 
deviation if len(lx) == min_samples: avg_x = sum(lx) / len(lx) avg_y = sum(ly) / len(ly) d = ((avg_x - pos[0]) ** 2 + (avg_y - pos[1]) ** 2)**0.5 if d < max_dev: self.simulator.set_visible(visible=False) return True else: lx = [] ly = [] def start_recording(self): """Dummy for starting recording, prints what would have been the recording start""" self.simulator.set_visible(visible=True) dumrectime = clock.get_time() self.recording = True print("Recording would have started at: " + str(dumrectime)) def stop_recording(self): """Dummy for stopping recording, prints what would have been the recording end""" self.simulator.set_visible(visible=False) dumrectime = clock.get_time() self.recording = False print("Recording would have stopped at: " + str(dumrectime)) def close(self): """Dummy for closing connection with eyetracker, prints what would have been connection closing time""" if self.recording: self.stop_recording() closetime = clock.get_time() print("eyetracker connection would have closed at: " + str(closetime)) def pupil_size(self): """Returns dummy pupil size""" return 19 def sample(self): """Returns simulated gaze position (=mouse position)""" if self.blinking: if self.simulator.get_pressed()[2]: # buttondown self.simulator.set_pos(pos=(self.bbpos[0],self.resolution[1])) # set position to blinking position elif not self.simulator.get_pressed()[2]: # buttonup self.simulator.set_pos(pos=self.bbpos) # set position to position before blinking self.blinking = False # 'blink' stopped elif not self.blinking: if self.simulator.get_pressed()[2]: # buttondown self.blinking = True # 'blink' started self.bbpos = self.simulator.get_pos() # position before blinking self.simulator.set_pos(pos=(self.bbpos[0],self.resolution[1])) # set position to blinking position return self.simulator.get_pos() def wait_for_saccade_start(self): """Returns starting time and starting position when a simulated saccade is started""" # function assumes that a 'saccade' has been started when a deviation of more than # maxerr from the initial 'gaze' position has been detected (using Pythagoras, ofcourse) spos = self.sample() # starting position maxerr = 3 # pixels while True: npos = self.sample() # get newest sample if ((spos[0]-npos[0])**2 + (spos[1]-npos[1])**2)**0.5 > maxerr: # Pythagoras break return clock.get_time(), spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a simulated saccade is ended""" # function assumes that a 'saccade' has ended when 'gaze' position remains reasonably # (i.e.: within maxerr) stable for five samples # for saccade start algorithm, see wait_for_fixation_start stime, spos = self.wait_for_saccade_start() maxerr = 3 # pixels # wait for reasonably stable position xl = [] # list for last five samples (x coordinate) yl = [] # list for last five samples (y coordinate) moving = True while moving: # check positions npos = self.sample() xl.append(npos[0]) # add newest sample yl.append(npos[1]) # add newest sample if len(xl) == 5: # check if deviation is small enough if max(xl)-min(xl) < maxerr and max(yl)-min(yl) < maxerr: moving = False # remove oldest sample xl.pop(0); yl.pop(0) # wait for a bit, to avoid immediately returning (runs go faster than mouse moves) clock.pause(10) return clock.get_time(), spos, (xl[len(xl)-1],yl[len(yl)-1]) def wait_for_fixation_start(self): """Returns starting time and position when a simulated fixation is started""" # function assumes a 'fixation' has started when 'gaze' position remains reasonably # stable for five samples in 
a row (same as saccade end) maxerr = 3 # pixels # wait for reasonably stable position xl = [] # list for last five samples (x coordinate) yl = [] # list for last five samples (y coordinate) moving = True while moving: npos = self.sample() xl.append(npos[0]) # add newest sample yl.append(npos[1]) # add newest sample if len(xl) == 5: # check if deviation is small enough if max(xl)-min(xl) < maxerr and max(yl)-min(yl) < maxerr: moving = False # remove oldest sample xl.pop(0); yl.pop(0) # wait for a bit, to avoid immediately returning (runs go faster than mouse moves) clock.pause(10) return clock.get_time(), (xl[len(xl)-1],yl[len(yl)-1]) def wait_for_fixation_end(self): """Returns time and gaze position when a simulated fixation is ended""" # function assumes that a 'fixation' has ended when a deviation of more than maxerr # from the initial 'fixation' position has been detected (using Pythagoras, ofcourse) stime, spos = self.wait_for_fixation_start() maxerr = 3 # pixels while True: npos = self.sample() # get newest sample if ((spos[0]-npos[0])**2 + (spos[1]-npos[1])**2)**0.5 > maxerr: # Pythagoras break return clock.get_time(), spos def wait_for_blink_start(self): """Returns starting time and position of a simulated blink (mousebuttondown)""" # blinks are simulated with mouseclicks: a right mouseclick simulates the closing # of the eyes, a mousebuttonup the opening. while not self.blinking: pos = self.sample() return clock.get_time(), pos def wait_for_blink_end(self): """Returns ending time and position of a simulated blink (mousebuttonup)""" # blinks are simulated with mouseclicks: a right mouseclick simulates the closing # of the eyes, a mousebuttonup the opening. # wait for blink start while not self.blinking: spos = self.sample() # wait for blink end while self.blinking: epos = self.sample() return clock.get_time(), epos def set_draw_drift_correction_target_func(self, func): """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker""" self.draw_drift_correction_target = func # *** # # Internal functions below # # *** def draw_drift_correction_target(self, x, y): """ Draws the drift-correction target. arguments x -- The X coordinate y -- The Y coordinate """ self.screen.clear() self.screen.draw_fixation(fixtype='dot', colour=settings.FGC, \ pos=(x,y), pw=0, diameter=12) self.display.fill(self.screen) self.display.show()
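Every dummy event-detection method above boils down to the same Euclidean-distance check against a small pixel threshold. Stated on its own, as an illustrative helper that is not part of the class:

import math

def exceeds_threshold(spos, npos, maxerr=3):
    # True when the distance between two (x, y) samples is larger than
    # maxerr pixels -- the 'Pythagoras' check used by wait_for_saccade_start
    # and wait_for_fixation_end above
    return math.hypot(npos[0] - spos[0], npos[1] - spos[1]) > maxerr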
import pygaze.libtime as timer # Create a new Display instance. disp = Display() # Create a new Keyboard instance. kb = Keyboard() # Create a new Logfile instance. log = Logfile() log.write(["trialnr", "trial_type", "stimulus", \ "fix_onset", "stim_onset", "response", "RT", \ "correct"]) # Create a BAD sound. bad_sound = Sound(osc="whitenoise", length=200) bad_sound.set_volume(1) good_sound = Sound(osc="sine", freq=440, length=200) good_sound.set_volume(0.5) # Create a new Screen instance. scr = Screen() scr.draw_text("Welcome!", fontsize=100, \ colour=(255,100,100)) # Pass the screen to the display. disp.fill(scr) disp.show() timer.pause(3000) # Create a list of all trials.
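Each later call to log.write is expected to supply values in the same order as the header written above. A sketch with made-up placeholder values:

trialnr = 1
trial_type = "congruent"      # placeholder trial type
stimulus = "left_arrow"       # placeholder stimulus name
fix_onset = 1032              # ms, e.g. the timestamp returned by disp.show()
stim_onset = 2037             # ms, e.g. the timestamp returned by disp.show()
response = "space"
rt = 2451 - stim_onset        # response time minus stimulus onset
correct = 1
log.write([trialnr, trial_type, stimulus, fix_onset, stim_onset, \
    response, rt, correct])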
class AleaTracker(BaseEyeTracker): """A class for AleaTracker objects""" def __init__(self, display, logfile=settings.LOGFILE, \ alea_key=settings.ALEAKEY, \ animated_calibration=settings.ALEAANIMATEDCALIBRATION, \ eventdetection=settings.EVENTDETECTION, \ saccade_velocity_threshold=35, \ saccade_acceleration_threshold=9500, \ blink_threshold=settings.BLINKTHRESH, \ **args): """Initializes the AleaTracker object arguments display -- a pygaze.display.Display instance keyword arguments logfile -- logfile name (string value); note that this is the name for the eye data log file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, AleaTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = self.disp.dispsize # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # show a message self.screen.clear() self.screen.draw_text( text="Initialising the eye tracker, please wait...", fontsize=20) self.disp.fill(self.screen) self.disp.show() # output file properties self.outputfile = logfile + '.tsv' # calibration properties self.animated_calibration = animated_calibration == True # eye tracker properties self.connected = False self.recording = False self.errdist = 2 # degrees; maximal error for drift correction self.pxerrdist = 30 # initial error in pixels self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1, -1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # connect to the tracker self.alea = OGAleaTracker(alea_key, file_path=self.outputfile) # get info on the sample rate # TODO: Compute after streaming some samples? 
self.samplerate = 60.0 self.sampletime = 1000.0 / self.samplerate # initiation report self.log("pygaze initiation report start") self.log("display resolution: {}x{}".format( \ self.dispsize[0], self.dispsize[1])) self.log("display size in cm: {}x{}".format( \ self.screensize[0], self.screensize[1])) self.log("samplerate: {} Hz".format(self.samplerate)) self.log("sampletime: {} ms".format(self.sampletime)) self.log("fixation threshold: {} degrees".format(self.fixtresh)) self.log("speed threshold: {} degrees/second".format(self.spdtresh)) self.log("acceleration threshold: {} degrees/second**2".format( \ self.accthresh)) self.log("pygaze initiation report end") def calibrate(self, animated=None, skip_bad_points=False): """Calibrates the eye tracking system arguments None keyword arguments animated -- bool. Set to True to show a parrot animation instead of calibration dots, or False to use standard points. Set to None to use default option. skip_bad_points -- bool. Intelligaze will skip difficult points when set to True. (Default = False) returns success -- returns True if calibration succeeded, or False if not; in addition a calibration log is added to the log file and some properties are updated (i.e. the thresholds for detection algorithms) """ # Process animated keyword argument. if animated is None: animated = self.animated_calibration if animated: img = "ANIMATION:PARROT" else: img = "" # Show a message. self.screen.clear() self.screen.draw_text(text="Running calibration in the foreground...", fontsize=20) self.disp.fill(self.screen) self.disp.show() # CALIBRATION # Re-run the calibration until it was approved by the user. quited = False calibration_approved = False while not calibration_approved: # Wait for the calibration to finish. status, improve = self.alea.calibrate(image=img, \ skip_bad_points=skip_bad_points) # Construct a message string. if status == 0: calib_str = "Calibration completed!" else: calib_str = "Calibration failed!" if improve: calib_str += "\n\nWARNING: IntelliGaze recommends repeating the calibration to improve accuracy." calib_str += "\n\n\nPress R to retry, or Space to continue." # Show calibration results. self.screen.clear() self.screen.draw_text(text=calib_str, fontsize=20) self.disp.fill(self.screen) self.disp.show() # Wait for user input. key = None while key not in ["r", "Space", "space", "q"]: key, keytime = self.kb.get_key(keylist=['q', 'r', 'space'], timeout=None, flush=True) # Process key press. if key in ["q", "Space", "space"]: calibration_approved = True if key == "q": quited = True # Calibration failed if the user quited. if quited: return False # NOISE CALIBRATION # Present noise calibration instructions. self.screen.clear() self.screen.draw_text( text="Noise calibration. Please look at the dot, and press any key to start.", fontsize=20, \ pos=(int(self.dispsize[0]/2),int(self.dispsize[1]*0.3))) self.screen.draw_fixation(fixtype="dot") self.disp.fill(self.screen) self.disp.show() # Wait for a keypress. key, keytime = self.kb.get_key(keylist=None, timeout=None, \ flush=True) # Start with empty lists. err = {'LX': [], 'LY': [], 'RX': [], 'RY': []} var = {'LX': [], 'LY': [], 'RX': [], 'RY': []} # Start streaming data so that samples can be obtained. self.start_recording() self.log("noise_calibration_start") # Present a central fixation. 
x = int(float(self.dispsize[0]) / 2.0) y = int(float(self.dispsize[1]) / 2.0) self.screen.clear() self.screen.draw_fixation(fixtype="dot", pos=(x, y)) self.disp.fill(self.screen) t0 = self.disp.show() # Collect at least 10 samples, and wait for at least 1 second. i = 0 while (i < 10) or (clock.get_time() - t0 < 1000): # Get new sample. gx, gy = self.sample() if (gx > 0) and (gy > 0): i += 1 err["LX"].append(abs(float(x) - float(gx))) err["LY"].append(abs(float(y) - float(gy))) err["RX"].append(abs(float(x) - float(gx))) err["RY"].append(abs(float(y) - float(gy))) for k in var.keys(): var[k].append(err[k][-1]**2) clock.pause(int(self.sampletime)) # Stop streaming. self.log("noise_calibration_stop") self.stop_recording() # Compute the RMS noise for the calibration points. xnoise = (math.sqrt(sum(var['LX']) / float(len(var['LX']))) + \ math.sqrt(sum(var['RX']) / float(len(var['RX'])))) / 2.0 ynoise = (math.sqrt(sum(var['LY']) / float(len(var['LY']))) + \ math.sqrt(sum(var['RY']) / float(len(var['RY'])))) / 2.0 self.pxdsttresh = (xnoise, ynoise) # AFTERMATH # store some variables pixpercm = (self.dispsize[0] / float(self.screensize[0]) + \ self.dispsize[1]/float(self.screensize[1])) / 2 screendist = settings.SCREENDIST # calculate thresholds based on tracker settings self.accuracy = ( \ (pix2deg(screendist, sum(err['LX']) / float(len(err['LX'])), pixpercm), \ pix2deg(screendist, sum(err['LY']) / float(len(err['LY'])), pixpercm)), \ (pix2deg(screendist, sum(err['RX']) / float(len(err['RX'])), pixpercm), \ pix2deg(screendist, sum(err['RY']) / float(len(err['RY'])), pixpercm))) self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm) self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm) self.pxaccuracy = ( \ (sum(err['LX']) / float(len(err['LX'])), \ sum(err['LY']) / float(len(err['LY']))), \ (sum(err['RX']) / float(len(err['RX'])), \ sum(err['RY']) / float(len(err['RY'])))) self.pxspdtresh = deg2pix(screendist, self.spdtresh / 1000.0, pixpercm) # in pixels per millisecond self.pxacctresh = deg2pix(screendist, self.accthresh / 1000.0, pixpercm) # in pixels per millisecond**2 # calibration report self.log("pygaze calibration report start") self.log("accuracy (degrees): LX={}, LY={}, RX={}, RY={}".format( \ self.accuracy[0][0], self.accuracy[0][1], self.accuracy[1][0], \ self.accuracy[1][1])) self.log("accuracy (in pixels): LX={}, LY={}, RX={}, RY={}".format( \ self.pxaccuracy[0][0], self.pxaccuracy[0][1], \ self.pxaccuracy[1][0], self.pxaccuracy[1][1])) self.log("precision (RMS noise in pixels): X={}, Y={}".format( \ self.pxdsttresh[0],self.pxdsttresh[1])) self.log("distance between participant and display: {} cm".format( \ screendist)) self.log("fixation threshold: {} pixels".format(self.pxfixtresh)) self.log("speed threshold: {} pixels/ms".format(self.pxspdtresh)) self.log("acceleration threshold: {} pixels/ms**2".format( \ self.pxacctresh)) self.log("pygaze calibration report end") return True def close(self): """Neatly close connection to tracker arguments None returns Nothing -- saves data and sets self.connected to False """ # close connection self.alea.close() self.connected = False def connected(self): """Checks if the tracker is connected arguments None returns connected -- True if connection is established, False if not; sets self.connected to the same value """ return self.connected def drift_correction(self, pos=None, fix_triggered=False): """Performs a drift check arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation 
(default = None) fix_triggered -- Boolean indicating if drift check should be performed based on gaze position (fix_triggered = True) or on spacepress (fix_triggered = False) (default = False) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = (int(self.dispsize[0] / 2), int(self.dispsize[1] / 2)) if fix_triggered: return self.fix_triggered_drift_correction(pos) self.draw_drift_correction_target(pos[0], pos[1]) pressed = False while not pressed: pressed, presstime = self.kb.get_key() if pressed: if pressed in ["Escape", "escape", "q"]: print( "libalea.AleaTracker.drift_correction: 'q' or 'escape' pressed" ) return self.calibrate() gazepos = self.sample() if ((gazepos[0] - pos[0])**2 + (gazepos[1] - pos[1])**2)**0.5 < self.pxerrdist: return True else: self.errorbeep.play() return False def draw_drift_correction_target(self, x, y): """ Draws the drift-correction target. arguments x -- The X coordinate y -- The Y coordinate """ self.screen.clear() self.screen.draw_fixation(fixtype='dot', colour=settings.FGC, pos=(x, y), pw=0, diameter=12) self.disp.fill(self.screen) self.disp.show() def draw_calibration_target(self, x, y): self.draw_drift_correction_target(x, y) def fix_triggered_drift_correction(self, pos=None, min_samples=4, max_dev=120, timeout=10000): """Performs a fixation triggered drift correction by collecting a number of samples and calculating the average distance from the fixation position arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) min_samples -- minimal amount of samples after which a fixation is accepted (default = 4) max_dev -- maximal deviation from fixation in pixels (default = 120) timeout -- Time in milliseconds until fixation-triggering is given up on, and calibration is started (default = 10000) returns checked -- Boolean indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = (int(self.dispsize[0] / 2), int(self.dispsize[1] / 2)) self.draw_drift_correction_target(pos[0], pos[1]) t0 = clock.get_time() consecutive_count = 0 while consecutive_count < min_samples: # Get new sample. x, y = self.sample() # Ignore empty samples. if (x is None) or (y is None): continue # Measure the distance to the target position. d = ((x - pos[0])**2 + (y - pos[1])**2)**0.5 # Check whether the distance is below the allowed distance. if d <= max_dev: # Increment count. consecutive_count += 1 else: # Reset count. consecutive_count = 0 # Check for a timeout. if clock.get_time() - t0 > timeout: print( "libalea.AleaTracker.fix_triggered_drift_correction: timeout during fixation-triggered drift check" ) return self.calibrate() # Pressing escape enters the calibration screen. 
if self.kb.get_key()[0] in ["Escape", "escape", "q"]: print( "libalea.AleaTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed" ) return self.calibrate() return True def get_eyetracker_clock_async(self): """Not supported for AleaTracker (yet)""" print( "get_eyetracker_clock_async function not supported for AleaTracker" ) def log(self, msg): """Writes a message to the log file arguments ms -- a string to include in the log file returns Nothing -- uses native log function of iViewX to include a line in the log file """ self.alea.log(msg) def prepare_drift_correction(self, pos): """Not supported for AleaTracker""" print( "prepare_drift_correction function not supported for AleaTracker") def pupil_size(self): """Return pupil size arguments None returns pupil size -- returns pupil diameter for the eye that is currently being tracked (as specified by self.eye_used) or -1 when no data is obtainable """ # Get the latest sample. t, x, y, ps = self.alea.sample() # Invalid data. if ps == 0: return -1 # Check if the new pupil size is the same as the previous. if ps != self.prevps: # Update the pupil size. self.prevps = copy.copy(ps) return self.prevps def sample(self): """Returns newest available gaze position arguments None returns sample -- an (x,y) tuple or a (-1,-1) on an error """ # Get the latest sample. t, x, y, ps = self.alea.sample() # Invalid data. if (x == 0) and (y == 0): return (-1, -1) # Combine the x and y coordinates. s = (int(x), int(y)) # Check if the new sample is the same as the previous. if s != self.prevsample: # Update the current sample. self.prevsample = copy.copy(s) return self.prevsample def send_command(self, cmd): """Function not supported. """ print("send_command function not supported for AleaTracker") def start_recording(self): """Starts recording eye position arguments None returns Nothing -- sets self.recording to True when recording is successfully started """ self.alea.start_recording() self.recording = True def status_msg(self, msg): """Not supported for AleaTracker""" print("status_msg function not supported for AleaTracker") def stop_recording(self): """Stop recording eye position arguments None returns Nothing -- sets self.recording to False when recording is successfully started """ self.alea.stop_recording() self.recording = False def set_detection_type(self, eventdetection): """Set the event detection type to either PyGaze algorithms, or native algorithms as provided by the manufacturer (only if available: detection type will default to PyGaze if no native functions are available) arguments eventdetection -- a string indicating which detection type should be employed: either 'pygaze' for PyGaze event detection algorithms or 'native' for manufacturers algorithms (only if available; will default to 'pygaze' if no native event detection is available) returns -- detection type for saccades, fixations and blinks in a tuple, e.g. 
('pygaze','native','native') when 'native' was passed, but native detection was not available for saccade detection """ if eventdetection in ['pygaze', 'native']: self.eventdetection = eventdetection return ('pygaze', 'pygaze', 'pygaze') def wait_for_event(self, event): """Waits for event arguments event -- an integer event code, one of the following: 3 = STARTBLINK 4 = ENDBLINK 5 = STARTSACC 6 = ENDSACC 7 = STARTFIX 8 = ENDFIX returns outcome -- a self.wait_for_* method is called, depending on the specified event; the return values of corresponding method are returned """ if event == 5: outcome = self.wait_for_saccade_start() elif event == 6: outcome = self.wait_for_saccade_end() elif event == 7: outcome = self.wait_for_fixation_start() elif event == 8: outcome = self.wait_for_fixation_end() elif event == 3: outcome = self.wait_for_blink_start() elif event == 4: outcome = self.wait_for_blink_end() else: raise Exception( "Error in libalea.AleaTracker.wait_for_event: eventcode {} is not supported" .format(event)) return outcome def wait_for_blink_end(self): """Waits for a blink end and returns the blink ending time arguments None returns timestamp -- blink ending time in milliseconds, as measured from experiment begin time """ # # # # # # Native method if self.eventdetection == 'native': print("WARNING! 'native' event detection not implemented") # # # # # # PyGaze method blinking = True # loop while there is a blink while blinking: # get newest sample gazepos = self.sample() # check if it's valid if self.is_valid_sample(gazepos): # if it is a valid sample, blinking has stopped blinking = False # return timestamp of blink end return clock.get_time() def wait_for_blink_start(self): """Waits for a blink start and returns the blink starting time arguments None returns timestamp -- blink starting time in milliseconds, as measured from experiment begin time """ # # # # # # Native method if self.eventdetection == 'native': print("WARNING! 'native' event detection not implemented") # # # # # # PyGaze method blinking = False # loop until there is a blink while not blinking: # get newest sample gazepos = self.sample() # check if it's a valid sample if not self.is_valid_sample(gazepos): # get timestamp for possible blink start t0 = clock.get_time() # loop until a blink is determined, or a valid sample occurs while not self.is_valid_sample(self.sample()): # check if time has surpassed BLINKTHRESH if clock.get_time() - t0 >= self.blinkthresh: # return timestamp of blink start return t0 def wait_for_fixation_end(self): """Returns time and gaze position when a fixation has ended; function assumes that a 'fixation' has ended when a deviation of more than self.pxfixtresh from the initial fixation position has been detected (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # Native method if self.eventdetection == 'native': print("WARNING! 
'native' event detection not implemented") # # # # # # PyGaze method # function assumes that a 'fixation' has ended when a deviation of more than fixtresh # from the initial 'fixation' position has been detected # get starting time and position stime, spos = self.wait_for_fixation_start() # loop until fixation has ended while True: # get new sample npos = self.sample() # get newest sample # check if sample is valid if self.is_valid_sample(npos): # check if sample deviates to much from starting position if (npos[0] - spos[0])**2 + ( npos[1] - spos[1])**2 > self.pxfixtresh**2: # Pythagoras # break loop if deviation is too high break return clock.get_time(), spos def wait_for_fixation_start(self): """Returns starting time and position when a fixation is started; function assumes a 'fixation' has started when gaze position remains reasonably stable (i.e. when most deviant samples are within self.pxfixtresh) for five samples in a row (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # Native method if self.eventdetection == 'native': print("WARNING! 'native' event detection not implemented") # # # # # # PyGaze method # function assumes a 'fixation' has started when gaze position # remains reasonably stable for self.fixtimetresh # get starting position spos = self.sample() while not self.is_valid_sample(spos): spos = self.sample() # get starting time t0 = clock.get_time() # wait for reasonably stable position moving = True while moving: # get new sample npos = self.sample() # check if sample is valid if self.is_valid_sample(npos): # check if new sample is too far from starting position if (npos[0] - spos[0])**2 + ( npos[1] - spos[1])**2 > self.pxfixtresh**2: # Pythagoras # if not, reset starting position and time spos = copy.copy(npos) t0 = clock.get_time() # if new sample is close to starting sample else: # get timestamp t1 = clock.get_time() # check if fixation time threshold has been surpassed if t1 - t0 >= self.fixtimetresh: # return time and starting position return t1, spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a saccade is ended; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos, endpos -- endtime in milliseconds (from expbegintime); startpos and endpos are (x,y) gaze position tuples """ # # # # # # Native method if self.eventdetection == 'native': print("WARNING! 
'native' event detection not implemented") # # # # # # PyGaze method # get starting position (no blinks) t0, spos = self.wait_for_saccade_start() # get valid sample prevpos = self.sample() while not self.is_valid_sample(prevpos): prevpos = self.sample() # get starting time, intersample distance, and velocity t1 = clock.get_time() s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])** 2)**0.5 # = intersample distance = speed in px/sample v0 = s / (t1 - t0) # run until velocity and acceleration go below threshold saccadic = True while saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # calculate distance s = ((newpos[0] - prevpos[0])**2 + (newpos[1] - prevpos[1])** 2)**0.5 # = speed in pixels/sample # calculate velocity v1 = s / (t1 - t0) # calculate acceleration a = (v1 - v0) / ( t1 - t0 ) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample) # check if velocity and acceleration are below threshold if v1 < self.pxspdtresh and (a > -1 * self.pxacctresh and a < 0): saccadic = False epos = newpos[:] etime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return etime, spos, epos def wait_for_saccade_start(self): """Returns starting time and starting position when a saccade is started; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos -- endtime in milliseconds (from expbegintime); startpos is an (x,y) gaze position tuple """ # # # # # # Native method if self.eventdetection == 'native': print("WARNING! 'native' event detection not implemented") # # # # # # PyGaze method # get starting position (no blinks) newpos = self.sample() while not self.is_valid_sample(newpos): newpos = self.sample() # get starting time, position, intersampledistance, and velocity t0 = clock.get_time() prevpos = newpos[:] s = 0 v0 = 0 # get samples saccadic = False while not saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # check if distance is larger than precision error sx = newpos[0] - prevpos[0] sy = newpos[1] - prevpos[1] if (sx / self.pxdsttresh[0])**2 + ( sy / self.pxdsttresh[1] )**2 > self.weightdist: # weigthed distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise # calculate distance s = ((sx)**2 + (sy)** 2)**0.5 # intersampledistance = speed in pixels/ms # calculate velocity v1 = s / (t1 - t0) # calculate acceleration a = (v1 - v0) / (t1 - t0) # acceleration in pixels/ms**2 # check if either velocity or acceleration are above threshold values if v1 > self.pxspdtresh or a > self.pxacctresh: saccadic = True spos = prevpos[:] stime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return stime, spos def is_valid_sample(self, gazepos): """Checks if the sample provided is valid (for internal use) arguments gazepos -- a (x,y) gaze position tuple, as returned by self.sample() returns valid -- a Boolean: True on a valid sample, False on an invalid sample """ # return False if a sample is invalid if gazepos == (None, None) or gazepos == (-1, -1) or gazepos == (0, 0): return False # in any other case, the sample is valid return True
import random from pygaze.display import Display from pygaze.screen import Screen from pygaze.keyboard import Keyboard from pygaze.sound import Sound # create a new Display instance (to interact with the # monitor) disp = Display() # create a new Screen (to use as a canvas to draw on) scr = Screen() # Create two Sounds, one for nice and one for stern # feedback sine = Sound(osc='sine', freq=4000, length=500) noise = Sound(osc='whitenoise', length=500) # a list of vowels vowels = ['a', 'e', 'i', 'o', 'u', 'y'] # create a new Keyboard instance, to monitor key presses kb = Keyboard(keylist=vowels, timeout=None) # randomly choose one vowel letter = random.choice(vowels) # draw the vowel on a Screen scr.draw_text(text=letter, fontsize=128) # fill the Display with a Screen and update the monitor
# Create a colour wheel to select colours from for the stimuli. cw = create_colourwheel(STIML, STIMR, savefile='colourwheel.png') # PYGAZE # Initialise a new Display instance disp = Display() # Initialise a Screen instance for arbitrary drawing. scr = Screen() scr.draw_text(text="Initialising the experiment...", fontsize=FONTSIZE) disp.fill(scr) disp.show() # Initialise the ka-ching sound. kaching = Sound(soundfile=KACHING) # Initialise a Keyboard and a Mouse instance for participant interaction. kb = Keyboard() mouse = Mouse() # COMMUNICATIONS timer.pause(5000) _print("Initialising Client.") # Initialise a new Client instance. client = Client(multicast_ip) # Establish a connection with the server. scr.clear() scr.draw_text(text="Connecting to the server...", fontsize=FONTSIZE) disp.fill(scr)
def __init__(self, libeyelink, tracker): """ Constructor. Arguments: libeyelink -- A libeyelink object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.libeyelink = libeyelink self.display = libeyelink.display self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=0) self.mouse = Mouse(timeout=0) if settings.DISPTYPE == 'pygame': self.kb.set_timeout(timeout=0.001) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. #if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), '__eyelink__.jpg') # drawing properties self.xc = self.display.dispsize[0] / 2 self.yc = self.display.dispsize[1] / 2 self.extra_info = True self.ld = 40 # line distance self.fontsize = libeyelink.fontsize self.title = "" self.display_open = True self.draw_menu_screen() # beeps self.__target_beep__ = Sound(osc='sine', freq=440, length=50, attack=0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200, attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200, attack=0, decay=0, soundfile=None) # Colors self.color = { pylink.CR_HAIR_COLOR: pygame.Color('white'), pylink.PUPIL_HAIR_COLOR: pygame.Color('white'), pylink.PUPIL_BOX_COLOR: pygame.Color('green'), pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color('red'), pylink.MOUSE_CURSOR_COLOR: pygame.Color('red'), 'font': pygame.Color('white'), } # Font pygame.font.init() self.font = pygame.font.SysFont('Courier New', 11) # further properties self.state = None self.pal = None self.size = (0, 0) self.set_tracker(tracker) self.last_mouse_state = -1 self.bit64 = '64bit' in platform.architecture() self.imagebuffer = self.new_array()
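The constructor above is for a pylink.EyeLinkCustomDisplay subclass. Roughly, such a display is handed to pylink as sketched below; the class name EyelinkGraphics is assumed for the example (the listing above omits the class statement), and libeyelink stands for the wrapper object passed in as the first constructor argument. This is a sketch, not PyGaze's actual internal wiring:

import pylink

def register_custom_display(libeyelink, tracker):
    # build the custom display defined above (class name assumed to be
    # EyelinkGraphics), route pylink's camera-setup and calibration drawing
    # through it, then open the tracker's setup menu
    graphics = EyelinkGraphics(libeyelink, tracker)
    pylink.openGraphicsEx(graphics)
    tracker.doTrackerSetup()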
class SMItracker(BaseEyeTracker): """A class for SMI eye tracker objects""" def __init__(self, display, ip='127.0.0.1', sendport=4444, receiveport= \ 5555, logfile=LOGFILE, eventdetection=EVENTDETECTION, \ saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, \ **args): """Initializes the SMItracker object arguments display -- a pygaze.display.Display instance keyword arguments ip -- internal ip address for iViewX (default = '127.0.0.1') sendport -- port number for iViewX sending (default = 4444) receiveport -- port number for iViewX receiving (default = 5555) logfile -- logfile name (string value); note that this is the name for the SMI logfile, NOT the .idf file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, SMITracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = DISPSIZE # display size in pixels self.screensize = SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw',freq=100, length=100) # output file properties self.outputfile = logfile self.description = "experiment" # TODO: EXPERIMENT NAME self.participant = "participant" # TODO: PP NAME # eye tracker properties self.connected = False self.recording = False self.eye_used = 0 # 0=left, 1=right, 2=binocular self.left_eye = 0 self.right_eye = 1 self.binocular = 2 self.errdist = 2 # degrees; maximal error for drift correction self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1,-1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # set logger res = iViewXAPI.iV_SetLogger(c_int(1), c_char_p(logfile + '_SMILOG.txt')) if res != 1: err = errorstring(res) raise Exception("Error in libsmi.SMItracker.__init__: failed to set logger; %s" % err) # first logger argument is for logging type (I'm guessing these are decimal bit codes) # LOG status bitcode # 1 = LOG_LEVEL_BUG 00001 # 2 = LOG_LEVEL_iV_FCT 00010 # 4 = LOG_LEVEL_ETCOM 00100 # 8 = LOG_LEVEL_ALL 01000 # 16 = LOG_LEVEL_IV_COMMAND 10000 # these can be used together, using a bitwise or, e.g.: 1|2|4 (bitcode 00111) # connect to iViewX res = iViewXAPI.iV_Connect(c_char_p(ip), c_int(sendport), c_char_p(ip), c_int(receiveport)) if res == 1: res = iViewXAPI.iV_GetSystemInfo(byref(systemData)) self.samplerate = systemData.samplerate self.sampletime = 1000.0 / self.samplerate if res != 1: err = errorstring(res) raise 
Exception("Error in libsmi.SMItracker.__init__: failed to get system information; %s" % err) # handle connection errors else: self.connected = False err = errorstring(res) raise Exception("Error in libsmi.SMItracker.__init__: establishing connection failed; %s" % err) # initiation report self.log("pygaze initiation report start") self.log("experiment: %s" % self.description) self.log("participant: %s" % self.participant) self.log("display resolution: %sx%s" % (self.dispsize[0],self.dispsize[1])) self.log("display size in cm: %sx%s" % (self.screensize[0],self.screensize[1])) self.log("samplerate: %s Hz" % self.samplerate) self.log("sampletime: %s ms" % self.sampletime) self.log("fixation threshold: %s degrees" % self.fixtresh) self.log("speed threshold: %s degrees/second" % self.spdtresh) self.log("acceleration threshold: %s degrees/second**2" % self.accthresh) self.log("pygaze initiation report end") def calibrate(self, calibrate=True, validate=True): """Calibrates the eye tracking system arguments None keyword arguments calibrate -- Boolean indicating if calibration should be performed (default = True) validate -- Boolean indicating if validation should be performed (default = True) returns success -- returns True if calibration succeeded, or False if not; in addition a calibration log is added to the log file and some properties are updated (i.e. the thresholds for detection algorithms) """ # TODO: # add feedback for calibration (e.g. with iV_GetAccuracyImage (struct ImageStruct * imageData) for accuracy and iV_GetEyeImage for cool eye pictures) # example: res = iViewXAPI.iV_GetEyeImage(byref(imageData)) # ImageStruct has four data fields: # imageHeight -- int vertical size (px) # imageWidth -- int horizontal size (px) # imageSize -- int image data size (byte) # imageBuffer -- pointer to image data (I have NO idea what format this is in) # configure calibration (NOT starting it) calibrationData = CCalibration(9, 1, 0, 1, 1, 0, 127, 1, 15, b"") # (method (i.e.: number of points), visualization, display, speed, auto, fg, bg, shape, size, filename) # setup calibration res = iViewXAPI.iV_SetupCalibration(byref(calibrationData)) if res != 1: err = errorstring(res) raise Exception("Error in libsmi.SMItracker.calibrate: failed to setup calibration; %s" % err) # calibrate cres = iViewXAPI.iV_Calibrate() # validate if calibration returns succes if cres == 1: cerr = None vres = iViewXAPI.iV_Validate() # handle validation errors if vres != 1: verr = errorstring(vres) else: verr = None ## # TEST # ## res = iViewXAPI.iV_GetAccuracyImage(byref(imageData)) ## self.log("IMAGEBUFFERSTART") ## self.log(imageData.imageBuffer) ## self.log("IMAGEBUFFERSTOP") ## print("Image height: %s, image width: %s, image size: %s" % (imageData.imageHeight,imageData.imageWidth, imageData.imageSize)) ## print imageData.imageBuffer ## ######## # handle calibration errors else: cerr = errorstring(cres) # return succes if cerr == None: print("libsmi.SMItracker.calibrate: calibration was succesful") if verr == None: print("libsmi.SMItracker.calibrate: validation was succesful") # present instructions self.disp.fill() # clear display self.screen.draw_text(text="Noise calibration: please look at the dot\n\n(press space to start)", pos=(self.dispsize[0]/2, int(self.dispsize[1]*0.2)), center=True) self.screen.draw_fixation(fixtype='dot') self.disp.fill(self.screen) self.disp.show() self.screen.clear() # clear screen again # wait for spacepress self.kb.get_key(keylist=['space'], timeout=None) # show fixation self.disp.fill() 
self.screen.draw_fixation(fixtype='dot') self.disp.fill(self.screen) self.disp.show() self.screen.clear() # wait for a bit, to allow participant to fixate clock.pause(500) # get samples sl = [self.sample()] # samplelist, prefilled with 1 sample to prevent sl[-1] from producing an error; first sample will be ignored for RMS calculation t0 = clock.get_time() # starting time while clock.get_time() - t0 < 1000: s = self.sample() # sample if s != sl[-1] and s != (-1,-1) and s != (0,0): sl.append(s) # calculate RMS noise Xvar = [] Yvar = [] for i in range(2,len(sl)): Xvar.append((sl[i][0]-sl[i-1][0])**2) Yvar.append((sl[i][1]-sl[i-1][1])**2) XRMS = (sum(Xvar) / len(Xvar))**0.5 YRMS = (sum(Yvar) / len(Yvar))**0.5 self.pxdsttresh = (XRMS, YRMS) # calculate pixels per cm pixpercm = (self.dispsize[0]/float(self.screensize[0]) + self.dispsize[1]/float(self.screensize[1])) / 2 # get accuracy res = 0; i = 0 while res != 1 and i < self.maxtries: # multiple tries, in case no (valid) sample is available res = iViewXAPI.iV_GetAccuracy(byref(accuracyData),0) # 0 is for 'no visualization' i += 1 clock.pause(int(self.sampletime)) # wait for sampletime if res == 1: self.accuracy = ((accuracyData.deviationLX,accuracyData.deviationLY), (accuracyData.deviationLX,accuracyData.deviationLY)) # dsttresh = (left tuple, right tuple); tuple = (horizontal deviation, vertical deviation) in degrees of visual angle else: err = errorstring(res) print("WARNING libsmi.SMItracker.calibrate: failed to obtain accuracy data; %s" % err) self.accuracy = ((2,2),(2,2)) print("libsmi.SMItracker.calibrate: As an estimate, the intersample distance threshhold was set to it's default value of 2 degrees") # get distance from screen to eyes (information from tracker) res = 0; i = 0 while res != 1 and i < self.maxtries: # multiple tries, in case no (valid) sample is available res = iViewXAPI.iV_GetSample(byref(sampleData)) i += 1 clock.pause(int(self.sampletime)) # wait for sampletime if res == 1: screendist = sampleData.leftEye.eyePositionZ / 10.0 # eyePositionZ is in mm; screendist is in cm else: err = errorstring(res) print("WARNING libsmi.SMItracker.calibrate: failed to obtain screen distance; %s" % err) screendist = SCREENDIST print("libsmi.SMItracker.calibrate: As an estimate, the screendistance was set to it's default value of 57 cm") # calculate thresholds based on tracker settings self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm) self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm) self.pxaccuracy = ((deg2pix(screendist, self.accuracy[0][0], pixpercm),deg2pix(screendist, self.accuracy[0][1], pixpercm)), (deg2pix(screendist, self.accuracy[1][0], pixpercm),deg2pix(screendist, self.accuracy[1][1], pixpercm))) self.pxspdtresh = deg2pix(screendist, self.spdtresh/1000.0, pixpercm) # in pixels per millisecond self.pxacctresh = deg2pix(screendist, self.accthresh/1000.0, pixpercm) # in pixels per millisecond**2 # calibration report self.log("pygaze calibration report start") self.log("accuracy (degrees): LX=%s, LY=%s, RX=%s, RY=%s" % (self.accuracy[0][0],self.accuracy[0][1],self.accuracy[1][0],self.accuracy[1][1])) self.log("accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s" % (self.pxaccuracy[0][0],self.pxaccuracy[0][1],self.pxaccuracy[1][0],self.pxaccuracy[1][1])) self.log("precision (RMS noise in pixels): X=%s, Y=%s" % (self.pxdsttresh[0],self.pxdsttresh[1])) self.log("distance between participant and display: %s cm" % screendist) self.log("fixation threshold: %s pixels" % self.pxfixtresh) self.log("speed threshold: %s 
pixels/ms" % self.pxspdtresh) self.log("acceleration threshold: %s pixels/ms**2" % self.pxacctresh) self.log("pygaze calibration report end") return True # validation error else: print("WARNING libsmi.SMItracker.calibrate: validation was unsuccesful %s" % verr) return False # calibration error else: print("WARNING libsmi.SMItracker.calibrate: calibration was unsuccesful; %s" % cerr) return False def close(self): """Neatly close connection to tracker arguments None returns Nothing -- saves data and sets self.connected to False """ # save data res = iViewXAPI.iV_SaveData(str(self.outputfile), str(self.description), str(self.participant), 1) if res != 1: err = errorstring(res) raise Exception("Error in libsmi.SMItracker.close: failed to save data; %s" % err) # close connection iViewXAPI.iV_Disconnect() self.connected = False def connected(self): """Checks if the tracker is connected arguments None returns connected -- True if connection is established, False if not; sets self.connected to the same value """ res = iViewXAPI.iV_IsConnected() if res == 1: self.connected = True else: self.connected = False return self.connected def drift_correction(self, pos=None, fix_triggered=False): """Performs a drift check arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) fix_triggered -- Boolean indicating if drift check should be performed based on gaze position (fix_triggered = True) or on spacepress (fix_triggered = False) (default = False) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if fix_triggered: return self.fix_triggered_drift_correction(pos) if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 pressed = False while not pressed: pressed, presstime = self.kb.get_key() if pressed: if pressed == 'escape' or pressed == 'q': print("libsmi.SMItracker.drift_correction: 'q' or 'escape' pressed") return self.calibrate(calibrate=True, validate=True) gazepos = self.sample() if ((gazepos[0]-pos[0])**2 + (gazepos[1]-pos[1])**2)**0.5 < self.pxerrdist: return True else: self.errorbeep.play() return False def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30): """Performs a fixation triggered drift correction by collecting a number of samples and calculating the average distance from the fixation position arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) min_samples -- minimal amount of samples after which an average deviation is calculated (default = 10) max_dev -- maximal deviation from fixation in pixels (default = 60) reset_threshold -- if the horizontal or vertical distance in pixels between two consecutive samples is larger than this threshold, the sample collection is reset (default = 30) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 # loop until we have sufficient samples lx = [] ly = [] while len(lx) < min_samples: # pressing escape enters the calibration screen if self.kb.get_key()[0] in ['escape','q']: print("libsmi.SMItracker.fix_triggered_drift_correction: 'q' or 'escape' pressed") return self.calibrate(calibrate=True, validate=True) # collect a sample x, y = self.sample() if len(lx) == 0 or x != lx[-1] or y != ly[-1]: # if present sample deviates too 
much from previous sample, reset counting if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold): lx = [] ly = [] # collect samples else: lx.append(x) ly.append(y) if len(lx) == min_samples: avg_x = sum(lx) / len(lx) avg_y = sum(ly) / len(ly) d = ((avg_x - pos[0]) ** 2 + (avg_y - pos[1]) ** 2)**0.5 if d < max_dev: return True else: lx = [] ly = [] def get_eyetracker_clock_async(self): """Not supported for SMItracker (yet)""" print("function not supported yet") def log(self, msg): """Writes a message to the log file arguments ms -- a string to include in the log file returns Nothing -- uses native log function of iViewX to include a line in the log file """ res = iViewXAPI.iV_Log(c_char_p(msg)) if res != 1: err = errorstring(res) print("WARNING libsmi.SMItracker.log: failed to log message '%s'; %s" % (msg,err)) def log_var(self, var, val): """Writes a variable to the log file arguments var -- variable name val -- variable value returns Nothing -- uses native log function of iViewX to include a line in the log file in a "var NAME VALUE" layout """ msg = "var %s %s" % (var, val) res = iViewXAPI.iV_Log(c_char_p(msg)) if res != 1: err = errorstring(res) print("WARNING libsmi.SMItracker.log_var: failed to log variable '%s' with value '%s'; %s" % (var,val,err)) def prepare_backdrop(self): """Not supported for SMItracker (yet)""" print("function not supported yet") def prepare_drift_correction(self, pos): """Not supported for SMItracker (yet)""" print("function not supported yet") def pupil_size(self): """Return pupil size arguments None returns pupil size -- returns pupil diameter for the eye that is currently being tracked (as specified by self.eye_used) or -1 when no data is obtainable """ res = iViewXAPI.iV_GetSample(byref(sampleData)) # if a new sample exists if res == 1: # left eye if self.eye_used == self.left_eye: ps = sampleData.leftEye.diam # right eye else: ps = sampleData.rightEye.diam # set prvious pupil size to newest pupil size self.prevps = ps return ps # no new sample available elif res == 2: return self.prevps # invalid data else: # print warning to interpreter err = errorstring(res) print("WARNING libsmi.SMItracker.pupil_size: failed to obtain sample; %s" % err) return -1 def sample(self): """Returns newest available gaze position arguments None returns sample -- an (x,y) tuple or a (-1,-1) on an error """ res = iViewXAPI.iV_GetSample(byref(sampleData)) if self.eye_used == self.right_eye: newsample = sampleData.rightEye.gazeX, sampleData.rightEye.gazeY else: newsample = sampleData.leftEye.gazeX, sampleData.leftEye.gazeY if res == 1: self.prevsample = newsample[:] return newsample elif res == 2: return self.prevsample else: err = errorstring(res) print("WARNING libsmi.SMItracker.sample: failed to obtain sample; %s" % err) return (-1,-1) def send_command(self, cmd): """Sends a command to the eye tracker arguments cmd -- the command (a string value) to be sent to iViewX returns Nothing """ try: iViewXAPI.iV_SendCommand(c_char_p(cmd)) except: raise Exception("Error in libsmi.SMItracker.send_command: failed to send remote command to iViewX (iV_SendCommand might be deprecated)") def set_backdrop(self): """Not supported for SMItracker (yet)""" print("function not supported yet") def set_eye_used(self): """Logs the eye_used variable, based on which eye was specified (if both eyes are being tracked, the left eye is used) arguments None returns Nothing -- logs which eye is used by calling self.log_var, e.g. 
self.log_var("eye_used", "right") """ if self.eye_used == self.right_eye: self.log_var("eye_used", "right") else: self.log_var("eye_used", "left") def start_recording(self): """Starts recording eye position arguments None returns Nothing -- sets self.recording to True when recording is successfully started """ res = 0; i = 0 while res != 1 and i < self.maxtries: res = iViewXAPI.iV_StartRecording() i += 1 if res == 1: self.recording = True else: self.recording = False err = errorstring(res) raise Exception("Error in libsmi.SMItracker.start_recording: %s" % err) def status_msg(self, msg): """Not supported for SMItracker (yet)""" print("function not supported yet") def stop_recording(self): """Stop recording eye position arguments None returns Nothing -- sets self.recording to False when recording is successfully started """ res = 0; i = 0 while res != 1 and i < self.maxtries: res = iViewXAPI.iV_StopRecording() i += 1 if res == 1: self.recording = False else: self.recording = False err = errorstring(res) raise Exception("Error in libsmi.SMItracker.stop_recording: %s" % err) def set_detection_type(self, eventdetection): """Set the event detection type to either PyGaze algorithms, or native algorithms as provided by the manufacturer (only if available: detection type will default to PyGaze if no native functions are available) arguments eventdetection -- a string indicating which detection type should be employed: either 'pygaze' for PyGaze event detection algorithms or 'native' for manufacturers algorithms (only if available; will default to 'pygaze' if no native event detection is available) returns -- detection type for saccades, fixations and blinks in a tuple, e.g. ('pygaze','native','native') when 'native' was passed, but native detection was not available for saccade detection """ if eventdetection in ['pygaze','native']: self.eventdetection = eventdetection return ('pygaze','native','pygaze') def wait_for_event(self, event): """Waits for event arguments event -- an integer event code, one of the following: 3 = STARTBLINK 4 = ENDBLINK 5 = STARTSACC 6 = ENDSACC 7 = STARTFIX 8 = ENDFIX returns outcome -- a self.wait_for_* method is called, depending on the specified event; the return values of corresponding method are returned """ if event == 5: outcome = self.wait_for_saccade_start() elif event == 6: outcome = self.wait_for_saccade_end() elif event == 7: outcome = self.wait_for_fixation_start() elif event == 8: outcome = self.wait_for_fixation_end() elif event == 3: outcome = self.wait_for_blink_start() elif event == 4: outcome = self.wait_for_blink_end() else: raise Exception("Error in libsmi.SMItracker.wait_for_event: eventcode %s is not supported" % event) return outcome def wait_for_blink_end(self): """Waits for a blink end and returns the blink ending time arguments None returns timestamp -- blink ending time in milliseconds, as measured from experiment begin time """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but SMI does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = True # loop while there is a blink while blinking: # get newest sample gazepos = self.sample() # check if it's valid if self.is_valid_sample(gazepos): # if it is a valid sample, blinking has stopped blinking = False # return timestamp of blink end return clock.get_time() def wait_for_blink_start(self): """Waits for a blink start and returns the blink starting time arguments None returns timestamp -- blink starting time in milliseconds, as measured from experiment begin time """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but SMI does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = False # loop until there is a blink while not blinking: # get newest sample gazepos = self.sample() # check if it's a valid sample if not self.is_valid_sample(gazepos): # get timestamp for possible blink start t0 = clock.get_time() # loop until a blink is determined, or a valid sample occurs while not self.is_valid_sample(self.sample()): # check if time has surpassed 150 ms if clock.get_time()-t0 >= 150: # return timestamp of blink start return t0 def wait_for_fixation_end(self): """Returns time and gaze position when a fixation has ended; function assumes that a 'fixation' has ended when a deviation of more than self.pxfixtresh from the initial fixation position has been detected (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the ending time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # SMI method if self.eventdetection == 'native': moving = True while moving: # get newest event res = 0 while res != 1: res = iViewXAPI.iV_GetEvent(byref(eventData)) stime = clock.get_time() # check if event is a fixation (SMI only supports # fixations at the moment) if eventData.eventType == 'F': # get timestamp and starting position timediff = stime - (int(eventData.startTime) / 1000.0) etime = timediff + (int(eventData.endTime) / 1000.0) # time is in microseconds fixpos = (eventData.positionX, eventData.positionY) # return ending time and position return etime, fixpos # # # # # # PyGaze method else: # function assumes that a 'fixation' has ended when a deviation of more than fixtresh # from the initial 'fixation' position has been detected # get starting time and position stime, spos = self.wait_for_fixation_start() # loop until fixation has ended while True: # get new sample npos = self.sample() # get newest sample # check if sample is valid if self.is_valid_sample(npos): # check if sample deviates too much from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # break loop if deviation is too high break return clock.get_time(), spos def wait_for_fixation_start(self): """Returns starting time and position when a fixation is started; function assumes a 'fixation' has started when gaze position remains reasonably stable (i.e.
when most deviant samples are within self.pxfixtresh) for five samples in a row (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a fixation start # detection built into their API (only ending) print("WARNING! 'native' event detection has been selected, \ but SMI does not offer fixation START detection (only \ fixation ENDING; PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes a 'fixation' has started when gaze position # remains reasonably stable for self.fixtimetresh # get starting position spos = self.sample() while not self.is_valid_sample(spos): spos = self.sample() # get starting time t0 = clock.get_time() # wait for reasonably stable position moving = True while moving: # get new sample npos = self.sample() # check if sample is valid if self.is_valid_sample(npos): # check if new sample is too far from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # if not, reset starting position and time spos = copy.copy(npos) t0 = clock.get_time() # if new sample is close to starting sample else: # get timestamp t1 = clock.get_time() # check if fixation time threshold has been surpassed if t1 - t0 >= self.fixtimetresh: # return time and starting position return t1, spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a saccade is ended; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos, endpos -- endtime in milliseconds (from expbegintime); startpos and endpos are (x,y) gaze position tuples """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but SMI does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) t0, spos = self.wait_for_saccade_start() # get valid sample prevpos = self.sample() while not self.is_valid_sample(prevpos): prevpos = self.sample() # get starting time, intersample distance, and velocity t1 = clock.get_time() s = ((prevpos[0]-spos[0])**2 + (prevpos[1]-spos[1])**2)**0.5 # = intersample distance = speed in px/sample v0 = s / (t1-t0) # run until velocity and acceleration go below threshold saccadic = True while saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # calculate distance s = ((newpos[0]-prevpos[0])**2 + (newpos[1]-prevpos[1])**2)**0.5 # = speed in pixels/sample # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample) # check if velocity and acceleration are below threshold if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and a < 0): saccadic = False epos = newpos[:] etime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return etime, spos, epos def wait_for_saccade_start(self): """Returns starting time and starting position when a saccade is started; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos -- endtime in milliseconds (from expbegintime); startpos is an (x,y) gaze position tuple """ # # # # # # SMI method if self.eventdetection == 'native': # print warning, since SMI does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but SMI does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) newpos = self.sample() while not self.is_valid_sample(newpos): newpos = self.sample() # get starting time, position, intersampledistance, and velocity t0 = clock.get_time() prevpos = newpos[:] s = 0 v0 = 0 # get samples saccadic = False while not saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # check if distance is larger than precision error sx = newpos[0]-prevpos[0]; sy = newpos[1]-prevpos[1] if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 > self.weightdist: # weighted distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise # calculate distance s = ((sx)**2 + (sy)**2)**0.5 # intersampledistance = speed in pixels/ms # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2 # check if either velocity or acceleration are above threshold values if v1 > self.pxspdtresh or a > self.pxacctresh: saccadic = True spos = prevpos[:] stime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # update previous sample prevpos = newpos[:] return stime, spos def is_valid_sample(self, gazepos): """Checks if the sample provided is valid, based on SMI specific criteria (for internal use) arguments gazepos -- a (x,y) gaze position tuple, as returned by self.sample() returns valid -- a Boolean: True on a valid sample, False on an invalid sample """ # return False if a sample is invalid if gazepos == (-1,-1): return False # sometimes, on SMI devices, invalid samples can actually contain # numbers; these typically include a 0.0 and sum to a very low value, which is what the check below catches elif sum(gazepos) < 10 and 0.0 in gazepos: return False # in any other case, the sample is valid return True
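# ----------------------------------------------------------------------------
# Illustrative sketch -- NOT part of the tracker class above. It isolates the
# online saccade-onset test (based on Dalmaijer et al., 2013) that
# wait_for_saccade_start() applies to consecutive gaze samples. The function
# name detect_saccade_onset is hypothetical; thresholds are assumed to be in
# pixels, pixels/ms and pixels/ms**2, as in the comments above.

def detect_saccade_onset(prevpos, newpos, dt, v0, pxdsttresh, pxspdtresh,
    pxacctresh, weightdist=10):

    """Returns (saccade_started, v1) for two consecutive gaze samples.

    prevpos, newpos -- (x, y) gaze positions in pixels
    dt              -- time between the two samples in milliseconds
    v0              -- velocity (pixels/ms) from the previous iteration
    pxdsttresh      -- (x, y) RMS noise in pixels (precision)
    pxspdtresh      -- velocity threshold in pixels/ms
    pxacctresh      -- acceleration threshold in pixels/ms**2
    weightdist      -- weighted-distance criterion used to reject noise
    """

    sx = newpos[0] - prevpos[0]
    sy = newpos[1] - prevpos[1]
    # ignore displacements that are indistinguishable from measurement noise
    if (sx / pxdsttresh[0])**2 + (sy / pxdsttresh[1])**2 <= weightdist:
        return False, v0
    s = (sx**2 + sy**2)**0.5    # intersample distance in pixels
    v1 = s / dt                 # velocity in pixels/ms
    a = (v1 - v0) / dt          # acceleration in pixels/ms**2
    return (v1 > pxspdtresh or a > pxacctresh), v1

# Example: a 100-pixel jump in 4 ms easily exceeds a 0.1 px/ms threshold.
# print(detect_saccade_onset((0, 0), (100, 0), 4, 0.0, (2.0, 2.0), 0.1, 0.5))
# ----------------------------------------------------------------------------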
class EyelinkGraphics(custom_display): """ Implements the EyeLink graphics that are shown on the experimental PC, such as the camera image, and the calibration dots. This class only implements the drawing operations, and little to no of the logic behind the set-up, which is implemented in PyLink. """ def __init__(self, display, tracker): """ Constructor. Arguments: display -- A PyGaze Display object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.display = display self.screen = Screen(disptype=DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=1) if DISPTYPE == 'pygame': self.kb.set_timeout(timeout=0.001) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. #if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), \ '__eyelink__.jpg') # drawing properties self.xc = self.display.dispsize[0]/2 self.yc = self.display.dispsize[1]/2 self.ld = 40 # line distance # menu self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False) self.menuscreen.draw_text(text="== Eyelink calibration menu ==", pos= \ (self.xc,self.yc-5*self.ld), center=True, font='mono', fontsize= \ 12, antialias=True) self.menuscreen.draw_text(text="Press C to calibrate", pos=(self.xc, \ self.yc-3*self.ld), center=True, font='mono', fontsize=12, \ antialias=True) self.menuscreen.draw_text(text="Press V to validate", pos=(self.xc, \ self.yc-2*self.ld), center=True, font='mono', fontsize=12, \ antialias=True) self.menuscreen.draw_text(text="Press A to auto-threshold", pos=( \ self.xc,self.yc-1*self.ld), center=True, font='mono', fontsize=12, \ antialias=True) self.menuscreen.draw_text(text="Press Enter to show camera image", \ pos=(self.xc,self.yc+1*self.ld), center=True, font='mono', \ fontsize=12, antialias=True) self.menuscreen.draw_text(text= \ "(then change between images using the arrow keys)", pos=(self.xc, \ self.yc+2*self.ld), center=True, font='mono', fontsize=12, \ antialias=True) self.menuscreen.draw_text(text="Press Q to exit menu", pos=(self.xc, \ self.yc+5*self.ld), center=True, font='mono', fontsize=12, \ antialias=True) # beeps self.__target_beep__ = Sound(osc='sine', freq=440, length=50, attack= \ 0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200, \ attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200, \ attack=0, decay=0, soundfile=None) # further properties self.state = None self.imagebuffer = array.array('l') self.pal = None self.size = (0,0) self.set_tracker(tracker) self.last_mouse_state = -1 def set_tracker(self, tracker): """ Connects the tracker to the graphics environment. Arguments: tracker -- An tracker object as returned by pylink.EyeLink(). """ self.tracker = tracker self.tracker_version = tracker.getTrackerVersion() if self.tracker_version >= 3: self.tracker.sendCommand("enable_search_limits=YES") self.tracker.sendCommand("track_search_limits=YES") self.tracker.sendCommand("autothreshold_click=YES") self.tracker.sendCommand("autothreshold_repeat=YES") self.tracker.sendCommand("enable_camera_position_detect=YES") def setup_cal_display(self): """ Sets up the initial calibration display, which contains a menu with instructions. 
""" # show instructions self.display.fill(self.menuscreen) self.display.show() def exit_cal_display(self): """Exits calibration display.""" self.clear_cal_display() def record_abort_hide(self): """TODO: What does this do?""" pass def clear_cal_display(self): """Clears the calibration display""" self.display.fill() self.display.show() def erase_cal_target(self): """TODO: What does this do?""" self.clear_cal_display() def draw_cal_target(self, x, y): """ Draws calibration target. Arguments: x -- The X coordinate of the target. y -- The Y coordinate of the target. """ self.play_beep(pylink.CAL_TARG_BEEP) self.screen.clear() self.screen.draw_fixation(fixtype='dot', pos=(x,y)) self.display.fill(screen=self.screen) self.display.show() def play_beep(self, beepid): """ Plays a sound. Arguments: beepid -- A number that identifies the sound. """ if beepid == pylink.CAL_TARG_BEEP: # For some reason, playing the beep here doesn't work, so we have # to play it when the calibration target is drawn. if EYELINKCALBEEP: self.__target_beep__.play() elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP: # show a picture self.screen.clear() self.screen.draw_text(text= \ "calibration lost, press 'q' to return to menu", pos= \ (self.xc,self.yc), center=True, font='mono', fontsize=12, \ antialias=True) self.display.fill(self.screen) self.display.show() # play beep self.__target_beep__error__.play() elif beepid == pylink.CAL_GOOD_BEEP: self.screen.clear() if self.state == "calibration": self.screen.draw_text(text= \ "Calibration succesfull, press 'v' to validate", pos= \ (self.xc,self.yc), center=True, font='mono', fontsize=12, \ antialias=True) pass elif self.state == "validation": self.screen.draw_text(text= \ "Validation succesfull, press 'q' to return to menu", \ pos=(self.xc,self.yc), center=True, font='mono', fontsize= \ 12, antialias=True) pass else: self.screen.draw_text(text="Press 'q' to return to menu", pos= \ (self.xc,self.yc), center=True, font='mono', fontsize=12, \ antialias=True) pass # show screen self.display.fill(self.screen) self.display.show() # play beep self.__target_beep__done__.play() else: # DC_GOOD_BEEP or DC_TARG_BEEP pass def getColorFromIndex(self, i): """ Maps a PyLink color code onto a color-name string. Arguments: i -- A PyLink color code. Returns: A color-name string. """ print 'getColorFromIndex(%s)' % i if i == pylink.CR_HAIR_COLOR: return 'white' if i == pylink.PUPIL_HAIR_COLOR: return 'yellow' if i == pylink.PUPIL_BOX_COLOR: return 'green' if i == pylink.SEARCH_LIMIT_BOX_COLOR: return 'red' if i == pylink.MOUSE_CURSOR_COLOR: return 'blue' return 'black' def draw_line(self, x1, y1, x2, y2, colorindex): """Unused""" # Find out how this can be used print 'draw_line() %s %s %s %s' % (x1, y1, x2, y2) def draw_lozenge(self, x, y, width, height, colorindex): """Unused""" # Find out how this can be used print 'draw_lozenge() %s %s %s %s' % (x, y, width, height) def get_mouse_state(self): """Unused""" pass def get_input_key(self): """ Gets an input key. Returns: A list containing a single pylink key identifier. 
""" try: key, time = self.kb.get_key(keylist=None, timeout='default') except: self.esc_pressed = True key = 'q' if key == None: return None # Escape functions as a 'q' with the additional esc_pressed flag if key == 'escape': key = 'q' self.esc_pressed = True # Process regular keys if key == "return": keycode = pylink.ENTER_KEY self.state = None elif key == "space": keycode = ord(" ") elif key == "q": keycode = pylink.ESC_KEY self.state = None elif key == "c": keycode = ord("c") self.state = "calibration" elif key == "v": keycode = ord("v") self.state = "validation" elif key == "a": keycode = ord("a") elif key == "up": keycode = pylink.CURS_UP elif key == "down": keycode = pylink.CURS_DOWN elif key == "left": keycode = pylink.CURS_LEFT elif key == "right": keycode = pylink.CURS_RIGHT else: keycode = 0 # Convert key to PyLink keycode and return return [pylink.KeyInput(keycode, 0)] # 0 = pygame.KMOD_NONE def exit_image_display(self): """Exits the image display.""" self.clear_cal_display() def alert_printf(self,msg): """ Prints alert message. Arguments: msg -- The message to be played. """ print "eyelink_graphics.alert_printf(): %s" % msg def setup_image_display(self, width, height): """ Initializes the buffer that will contain the camera image. Arguments: width -- The width of the image. height -- The height of the image. """ self.size = (width,height) self.clear_cal_display() self.last_mouse_state = -1 self.imagebuffer = array.array('l') def image_title(self, text): """ TODO: What does this do? Arguments: text -- Unknown. """ pass def draw_image_line(self, width, line, totlines, buff): """ Draws a single eye video frame, line by line. Arguments: width -- Width of the video. line -- Line nr of current line. totlines -- Total lines in video. buff -- Frame buffer. imagesize -- The size of the image, which is (usually?) 192x160 px. """ # If the buffer hasn't been filled yet, add a line. for i in range(width): try: self.imagebuffer.append(self.pal[buff[i]]) except: pass # If the buffer is full, push it to the display. if line == totlines: # First create a PIL image, then convert it to a PyGame image, and # then save it to a temporary file on disk. This juggling with # formats is necessary to show the image without distortions under # (so far) all conditions. Surprisingly, it doesn't cause any # appreciable delays, relative to directly invoking PyGame or # PsychoPy functions. bufferv = self.imagebuffer.tostring() img = Image.new("RGBX", self.size) imgsz = self.xc, self.yc img.fromstring(bufferv) img = img.resize(imgsz) img = pygame.image.fromstring(img.tostring(), imgsz, 'RGBX') pygame.image.save(img, self.tmp_file) # ... and then show the image. self.screen.clear() self.screen.draw_image(self.tmp_file) self.display.fill(self.screen) self.display.show() # Clear the buffer for the next round! self.imagebuffer = array.array('l') def set_image_palette(self, r, g, b): """ Sets the image palette. TODO: What this function actually does is highly mysterious. Figure it out! Arguments: r -- The red channel. g -- The green channel. b -- The blue channel. """ self.imagebuffer = array.array('l') self.clear_cal_display() sz = len(r) i = 0 self.pal = [] while i < sz: rf = int(b[i]) gf = int(g[i]) bf = int(r[i]) self.pal.append((rf<<16) | (gf<<8) | (bf)) i += 1
class EyelinkGraphics(custom_display): """ Implements the EyeLink graphics that are shown on the experimental PC, such as the camera image, and the calibration dots. This class only implements the drawing operations, and little to no of the logic behind the set-up, which is implemented in PyLink. """ def __init__(self, libeyelink, tracker): """ Constructor. Arguments: libeyelink -- A libeyelink object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.libeyelink = libeyelink self.display = libeyelink.display self.screen = Screen(disptype=DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=0) self.mouse = Mouse(timeout=0) if DISPTYPE == "pygame": self.kb.set_timeout(timeout=0.001) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. # if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), "__eyelink__.jpg") # drawing properties self.xc = self.display.dispsize[0] / 2 self.yc = self.display.dispsize[1] / 2 self.extra_info = True self.ld = 40 # line distance self.fontsize = libeyelink.fontsize self.title = "" self.display_open = True # menu self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False) self.menuscreen.draw_text( text="Eyelink calibration menu", pos=(self.xc, self.yc - 6 * self.ld), center=True, font="mono", fontsize=int(2 * self.fontsize), antialias=True, ) self.menuscreen.draw_text( text="%s (pygaze %s, pylink %s)" % (libeyelink.eyelink_model, pygaze.version, pylink.__version__), pos=(self.xc, self.yc - 5 * self.ld), center=True, font="mono", fontsize=int(0.8 * self.fontsize), antialias=True, ) self.menuscreen.draw_text( text="Press C to calibrate", pos=(self.xc, self.yc - 3 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press V to validate", pos=(self.xc, self.yc - 2 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press A to auto-threshold", pos=(self.xc, self.yc - 1 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press I to toggle extra info in camera image", pos=(self.xc, self.yc - 0 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press Enter to show camera image", pos=(self.xc, self.yc + 1 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="(then change between images using the arrow keys)", pos=(self.xc, self.yc + 2 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press Escape to abort experiment", pos=(self.xc, self.yc + 4 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.menuscreen.draw_text( text="Press Q to exit menu", pos=(self.xc, self.yc + 5 * self.ld), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) # beeps self.__target_beep__ = Sound(osc="sine", freq=440, length=50, attack=0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc="sine", freq=880, length=200, attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc="sine", freq=220, length=200, attack=0, decay=0, soundfile=None) # Colors self.color = { pylink.CR_HAIR_COLOR: pygame.Color("white"), pylink.PUPIL_HAIR_COLOR: 
pygame.Color("white"), pylink.PUPIL_BOX_COLOR: pygame.Color("green"), pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color("red"), pylink.MOUSE_CURSOR_COLOR: pygame.Color("red"), "font": pygame.Color("white"), } # Font pygame.font.init() self.font = pygame.font.SysFont("Courier New", 11) # further properties self.state = None self.pal = None self.size = (0, 0) self.set_tracker(tracker) self.last_mouse_state = -1 self.bit64 = "64bit" in platform.architecture() self.imagebuffer = self.new_array() def close(self): """ Is called when the connection and display are shutting down. """ self.display_open = False def new_array(self): """ Creates a new array with a system-specific format. Returns: An array. """ # On 64 bit Linux, we need to use an unsigned int data format. # <https://www.sr-support.com/showthread.php?3215-Visual-glitch-when-/ # sending-eye-image-to-display-PC&highlight=ubuntu+pylink> if os.name == "posix" and self.bit64: return array.array("I") return array.array("L") def set_tracker(self, tracker): """ Connects the tracker to the graphics environment. Arguments: tracker -- An tracker object as returned by pylink.EyeLink(). """ self.tracker = tracker self.tracker_version = tracker.getTrackerVersion() if self.tracker_version >= 3: self.tracker.sendCommand("enable_search_limits=YES") self.tracker.sendCommand("track_search_limits=YES") self.tracker.sendCommand("autothreshold_click=YES") self.tracker.sendCommand("autothreshold_repeat=YES") self.tracker.sendCommand("enable_camera_position_detect=YES") def setup_cal_display(self): """ Sets up the initial calibration display, which contains a menu with instructions. """ # show instructions self.display.fill(self.menuscreen) self.display.show() def exit_cal_display(self): """Exits calibration display.""" self.clear_cal_display() def record_abort_hide(self): """TODO: What does this do?""" pass def clear_cal_display(self): """Clears the calibration display""" self.display.fill() self.display.show() def erase_cal_target(self): """TODO: What does this do?""" self.clear_cal_display() def draw_cal_target(self, x, y): """ Draws calibration target. Arguments: x -- The X coordinate of the target. y -- The Y coordinate of the target. """ self.play_beep(pylink.CAL_TARG_BEEP) self.screen.clear() self.screen.draw_fixation(fixtype="dot", pos=(x, y)) self.display.fill(screen=self.screen) self.display.show() def play_beep(self, beepid): """ Plays a sound. Arguments: beepid -- A number that identifies the sound. """ if beepid == pylink.CAL_TARG_BEEP: # For some reason, playing the beep here doesn't work, so we have # to play it when the calibration target is drawn. 
if EYELINKCALBEEP: self.__target_beep__.play() elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP: # show a picture self.screen.clear() self.screen.draw_text( text="calibration lost, press 'Enter' to return to menu", pos=(self.xc, self.yc), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) self.display.fill(self.screen) self.display.show() # play beep self.__target_beep__error__.play() elif beepid == pylink.CAL_GOOD_BEEP: self.screen.clear() if self.state == "calibration": self.screen.draw_text( text="Calibration succesfull, press 'v' to validate", pos=(self.xc, self.yc), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) elif self.state == "validation": self.screen.draw_text( text="Validation succesfull, press 'Enter' to return to menu", pos=(self.xc, self.yc), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) else: self.screen.draw_text( text="Press 'Enter' to return to menu", pos=(self.xc, self.yc), center=True, font="mono", fontsize=self.fontsize, antialias=True, ) # show screen self.display.fill(self.screen) self.display.show() # play beep self.__target_beep__done__.play() else: # DC_GOOD_BEEP or DC_TARG_BEEP pass def draw_line(self, x1, y1, x2, y2, colorindex): """ Unlike the function name suggests, this draws a single pixel. I.e. the end coordinates are always exactly one pixel away from the start coordinates. Arguments: x1 -- The starting x. y1 -- The starting y. x2 -- The end x. y2 -- The end y. colorIndex -- A color index. """ x1 = int(self.scale * x1) y1 = int(self.scale * y1) x2 = int(self.scale * x2) y2 = int(self.scale * y2) pygame.draw.line(self.cam_img, self.color[colorindex], (x1, y1), (x2, y2)) def draw_lozenge(self, x, y, w, h, colorindex): """ desc: Draws a rectangle. arguments: x: desc: X coordinate. type: int y: desc: Y coordinate. type: int w: desc: A width. type: int h: desc: A height. type: int colorindex: desc: A colorindex. type: int """ x = int(self.scale * x) y = int(self.scale * y) w = int(self.scale * w) h = int(self.scale * h) pygame.draw.rect(self.cam_img, self.color[colorindex], (x, y, w, h), 2) def draw_title(self): """ desc: Draws title info. """ y = 0 for line in self.title: surf = self.font.render(line, 0, self.color["font"]) self.cam_img.blit(surf, (1, y)) y += 12 def get_mouse_state(self): """ desc: Gets the mouse position and state. returns: desc: A (pos, state) tuple. type: tuple. """ button, pos, time = self.mouse.get_clicked() if button == None: button = -1 if pos == None: pos = self.mouse.get_pos() return pos, button def get_input_key(self): """ Gets an input key. Returns: A list containing a single pylink key identifier. """ # Don't try to collect key presses when the display is no longer # available. This is necessary, because pylink polls key presses during # file transfer, which generally occurs after the display has been # closed. 
if not self.display_open: return None try: key, time = self.kb.get_key(keylist=None, timeout="default") except: self.esc_pressed = True key = "q" if key == None: return None # Escape functions as a 'q' with the additional esc_pressed flag if key == "escape": key = "q" self.esc_pressed = True # Process regular keys if key == "return": keycode = pylink.ENTER_KEY self.state = None elif key == "space": keycode = ord(" ") elif key == "q": keycode = pylink.ESC_KEY self.state = None elif key == "c": keycode = ord("c") self.state = "calibration" elif key == "v": keycode = ord("v") self.state = "validation" elif key == "a": keycode = ord("a") elif key == "i": self.extra_info = not self.extra_info keycode = 0 elif key == "up": keycode = pylink.CURS_UP elif key == "down": keycode = pylink.CURS_DOWN elif key == "left": keycode = pylink.CURS_LEFT elif key == "right": keycode = pylink.CURS_RIGHT else: keycode = 0 # Convert key to PyLink keycode and return return [pylink.KeyInput(keycode, 0)] # 0 = pygame.KMOD_NONE def exit_image_display(self): """Exits the image display.""" self.clear_cal_display() def alert_printf(self, msg): """ Prints alert message. Arguments: msg -- The message to be played. """ print "eyelink_graphics.alert_printf(): %s" % msg def setup_image_display(self, width, height): """ Initializes the buffer that will contain the camera image. Arguments: width -- The width of the image. height -- The height of the image. """ self.size = width, height self.clear_cal_display() self.last_mouse_state = -1 self.imagebuffer = self.new_array() def image_title(self, text): """ Sets the current image title. Arguments: text -- An image title. """ while ": " in text: text = text.replace(": ", ":") self.title = text.split() def draw_image_line(self, width, line, totlines, buff): """ Draws a single eye video frame, line by line. Arguments: width -- Width of the video. line -- Line nr of current line. totlines -- Total lines in video. buff -- Frame buffer. imagesize -- The size of the image, which is (usually?) 192x160 px. """ # If the buffer hasn't been filled yet, add a line. for i in range(width): try: self.imagebuffer.append(self.pal[buff[i]]) except: pass # If the buffer is full, push it to the display. if line == totlines: self.scale = totlines / 320.0 self._size = int(self.scale * self.size[0]), int(self.scale * self.size[1]) # Convert the image buffer to a pygame image, save it ... self.cam_img = pygame.image.fromstring(self.imagebuffer.tostring(), self._size, "RGBX") if self.extra_info: self.draw_cross_hair() self.draw_title() pygame.image.save(self.cam_img, self.tmp_file) # ... and then show the image. self.screen.clear() self.screen.draw_image(self.tmp_file, scale=1.5 / self.scale) self.display.fill(self.screen) self.display.show() # Clear the buffer for the next round! self.imagebuffer = self.new_array() def set_image_palette(self, r, g, b): """ Sets the image palette. TODO: What this function actually does is highly mysterious. Figure it out! Arguments: r -- The red channel. g -- The green channel. b -- The blue channel. """ self.imagebuffer = self.new_array() self.clear_cal_display() sz = len(r) i = 0 self.pal = [] while i < sz: rf = int(b[i]) gf = int(g[i]) bf = int(r[i]) self.pal.append((rf << 16) | (gf << 8) | (bf)) i += 1
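# ----------------------------------------------------------------------------
# Illustrative sketch -- NOT part of the class above. new_array() chooses the
# array typecode per platform: on 64-bit POSIX systems 'L' (unsigned long) is
# 8 bytes wide, which would garble the 4-bytes-per-pixel buffer that is later
# handed to pygame.image.fromstring(), so 'I' (unsigned int) is used instead.
# The helper below (camera_buffer_typecode, a hypothetical name) mirrors that
# choice.

import array
import os
import platform

def camera_buffer_typecode():
    """Returns 'I' on 64-bit POSIX systems and 'L' everywhere else."""
    if os.name == "posix" and "64bit" in platform.architecture():
        return "I"
    return "L"

# Usage: create an empty camera-frame buffer, as new_array() does.
example_imagebuffer = array.array(camera_buffer_typecode())
# ----------------------------------------------------------------------------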
class EyeLogicTracker(BaseEyeTracker): ## Initializes the EyeTracker object. def __init__(self, display, logfile=settings.LOGFILE, \ eventdetection=settings.EVENTDETECTION, \ saccade_velocity_threshold=35, \ saccade_acceleration_threshold=9500, \ blink_threshold=settings.BLINKTHRESH, \ **args): # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, EyeLogicTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass self.disp = display self.screen = Screen() self.dispsize = self.disp.dispsize # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # show a message self.screen.clear() self.screen.draw_text( text="Initialising the eye tracker, please wait...", fontsize=20) self.disp.fill(self.screen) self.disp.show() # output file properties self.logfile = logfile # eye tracker properties self._recording = Event() self._recording.clear() self._calibrated = Event() self._calibrated.clear() self.eye_used = 2 # 0=left, 1=right, 2=binocular self.sampleLock = Lock() self.lastSample = None self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) # event detection properties self.pxfixtresh = 50; self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method self.eventdetection = eventdetection self._log_vars = [ \ "timestampMicroSec", \ "index", \ "porFilteredX", \ "porFilteredY", \ "porLeftX", \ "porLeftY", \ "pupilRadiusLeft", \ "porRightX", \ "porRightY", \ "pupilRadiusRight", \ ] # Open a new log file. dir_name = os.path.dirname(logfile) file_name = os.path.basename(logfile) name, ext = os.path.splitext(file_name) self._data_file_path = os.path.join(dir_name, name+".eyelogic.csv") self._log_file = open(self._data_file_path, "w") # Write a header to the log. header = ["TYPE"] header.extend(self._log_vars) self._sep = ";" self._log_file.write("Sep="+self._sep+"\n") self._log_file.write(self._sep.join(map(str, header))) # Create a lock to prevent simultaneous access to the log file. 
self._logging_queue = Queue() self._logging_queue_empty = Event() self._logging_queue_empty.set() self._connected = Event() self._connected.set() self._log_counter = 0 self._log_consolidation_freq = 60 self._logging_thread = Thread( target=self.loggingThread, \ name='PyGaze_EyeLogic_Logging', args=[]) global g_api g_api = self # log self.log("pygaze initiation") #self.log("experiment = {}".format(self.description)) #self.log("participant = {}".format(self.participant)) self.log("display resolution = {}x{}".format(self.dispsize[0], \ self.dispsize[1])) self.log("display size in cm = {}x{}".format(self.screensize[0], \ self.screensize[1])) self.log("fixation threshold = {} degrees".format(self.fixtresh)) self.log("speed threshold = {} degrees/second".format(self.spdtresh)) self.log("acceleration threshold = {} degrees/second**2".format( \ self.accthresh)) # connect self.api = ELApi( "PyGaze" ) self.api.registerGazeSampleCallback( gazeSampleCallback ) self.api.registerEventCallback( eventCallback ) resultConnect = self.api.connect() if (resultConnect != ELApi.ReturnConnect.SUCCESS): self._connected.clear() raise Exception("Cannot connect to EyeLogic server = {}".format(errorstringConnect(resultConnect))) self._connected.set() screenConfig = self.api.getScreenConfig() self.log("eye tracker is mounted on screen {}".format(screenConfig.id)) self.rawResolution = (screenConfig.resolutionX, screenConfig.resolutionY) self.log("raw screen resolution = {}x{}".format( self.rawResolution[0], self.rawResolution[1])) self.log("end pygaze initiation") deviceConfig = self.api.getDeviceConfig() if (deviceConfig.deviceSerial == 0): raise Exception("no eye tracking device connected") if (len(deviceConfig.frameRates) == 0): raise Exception("failed to read out device configuration") g_api.sampleRate = deviceConfig.frameRates[0] g_api.sampleTime = 1000.0 / g_api.sampleRate g_api.log("samplerate = {} Hz".format(g_api.sampleRate)) g_api.log("sampletime = {} ms".format(g_api.sampleTime)) self._logging_thread.start() self.screen.clear() self.disp.fill(self.screen) self.disp.show() def loggingThread(self): while self._connected.is_set(): # Check if the sample Queue is empty. if self._logging_queue.empty(): # Signal to other Threads that the logging Queue is empty. if not self._logging_queue_empty.is_set(): self._logging_queue_empty.set() # Process data from the Queue. else: # Signal to other Threads that the Queue isn't empty. if self._logging_queue_empty.is_set(): self._logging_queue_empty.clear() # Get the next object from the Queue. sample = self._logging_queue.get() # Log the message string and/or the sample. if type(sample) in [tuple, list]: self._write_tuple(sample) elif type(sample) == ELGazeSample: self._write_sample(sample) else: print("WARNING = Unrecognised object in log queue = '{}'".format( \ sample)) # Increment the log counter. self._log_counter += 1 # Check if the log file needs to be consolidated. if self._log_counter % self._log_consolidation_freq == 0: # Internal buffer to RAM. self._log_file.flush() # RAM to disk. os.fsync(self._log_file.fileno()) # Release the log file lock. def _write_sample(self, sample): # Construct a list with the sample data. line = ["DAT"] for var in self._log_vars: line.append(sample.__getattribute__(var)) # Log the sample to the log file. self._log_file.write("\n" + self._sep.join(map(str, line))) def _write_tuple(self, tup): # Construct a list values that need to be logged. line = [] # Add the values that need to be logged. 
Usually this will be ("MSG", # timestamp, message). line.extend(tup) # Pad the list so that it will be of equal length to the sample # lines, which makes it easier to be read into a spreadsheet editor # and by some read_csv functions. line.extend([""] * (len(self._log_vars) - len(line) - 1)) # Log the line to the log file. self._log_file.write("\n" + self._sep.join(map(str, line))) ## Calibrates the eye tracking system. def calibrate(self): #self.screen.clear() #self.screen.draw_text( # text="Calibrate EyeTracker", # fontsize=20) #self.disp.fill(self.screen) #self.disp.show() if (not self._recording.is_set()): resultTracking = self.api.requestTracking(0) if (resultTracking != ELApi.ReturnStart.SUCCESS): raise Exception("unable to start eye tracker") resultCalibrate = self.api.calibrate(0) if (resultCalibrate != ELApi.ReturnCalibrate.SUCCESS): self.api.unrequestTracking() self.errorbeep.play() raise Exception("Calibration failed = {}".format(errorstringCalibrate(resultCalibrate))) self._calibrated.set() # NOISE CALIBRATION self.screen.clear() self.screen.draw_text( text="Noise calibration. Please look at the dot, and press any key to start.", fontsize=20, \ pos=(int(self.dispsize[0]/2),int(self.dispsize[1]*0.3))) x = int(float(self.dispsize[0]) / 2.0) y = int(float(self.dispsize[1]) / 2.0) self.screen.draw_fixation(fixtype="dot", pos=(x,y)) self.disp.fill(self.screen) self.disp.show() self.kb.get_key(keylist=None, timeout=None, flush=True) # wait for a bit, to allow participant to fixate clock.pause(500) # get distance to screen screendist = 0 i = 0 while screendist == 0 and i < self.maxtries: i = i+1 self.sampleLock.acquire() if (self.lastSample is not None): if self.eye_used != 1 and self.lastSample.eyePositionLeftZ != ELInvalidValue: screendist = self.lastSample.eyePositionLeftZ / 10.0 # eyePositionZ is in mm; screendist is in cm elif self.eye_used != 0 and self.lastSample.eyePositionRightZ != ELInvalidValue: screendist = self.lastSample.eyePositionRightZ / 10.0 self.sampleLock.release() clock.pause(int(self.sampleTime)) if i >= self.maxtries: self.api.unrequestTracking() self.errorbeep.play() raise Exception("unable to receive gaze data for noise calibration") # get samples sl = [self.sample()] # samplelist, prefilled with 1 sample to prevent sl[-1] from producing an error; first sample will be ignored for RMS calculation t0 = clock.get_time() # starting time while clock.get_time() - t0 < 1000: s = self.sample() # sample if s[0] != -1 and s[1] != -1 and s[0] != ELInvalidValue and s[1] != ELInvalidValue: sl.append(s) clock.pause(int(self.sampleTime)) if (len(sl) < 2): if (not self._recording.is_set()): self.api.unrequestTracking() return False # calculate RMS noise Xvar = [] Yvar = [] Xmean = 0. Ymean = 0. 
for i in range(2,len(sl)): Xvar.append((sl[i][0]-sl[i-1][0])**2) Yvar.append((sl[i][1]-sl[i-1][1])**2) Xmean += sl[i][0] Ymean += sl[i][1] XRMS = (sum(Xvar) / len(Xvar))**0.5 YRMS = (sum(Yvar) / len(Yvar))**0.5 Xmean = Xmean / (len(sl)-2) Ymean = Ymean / (len(sl)-2) self.pxdsttresh = (XRMS, YRMS) # calculate pixels per cm pixpercm = (self.dispsize[0]/float(self.screensize[0]) + self.dispsize[1]/float(self.screensize[1])) / 2 # get accuracy accuracyPxX = abs( Xmean - x ) accuracyPxY = abs( Ymean - y ) self.accuracy = ( pix2deg(screendist, accuracyPxX, pixpercm), \ pix2deg(screendist, accuracyPxY, pixpercm) ) # calculate thresholds based on tracker settings self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm) self.pxaccuracy = (accuracyPxX, accuracyPxY ) self.pxspdtresh = deg2pix(screendist, self.spdtresh/1000.0, pixpercm) # in pixels per millisecond self.pxacctresh = deg2pix(screendist, self.accthresh/1000.0, pixpercm) # in pixels per millisecond**2 ## log self.log("pygaze calibration") self.log("accuracy (degrees) = X={}, Y={}".format( \ self.accuracy[0], self.accuracy[1] )) self.log("accuracy (in pixels) = X={}, Y={}".format( \ self.pxaccuracy[0], self.pxaccuracy[1])) self.log("precision (RMS noise in pixels) = X={}, Y={}".format( \ self.pxdsttresh[0], self.pxdsttresh[1])) self.log("distance between participant and display = {} cm".format(screendist)) self.log("fixation threshold = {} pixels".format(self.pxfixtresh)) self.log("speed threshold = {} pixels/ms".format(self.pxspdtresh)) self.log("acceleration threshold = {} pixels/ms**2".format(self.pxacctresh)) if (not self._recording.is_set()): self.api.unrequestTracking() return True ## Neatly closes connection to tracker. def close(self): if self._recording.is_set(): self.stop_recording() # Wait until the Queue is empty, or until 60 seconds have passed. queue_empty = self._logging_queue_empty.wait(timeout=15.0) if not queue_empty: print("WARNING = Logging Thread timeout occurred; something might have gone wrong!") # Signal to the Threads to stop. self._connected.clear() # Close the log file. self._log_file.close() # Close the connection. self.api.disconnect() self._connected = False ## Checks if the tracker is connected. def connected(self): isConnected = self.api.isConnected() if isConnected: self._connected.set() else: self._connected.clear() return isConnected ## Performs a drift check def drift_correction(self, pos=None, fix_triggered=False): return True ## Performs a fixation triggered drift correction by collecting # a number of samples and calculating the average distance from the # fixation position. def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30): pass ## Returns the difference between tracker time and PyGaze time, # which can be used to synchronize timing def get_eyetracker_clock_async(self): return 0 ## Writes a message to the log file. def log(self, msg): # Get current timestamp. self.sampleLock.acquire() if self.lastSample is None: t = 0 else: t = self.lastSample.timestampMicroSec self.sampleLock.release() # Construct a tuple, and add it to the queue. 
self._logging_queue.put(("MSG", t, msg)) ## Writes a variable's name and value to the log file def log_var(self, var, val): pass ## Returns the newest pupil size sample def pupil_size(self): self.sampleLock.acquire() pupilSize = -1 if (self.lastSample is not None): if self.eye_used == 0: pupilSize = 2.*self.lastSample.pupilRadiusLeft; elif self.eye_used == 1: pupilSize = 2.*self.lastSample.pupilRadiusRight; elif self.eye_used == 2: pupilSize = self.lastSample.pupilRadiusLeft + self.lastSample.pupilRadiusRight; self.sampleLock.release() return pupilSize ## Returns newest available gaze position. def sample(self): self.sampleLock.acquire() por = (-1, -1) if (self.lastSample is not None): if self.eye_used == 0: por = (self.lastSample.porLeftX, self.lastSample.porLeftY) elif self.eye_used == 1: por = (self.lastSample.porRightX, self.lastSample.porRightY) elif self.eye_used == 2: por = (self.lastSample.porFilteredX, self.lastSample.porFilteredY) self.sampleLock.release() return por # Directly sends a command to the eye tracker. def send_command(self, cmd): pass ## Set the event detection type to either PyGaze algorithms, or # native algorithms. def set_detection_type(self, eventdetection): # detection type for saccades, fixations, blinks (pygaze or native) return ('pygaze','pygaze','pygaze') ## Specifies a custom function to draw the calibration target. def set_draw_calibration_target_func(self, func): pass ## Specifies a custom function to draw the drift-correction target. def set_draw_drift_correction_target_func(self, func): pass ## Logs the eye_used variable, based on which eye was specified # (if both eyes are being tracked, the left eye is used) def set_eye_used(self): pass ## Starts recording. def start_recording(self): resultTracking = self.api.requestTracking(0) if (resultTracking != ELApi.ReturnStart.SUCCESS): raise Exception("unable to start eye tracker") self._recording.set() ## Sends a status message to the eye tracker, which is displayed in the tracker's GUI def status_msg(self, msg): pass ## Stops recording. def stop_recording(self): self.api.unrequestTracking() self._recording.clear() ## Waits for an event. def wait_for_event(self, event): print("waitforevent", flush=True) if event == 3: # STARTBLINK return self.wait_for_blink_start() elif event == 4: # ENDBLINK return self.wait_for_blink_end() elif event == 5: # STARTSACC return self.wait_for_saccade_start() elif event == 6: # ENDSACC return self.wait_for_saccade_end() elif event == 7: # STARTFIX return self.wait_for_fixation_start() elif event == 8: # ENDFIX return self.wait_for_fixation_end() else: raise Exception("wait_for_event({}) is not supported".format(event)) ## Waits for a blink end and returns the blink ending time. def wait_for_blink_end(self): blinking = True # loop while there is a blink while blinking: # get newest sample gazepos = self.sample() # check if it's valid if self.is_valid_sample(gazepos): # if it is a valid sample, blinking has stopped blinking = False # return timestamp of blink end return clock.get_time() ## Waits for a blink start and returns the blink starting time. 
def wait_for_blink_start(self): blinking = False # loop until there is a blink while not blinking: # get newest sample gazepos = self.sample() # check if it's a valid sample if not self.is_valid_sample(gazepos): # get timestamp for possible blink start t0 = clock.get_time() # loop until a blink is determined, or a valid sample occurs while not self.is_valid_sample(self.sample()): # check if time has surpassed BLINKTHRESH if clock.get_time()-t0 >= self.blinkthresh: # return timestamp of blink start return t0 ## Returns time and gaze position when a fixation has ended. def wait_for_fixation_end(self): print("wait_for_fixation_end", flush=True) # function assumes that a 'fixation' has ended when a deviation of more than fixtresh # from the initial 'fixation' position has been detected # get starting time and position stime, spos = self.wait_for_fixation_start() # loop until fixation has ended while True: # get new sample npos = self.sample() # get newest sample # check if sample is valid if self.is_valid_sample(npos): # check if sample deviates to much from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # break loop if deviation is too high break return clock.get_time(), spos ## Returns starting time and position when a fixation is started. def wait_for_fixation_start(self): print("wait_for_fixation_start", flush=True) # function assumes a 'fixation' has started when gaze position # remains reasonably stable for self.fixtimetresh # get starting position spos = self.sample() while not self.is_valid_sample(spos): spos = self.sample() # get starting time t0 = clock.get_time() # wait for reasonably stable position moving = True while moving: # get new sample npos = self.sample() # check if sample is valid if self.is_valid_sample(npos): # check if new sample is too far from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # if not, reset starting position and time spos = copy.copy(npos) t0 = clock.get_time() # if new sample is close to starting sample else: # get timestamp t1 = clock.get_time() # check if fixation time threshold has been surpassed if t1 - t0 >= self.fixtimetresh: # return time and starting position return t1, spos ## Returns ending time, starting and end position when a saccade is # ended. 
def wait_for_saccade_end(self): # get starting position (no blinks) t0, spos = self.wait_for_saccade_start() # get valid sample prevpos = self.sample() while not self.is_valid_sample(prevpos): prevpos = self.sample() # get starting time, intersample distance, and velocity t1 = clock.get_time() s = ((prevpos[0]-spos[0])**2 + (prevpos[1]-spos[1])**2)**0.5 # = intersample distance = speed in px/sample v0 = s / (t1-t0) # run until velocity and acceleration go below threshold saccadic = True while saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # calculate distance s = ((newpos[0]-prevpos[0])**2 + (newpos[1]-prevpos[1])**2)**0.5 # = speed in pixels/sample # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample) # check if velocity and acceleration are below threshold if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and a < 0): saccadic = False epos = newpos[:] etime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return etime, spos, epos ## Returns starting time and starting position when a saccade is started. def wait_for_saccade_start(self): # get starting position (no blinks) newpos = self.sample() while not self.is_valid_sample(newpos): newpos = self.sample() # get starting time, position, intersampledistance, and velocity t0 = clock.get_time() prevpos = newpos[:] s = 0 v0 = 0 # get samples saccadic = False while not saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # check if distance is larger than precision error sx = newpos[0]-prevpos[0]; sy = newpos[1]-prevpos[1] if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 > self.weightdist: # weigthed distance = (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise # calculate distance s = ((sx)**2 + (sy)**2)**0.5 # intersampledistance = speed in pixels/ms # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2 # check if either velocity or acceleration are above threshold values if v1 > self.pxspdtresh or a > self.pxacctresh: saccadic = True spos = prevpos[:] stime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return stime, spos
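# The two saccade methods above implement an online velocity/acceleration test
# (with a weighted-distance gate against RMS noise). The following is a
# minimal, self-contained sketch of that test over a pre-recorded list of
# samples; the function name, threshold values and the synthetic gaze trace
# are illustrative only and not part of PyGaze.

def detect_saccade_onset(samples, pxdsttresh=(2.0, 2.0), pxspdtresh=0.4,
                         pxacctresh=0.01, weightdist=10):
    """samples -- list of (t_ms, x_px, y_px) tuples, ordered by time.
    Returns (t, (x, y)) of the sample preceding the detected onset, or None."""
    t0, px, py = samples[0]
    v0 = 0.0
    for t1, x, y in samples[1:]:
        sx, sy = x - px, y - py
        # ignore movements that are indistinguishable from RMS noise
        if (sx / pxdsttresh[0])**2 + (sy / pxdsttresh[1])**2 > weightdist:
            s = (sx**2 + sy**2)**0.5          # intersample distance (px)
            v1 = s / (t1 - t0)                # velocity (px/ms)
            a = (v1 - v0) / (t1 - t0)         # acceleration (px/ms**2)
            if v1 > pxspdtresh or a > pxacctresh:
                return t0, (px, py)
            v0 = v1
        t0, px, py = t1, x, y
    return None

if __name__ == "__main__":
    # 100 ms of quiet fixation followed by a fast rightward sweep
    trace = [(t, 512.0, 384.0) for t in range(0, 100, 10)]
    trace += [(100 + i * 10, 512.0 + i * 60.0, 384.0) for i in range(1, 6)]
    print(detect_saccade_onset(trace))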
class OpenGazeTracker(BaseEyeTracker): """A class for OpenGazeTracker objects""" def __init__(self, display, logfile=settings.LOGFILE, \ eventdetection=settings.EVENTDETECTION, \ saccade_velocity_threshold=35, \ saccade_acceleration_threshold=9500, \ blink_threshold=settings.BLINKTHRESH, \ **args): """Initializes the OpenGazeTracker object arguments display -- a pygaze.display.Display instance keyword arguments logfile -- logfile name (string value); note that this is the name for the eye data log file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, OpenGazeTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = settings.DISPSIZE # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # show a message self.screen.clear() self.screen.draw_text( text="Initialising the eye tracker, please wait...", fontsize=20) self.disp.fill(self.screen) self.disp.show() # output file properties self.outputfile = logfile + '.tsv' self.extralogname = logfile + '_log.txt' self.extralogfile = open(self.extralogname, 'w') # eye tracker properties self.has_been_calibrated_before = False self.connected = False self.recording = False self.errdist = 2 # degrees; maximal error for drift correction self.pxerrdist = 30 # initial error in pixels self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1,-1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # connect to the tracker self.opengaze = OpenGaze(ip='127.0.0.1', port=4242, \ logfile=self.outputfile, debug=False) # get info on the sample rate # TODO: Compute after streaming some samples? 
self.samplerate = 60.0 self.sampletime = 1000.0 / self.samplerate # initiation report self._elog("pygaze initiation report start") self._elog("display resolution: %sx%s" % (self.dispsize[0], self.dispsize[1])) self._elog("display size in cm: %sx%s" % (self.screensize[0], self.screensize[1])) self._elog("samplerate: %.2f Hz" % self.samplerate) self._elog("sampletime: %.2f ms" % self.sampletime) self._elog("fixation threshold: %s degrees" % self.fixtresh) self._elog("speed threshold: %s degrees/second" % self.spdtresh) self._elog("acceleration threshold: %s degrees/second**2" % self.accthresh) self._elog("pygaze initiation report end") def _elog(self, msg): """Logs a message to the additional log. """ self.extralogfile.write(msg + '\n') def calibrate(self): """Calibrates the eye tracking system arguments None keyword arguments None returns success -- returns True if calibration succeeded, or False if not; in addition a calibration log is added to the log file and some properties are updated (i.e. the thresholds for detection algorithms) """ # show a message self.screen.clear() self.screen.draw_text( text="Preparing the calibration, please wait...", fontsize=20) self.disp.fill(self.screen) self.disp.show() # CALIBRATION # Set the duration of the calibration animation, and of the # calibration point. caldur = {'animation':1.5, 'point':1.0, 'timeout':10.0} self.opengaze.calibrate_delay(caldur['animation']) self.opengaze.calibrate_timeout(caldur['point']) # Determine the calibration points. #calibpoints = [(0.1,0.5), (0.5, 0.1), (0.5, 0.5), (0.5, 0.9), (0.9, 0.5)] calibpoints = [] for x in [0.1, 0.5, 0.9]: for y in [0.1, 0.5, 0.9]: calibpoints.append((x,y)) random.shuffle(calibpoints) # Clear the OpenGaze calibration. self.opengaze.calibrate_clear() # Add all new points (as proportions of the display resolution). for x, y in calibpoints: self.opengaze.calibrate_addpoint(x, y) # show a message self.screen.clear() self.screen.draw_text( text="Press Space to calibrate, S to skip, and Q to quit", fontsize=20) self.disp.fill(self.screen) self.disp.show() # wait for keyboard input key, keytime = self.kb.get_key(keylist=['q', 's', 'space'], timeout=None, flush=True) if key == 's': return True if key == 'q': quited = True else: quited = False # Run until the user is statisfied, or quits. calibrated = False while not quited and not calibrated: # CALIBRATE # Run a new calibration. The result is the latest available # calibration results as a list of dicts, each with the # following keys: # CALX: Calibration point's horizontal coordinate. # CALY: Calibration point's vertical coordinate # LX: Left eye's recorded horizontal point of gaze. # LY: Left eye's recorded vertical point of gaze. # LV: Left eye's validity status (1=valid, 0=invalid) # RX: Right eye's recorded horizontal point of gaze. # RY: Right eye's recorded vertical point of gaze. # RV: Right eye's validity status (1=valid, 0=invalid) # Clear the existing calibration results. self.opengaze.clear_calibration_result() # Make sure we have the right calibration points. # NOTE: Somehow polling this results in no weird OpenGaze errors # on calibrations that occur after the first one. # (WTF, Gazepoint?!) calibpoints = self.opengaze.get_calibration_points() # Show the calibration screen. # NOTE: THIS DOESN'T WORK IN FULL SCREEN MODE :( #self.opengaze.calibrate_show(True) # Start the calibration. self.opengaze.calibrate_start(True) # Show the calibration dots. 
The strategy is to wait for the # next calibration point start, then to show that dot, and # then to show the animation (hoping to Godzilla that the # timing roughly matches that of the OpenGaze server), and # then to keep the target on-screen until the start of the # next calibration point. pointnr = 0 n_points = len(calibpoints) # On a restart, the calibration starts with the last point, # before looping through all the other points. (DAMN YOU, # GAZEPOINT, THAT DOES NOT MAKE SENSE!) if self.has_been_calibrated_before: n_points += 1 # Loop through all the points. for i in range(n_points): # Wait for the next calibration point. pointnr, pos = self.opengaze.wait_for_calibration_point_start( \ timeout=caldur['timeout']) # The wait_for_calibration_point_start function returns # None if no point was started before a timeout. We # should panic if no calibration point was started. if pointnr is None: # Break the calibration loop, and quit the current # calibration. quited = True break # Compute the point in display coordinates. x = int(pos[0] * self.dispsize[0]) y = int(pos[1] * self.dispsize[1]) # Get a timestamp for the start of the animation. t1 = clock.get_time() t = clock.get_time() # Show the animation. while t - t1 < caldur['animation']*1000: # Check if the Q key has been pressed, and break # if it has. if self.kb.get_key(keylist=['q'], timeout=10, \ flush=False)[0] == 'q': quited = True break # Clear the screen. self.screen.clear(colour=(0,0,0)) # Caculate at which point in the animation we are. p = 1.0 - float(t-t1) / (caldur['animation']*1000) # Draw the animated disk. self.screen.draw_circle(colour=(255,255,255), \ pos=(x, y), r=max(1, int(30*p)), fill=True) # Draw the calibration target. self.screen.draw_circle(colour=(255,0,0), \ pos=(x, y), r=3, fill=True) # Show the screen. self.disp.fill(self.screen) t = self.disp.show() # Check if the Q key has been pressed, and break # if it has. if self.kb.get_key(keylist=['q'], timeout=1, \ flush=False)[0] == 'q': quited = True # Don't show the other points if Q was pressed. if quited: break # Wait for the calibration result. calibresult = None while (calibresult is None) and (not quited): # Check if there is a result yet (returns None if there # isn't). calibresult = self.opengaze.get_calibration_result() # Check if the Q key has been pressed, and break if it # is. if self.kb.get_key(keylist=['q'], timeout=100, \ flush=False)[0] == 'q': quited = True break # Hide the calibration window. # NOTE: No need for this in full-screen mode. #self.opengaze.calibrate_show(False) # Retry option if the calibration was aborted if quited: # show retry message self.screen.clear() self.screen.draw_text( \ text="Calibration aborted. Press Space to restart or 'Q' to quit", \ fontsize=20) self.disp.fill(self.screen) self.disp.show() # get input key, keytime = self.kb.get_key(keylist=['q','space'], \ timeout=None, flush=True) if key == 'space': # unset quited Boolean quited = False # skip further processing continue # Empty display. self.disp.fill() self.disp.show() # RESULTS # Clear the screen. self.screen.clear() # draw results for each point if calibresult is not None: # Loop through all points. for p in calibresult: # Convert the points (relative coordinates) to # display coordinates. for param in ['CALX', 'LX', 'RX']: p[param] *= self.dispsize[0] for param in ['CALY', 'LY', 'RY']: p[param] *= self.dispsize[1] # Draw the target. 
self.screen.draw_fixation(fixtype='dot', colour=(115,210,22), \ pos=(p['CALX'], p['CALY'])) # If the calibration for this target is valid, # draw the estimated point. We have two points: # one for left and one for right. col = {'L':(32,74,135), 'R':(92,53,102)} for eye in ['L', 'R']: # Check if the eye is valid, and choose the # position and colour accordingly. if p['%sV' % (eye)]: x = p['%sX' % (eye)] y = p['%sY' % (eye)] c = col[eye] else: x = p['CALX'] y = p['CALY'] c = (204,0,0) # Draw a line between the estimated and the # actual point. if p['%sV' % (eye)]: self.screen.draw_line(colour=c, \ spos=(p['CALX'], p['CALY']), \ epos=(x,y), \ pw=3) # Draw the estimated gaze point. self.screen.draw_fixation( \ fixtype='dot', pos=(x, y), colour=c) # Annotate which eye this is. self.screen.draw_text(text=eye, \ pos=(x+10, y+10), colour=c, \ fontsize=20) # Draw input options. self.screen.draw_text( text="Press Space to continue or 'R' to restart", pos=(int(self.dispsize[0]*0.5), \ int(self.dispsize[1]*0.25+60)), \ fontsize=20) else: self.screen.draw_text( text="Calibration failed. Press 'R' to try again.", fontsize=20) # Show the results. self.disp.fill(self.screen) self.disp.show() # Wait for input. key, keytime = self.kb.get_key(keylist=['space','r'], \ timeout=None, flush=True) # Process input. if key == 'space': calibrated = True # Set the 'restart' flag to True, because everything that # happens after this will be a repeated calibration or # will have noting to do with the calibration. self.has_been_calibrated_before = True # Calibration failed if the user quited. if quited: return False # NOISE CALIBRATION # Get all error estimates (distance between the real and the # estimated points in pixels). err = {'LX':[], 'LY':[], 'RX':[], 'RY':[]} var = {'LX':[], 'LY':[], 'RX':[], 'RY':[]} for p in calibresult: # Only use the point if it was valid. for eye in ['L', 'R']: for dim in ['X', 'Y']: if p['%sV' % (eye)]: # Compute the distance between the points. d = p['%s%s' % (eye, dim)] - \ p['CAL%s' % (dim)] # Store the distance. err['%s%s' % (eye, dim)].append(abs(d)) # Store the squared distance. var['%s%s' % (eye, dim)].append(d**2) # Compute the RMS noise for the calibration points. 
xnoise = (math.sqrt(sum(var['LX']) / float(len(var['LX']))) + \ math.sqrt(sum(var['RX']) / float(len(var['RX'])))) / 2.0 ynoise = (math.sqrt(sum(var['LY']) / float(len(var['LY']))) + \ math.sqrt(sum(var['RY']) / float(len(var['RY'])))) / 2.0 self.pxdsttresh = (xnoise, ynoise) # AFTERMATH # store some variables pixpercm = (self.dispsize[0] / float(self.screensize[0]) + \ self.dispsize[1]/float(self.screensize[1])) / 2 screendist = settings.SCREENDIST # calculate thresholds based on tracker settings self.accuracy = ( \ (pix2deg(screendist, sum(err['LX']) / float(len(err['LX'])), pixpercm), \ pix2deg(screendist, sum(err['LY']) / float(len(err['LY'])), pixpercm)), \ (pix2deg(screendist, sum(err['RX']) / float(len(err['RX'])), pixpercm), \ pix2deg(screendist, sum(err['RY']) / float(len(err['RY'])), pixpercm))) self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm) self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm) self.pxaccuracy = ( \ (sum(err['LX']) / float(len(err['LX'])), \ sum(err['LY']) / float(len(err['LY']))), \ (sum(err['RX']) / float(len(err['RX'])), \ sum(err['RY']) / float(len(err['RY'])))) self.pxspdtresh = deg2pix(screendist, self.spdtresh/1000.0, pixpercm) # in pixels per millisecond self.pxacctresh = deg2pix(screendist, self.accthresh/1000.0, pixpercm) # in pixels per millisecond**2 # calibration report self._elog("pygaze calibration report start") self._elog("accuracy (degrees): LX=%s, LY=%s, RX=%s, RY=%s" % (self.accuracy[0][0],self.accuracy[0][1],self.accuracy[1][0],self.accuracy[1][1])) self._elog("accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s" % (self.pxaccuracy[0][0],self.pxaccuracy[0][1],self.pxaccuracy[1][0],self.pxaccuracy[1][1])) self._elog("precision (RMS noise in pixels): X=%s, Y=%s" % (self.pxdsttresh[0],self.pxdsttresh[1])) self._elog("distance between participant and display: %s cm" % screendist) self._elog("fixation threshold: %s pixels" % self.pxfixtresh) self._elog("speed threshold: %s pixels/ms" % self.pxspdtresh) self._elog("acceleration threshold: %s pixels/ms**2" % self.pxacctresh) self._elog("pygaze calibration report end") return True def close(self): """Neatly close connection to tracker arguments None returns Nothing -- saves data and sets self.connected to False """ # Close additional log file. 
self.extralogfile.close() # close connection self.opengaze.close() self.connected = False def connected(self): """Checks if the tracker is connected arguments None returns connected -- True if connection is established, False if not; sets self.connected to the same value """ self.connected = self.opengaze._connected.is_set() return self.connected def drift_correction(self, pos=None, fix_triggered=False): """Performs a drift check arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) fix_triggered -- Boolean indicating if drift check should be performed based on gaze position (fix_triggered = True) or on spacepress (fix_triggered = False) (default = False) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 if fix_triggered: return self.fix_triggered_drift_correction(pos) # DEBUG # print(("Running drift correction, pos=(%d, %d)" % (pos[0], pos[1]))) # # # # # self.draw_drift_correction_target(pos[0], pos[1]) pressed = False while not pressed: pressed, presstime = self.kb.get_key() if pressed: if pressed == 'escape' or pressed == 'q': print("libopengaze.OpenGazeTracker.drift_correction: 'q' or 'escape' pressed") return self.calibrate() gazepos = self.sample() if ((gazepos[0]-pos[0])**2 + (gazepos[1]-pos[1])**2)**0.5 < self.pxerrdist: return True else: self.errorbeep.play() return False def draw_drift_correction_target(self, x, y): """ Draws the drift-correction target. arguments x -- The X coordinate y -- The Y coordinate """ self.screen.clear() self.screen.draw_fixation(fixtype='dot', colour=settings.FGC, pos=(x,y), pw=0, diameter=12) self.disp.fill(self.screen) self.disp.show() def draw_calibration_target(self, x, y): self.draw_drift_correction_target(x, y) def fix_triggered_drift_correction(self, pos=None, min_samples=4, max_dev=120, timeout=10000): """Performs a fixation triggered drift correction by collecting a number of samples and calculating the average distance from the fixation position arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) min_samples -- minimal amount of samples after which a fixation is accepted (default = 4) max_dev -- maximal deviation from fixation in pixels (default = 120) timeout -- Time in milliseconds until fixation-triggering is given up on, and calibration is started (default = 10000) returns checked -- Boolean indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 self.draw_drift_correction_target(pos[0], pos[1]) t0 = clock.get_time() consecutive_count = 0 while consecutive_count < min_samples: # Get new sample. x, y = self.sample() # Ignore empty samples. if (x is None) or (y is None): continue # Measure the distance to the target position. d = ((x-pos[0])**2 + (y-pos[1])**2)**0.5 # Check whether the distance is below the allowed distance. if d <= max_dev: # Increment count. consecutive_count += 1 else: # Reset count. consecutive_count = 0 # Check for a timeout. if clock.get_time() - t0 > timeout: print("libopengaze.OpenGazeTracker.fix_triggered_drift_correction: timeout during fixation-triggered drift check") return self.calibrate() # Pressing escape enters the calibration screen. 
if self.kb.get_key()[0] in ['escape','q']: print("libopengaze.OpenGazeTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed") return self.calibrate() return True def get_eyetracker_clock_async(self): """Not supported for OpenGazeTracker (yet)""" print("function not supported yet") def log(self, msg): """Writes a message to the log file arguments ms -- a string to include in the log file returns Nothing -- uses native log function of iViewX to include a line in the log file """ self._elog(msg) if self.recording: self.opengaze.log(msg) def prepare_drift_correction(self, pos): """Not supported for OpenGazeTracker (yet)""" print("function not supported yet") def pupil_size(self): """Return pupil size arguments None returns pupil size -- returns pupil diameter for the eye that is currently being tracked (as specified by self.eye_used) or -1 when no data is obtainable """ # get newest pupil size ps = self.opengaze.pupil_size() # invalid data if ps == None: return -1 # check if the new pupil size is the same as the previous if ps != self.prevps: # update the pupil size self.prevps = copy.copy(ps) return self.prevps def sample(self): """Returns newest available gaze position arguments None returns sample -- an (x,y) tuple or a (-1,-1) on an error """ # Get newest sample. rs = self.opengaze.sample() # Invalid data. if rs == (None, None): return (-1,-1) # Convert relative coordinates to display coordinates. s = (rs[0]*self.dispsize[0], rs[1]*self.dispsize[1]) # Check if the new sample is the same as the previous. if s != self.prevsample: # Update the current sample. self.prevsample = copy.copy(s) return self.prevsample def send_command(self, cmd): """Function not supported. Use self.opengaze instead; it supports all possible API calls. """ print("send_command function not supported; use self.opengaze instead") def start_recording(self): """Starts recording eye position arguments None returns Nothing -- sets self.recording to True when recording is successfully started """ self.opengaze.start_recording() self.recording = True def status_msg(self, msg): """Not supported for OpenGazeTracker (yet)""" print("function not supported yet") def stop_recording(self): """Stop recording eye position arguments None returns Nothing -- sets self.recording to False when recording is successfully started """ self.opengaze.stop_recording() self.recording = False def set_detection_type(self, eventdetection): """Set the event detection type to either PyGaze algorithms, or native algorithms as provided by the manufacturer (only if available: detection type will default to PyGaze if no native functions are available) arguments eventdetection -- a string indicating which detection type should be employed: either 'pygaze' for PyGaze event detection algorithms or 'native' for manufacturers algorithms (only if available; will default to 'pygaze' if no native event detection is available) returns -- detection type for saccades, fixations and blinks in a tuple, e.g. 
('pygaze','native','native') when 'native' was passed, but native detection was not available for saccade detection """ if eventdetection in ['pygaze','native']: self.eventdetection = eventdetection return ('pygaze','pygaze','pygaze') def wait_for_event(self, event): """Waits for event arguments event -- an integer event code, one of the following: 3 = STARTBLINK 4 = ENDBLINK 5 = STARTSACC 6 = ENDSACC 7 = STARTFIX 8 = ENDFIX returns outcome -- a self.wait_for_* method is called, depending on the specified event; the return values of corresponding method are returned """ if event == 5: outcome = self.wait_for_saccade_start() elif event == 6: outcome = self.wait_for_saccade_end() elif event == 7: outcome = self.wait_for_fixation_start() elif event == 8: outcome = self.wait_for_fixation_end() elif event == 3: outcome = self.wait_for_blink_start() elif event == 4: outcome = self.wait_for_blink_end() else: raise Exception("Error in libopengaze.OpenGazeTracker.wait_for_event: eventcode %s is not supported" % event) return outcome def wait_for_blink_end(self): """Waits for a blink end and returns the blink ending time arguments None returns timestamp -- blink ending time in milliseconds, as measured from experiment begin time """ # # # # # # OpenGaze method if self.eventdetection == 'native': # print warning, since OpenGaze does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but OpenGaze does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = True # loop while there is a blink while blinking: # get newest sample gazepos = self.sample() # check if it's valid if self.is_valid_sample(gazepos): # if it is a valid sample, blinking has stopped blinking = False # return timestamp of blink end return clock.get_time() def wait_for_blink_start(self): """Waits for a blink start and returns the blink starting time arguments None returns timestamp -- blink starting time in milliseconds, as measured from experiment begin time """ # # # # # # OpenGaze method if self.eventdetection == 'native': # print warning, since OpenGaze does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but OpenGaze does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = False # loop until there is a blink while not blinking: # get newest sample gazepos = self.sample() # check if it's a valid sample if not self.is_valid_sample(gazepos): # get timestamp for possible blink start t0 = clock.get_time() # loop until a blink is determined, or a valid sample occurs while not self.is_valid_sample(self.sample()): # check if time has surpassed BLINKTHRESH if clock.get_time()-t0 >= self.blinkthresh: # return timestamp of blink start return t0 def wait_for_fixation_end(self): """Returns time and gaze position when a fixation has ended; function assumes that a 'fixation' has ended when a deviation of more than self.pxfixtresh from the initial fixation position has been detected (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # OpenGaze method if self.eventdetection == 'native': # print warning, since OpenGaze does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but OpenGaze does not offer fixation detection; \ PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes that a 'fixation' has ended when a deviation of more than fixtresh # from the initial 'fixation' position has been detected # get starting time and position stime, spos = self.wait_for_fixation_start() # loop until fixation has ended while True: # get new sample npos = self.sample() # get newest sample # check if sample is valid if self.is_valid_sample(npos): # check if sample deviates to much from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # break loop if deviation is too high break return clock.get_time(), spos def wait_for_fixation_start(self): """Returns starting time and position when a fixation is started; function assumes a 'fixation' has started when gaze position remains reasonably stable (i.e. when most deviant samples are within self.pxfixtresh) for five samples in a row (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # OpenGaze method if self.eventdetection == 'native': # print warning, since OpenGaze does not have a fixation start # detection built into their API (only ending) print("WARNING! 
'native' event detection has been selected, \ but OpenGaze does not offer fixation detection; \ PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes a 'fixation' has started when gaze position # remains reasonably stable for self.fixtimetresh # get starting position spos = self.sample() while not self.is_valid_sample(spos): spos = self.sample() # get starting time t0 = clock.get_time() # wait for reasonably stable position moving = True while moving: # get new sample npos = self.sample() # check if sample is valid if self.is_valid_sample(npos): # check if new sample is too far from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # if not, reset starting position and time spos = copy.copy(npos) t0 = clock.get_time() # if new sample is close to starting sample else: # get timestamp t1 = clock.get_time() # check if fixation time threshold has been surpassed if t1 - t0 >= self.fixtimetresh: # return time and starting position return t1, spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a saccade is ended; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos, endpos -- endtime in milliseconds (from expbegintime); startpos and endpos are (x,y) gaze position tuples """ # # # # # # OpenGaze method if self.eventdetection == 'native': # print warning, since OpenGaze does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but OpenGaze does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) t0, spos = self.wait_for_saccade_start() # get valid sample prevpos = self.sample() while not self.is_valid_sample(prevpos): prevpos = self.sample() # get starting time, intersample distance, and velocity t1 = clock.get_time() s = ((prevpos[0]-spos[0])**2 + (prevpos[1]-spos[1])**2)**0.5 # = intersample distance = speed in px/sample v0 = s / (t1-t0) # run until velocity and acceleration go below threshold saccadic = True while saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # calculate distance s = ((newpos[0]-prevpos[0])**2 + (newpos[1]-prevpos[1])**2)**0.5 # = speed in pixels/sample # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample) # check if velocity and acceleration are below threshold if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and a < 0): saccadic = False epos = newpos[:] etime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return etime, spos, epos def wait_for_saccade_start(self): """Returns starting time and starting position when a saccade is started; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos -- endtime in milliseconds (from expbegintime); startpos is an (x,y) gaze position tuple """ # # # # # # OpenGaze method if self.eventdetection == 'native': # print warning, since OpenGaze does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but OpenGaze does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) newpos = self.sample() while not self.is_valid_sample(newpos): newpos = self.sample() # get starting time, position, intersampledistance, and velocity t0 = clock.get_time() prevpos = newpos[:] s = 0 v0 = 0 # get samples saccadic = False while not saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # check if distance is larger than precision error sx = newpos[0]-prevpos[0]; sy = newpos[1]-prevpos[1] if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 > self.weightdist: # weigthed distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise # calculate distance s = ((sx)**2 + (sy)**2)**0.5 # intersampledistance = speed in pixels/ms # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2 # check if either velocity or acceleration are above threshold values if v1 > self.pxspdtresh or a > self.pxacctresh: saccadic = True spos = prevpos[:] stime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return stime, spos def is_valid_sample(self, gazepos): """Checks if the sample provided is valid, based on OpenGaze specific criteria (for internal use) arguments gazepos -- a (x,y) gaze position tuple, as returned by self.sample() returns valid -- a Boolean: True on a valid sample, False on an invalid sample """ # return False if a sample is invalid if gazepos == (None,None) or gazepos == (-1,-1): return False # in any other case, the sample is valid return True
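# A small, self-contained sketch of the PyGaze-style blink test used by
# wait_for_blink_start above: a blink onset is reported once invalid samples
# have persisted for at least blinkthresh milliseconds. The sample feed,
# timestamps and default threshold below are synthetic and purely
# illustrative.

INVALID = (-1, -1)

def is_valid_sample(gazepos):
    return gazepos not in ((None, None), INVALID)

def find_blink_start(timed_samples, blinkthresh=150):
    """timed_samples -- iterable of (t_ms, (x, y)); returns blink onset or None."""
    t0 = None
    for t, gazepos in timed_samples:
        if not is_valid_sample(gazepos):
            if t0 is None:
                t0 = t                      # candidate blink onset
            elif t - t0 >= blinkthresh:
                return t0                   # invalid long enough: a blink
        else:
            t0 = None                       # valid sample: reset the candidate
    return None

if __name__ == "__main__":
    feed = [(t, (512, 384)) for t in range(0, 100, 20)]
    feed += [(t, INVALID) for t in range(100, 400, 20)]
    print(find_blink_start(feed))           # expected: 100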
class EyelinkGraphics(custom_display): """ Implements the EyeLink graphics that are shown on the experimental PC, such as the camera image, and the calibration dots. This class only implements the drawing operations, and little to no of the logic behind the set-up, which is implemented in PyLink. """ def __init__(self, libeyelink, tracker): """ Constructor. Arguments: libeyelink -- A libeyelink object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.libeyelink = libeyelink self.display = libeyelink.display self.screen = Screen(disptype=DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=0) self.mouse = Mouse(timeout=0) if DISPTYPE == 'pygame': self.kb.set_timeout(timeout=0.001) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. #if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), '__eyelink__.jpg') # drawing properties self.xc = self.display.dispsize[0]/2 self.yc = self.display.dispsize[1]/2 self.extra_info = True self.ld = 40 # line distance self.fontsize = libeyelink.fontsize self.title = "" self.display_open = True # menu self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False) self.menuscreen.draw_text(text="Eyelink calibration menu", pos=(self.xc,self.yc-6*self.ld), center=True, font='mono', fontsize=int(2*self.fontsize), antialias=True) self.menuscreen.draw_text(text="%s (pygaze %s, pylink %s)" \ % (libeyelink.eyelink_model, pygaze.version, pylink.__version__), pos=(self.xc,self.yc-5*self.ld), center=True, font='mono', fontsize=int(.8*self.fontsize), antialias=True) self.menuscreen.draw_text(text="Press C to calibrate", pos=(self.xc, self.yc-3*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press V to validate", pos=(self.xc, self.yc-2*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press A to auto-threshold", pos=(self.xc,self.yc-1*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press I to toggle extra info in camera image", pos=(self.xc,self.yc-0*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press Enter to show camera image", pos=(self.xc,self.yc+1*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text( text="(then change between images using the arrow keys)", pos=(self.xc, self.yc+2*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press Escape to abort experiment", pos=(self.xc, self.yc+4*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.menuscreen.draw_text(text="Press Q to exit menu", pos=(self.xc, self.yc+5*self.ld), center=True, font='mono', fontsize=self.fontsize, antialias=True) # beeps self.__target_beep__ = Sound(osc='sine', freq=440, length=50, attack=0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200, attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200, attack=0, decay=0, soundfile=None) # Colors self.color = { pylink.CR_HAIR_COLOR: pygame.Color('white'), pylink.PUPIL_HAIR_COLOR: pygame.Color('white'), pylink.PUPIL_BOX_COLOR: pygame.Color('green'), 
pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color('red'), pylink.MOUSE_CURSOR_COLOR: pygame.Color('red'), 'font': pygame.Color('white'), } # Font pygame.font.init() self.font = pygame.font.SysFont('Courier New', 11) # further properties self.state = None self.pal = None self.size = (0,0) self.set_tracker(tracker) self.last_mouse_state = -1 self.bit64 = '64bit' in platform.architecture() self.imagebuffer = self.new_array() def close(self): """ Is called when the connection and display are shutting down. """ self.display_open = False def new_array(self): """ Creates a new array with a system-specific format. Returns: An array. """ # On 64 bit Linux, we need to use an unsigned int data format. # <https://www.sr-support.com/showthread.php?3215-Visual-glitch-when-/ # sending-eye-image-to-display-PC&highlight=ubuntu+pylink> if os.name == 'posix' and self.bit64: return array.array('I') return array.array('L') def set_tracker(self, tracker): """ Connects the tracker to the graphics environment. Arguments: tracker -- An tracker object as returned by pylink.EyeLink(). """ self.tracker = tracker self.tracker_version = tracker.getTrackerVersion() if self.tracker_version >= 3: self.tracker.sendCommand("enable_search_limits=YES") self.tracker.sendCommand("track_search_limits=YES") self.tracker.sendCommand("autothreshold_click=YES") self.tracker.sendCommand("autothreshold_repeat=YES") self.tracker.sendCommand("enable_camera_position_detect=YES") def setup_cal_display(self): """ Sets up the initial calibration display, which contains a menu with instructions. """ # show instructions self.display.fill(self.menuscreen) self.display.show() def exit_cal_display(self): """Exits calibration display.""" self.clear_cal_display() def record_abort_hide(self): """TODO: What does this do?""" pass def clear_cal_display(self): """Clears the calibration display""" self.display.fill() self.display.show() def erase_cal_target(self): """TODO: What does this do?""" self.clear_cal_display() def draw_cal_target(self, x, y): """ Draws calibration target. Arguments: x -- The X coordinate of the target. y -- The Y coordinate of the target. """ self.play_beep(pylink.CAL_TARG_BEEP) self.screen.clear() self.screen.draw_fixation(fixtype='dot', pos=(x,y)) self.display.fill(screen=self.screen) self.display.show() def play_beep(self, beepid): """ Plays a sound. Arguments: beepid -- A number that identifies the sound. """ if beepid == pylink.CAL_TARG_BEEP: # For some reason, playing the beep here doesn't work, so we have # to play it when the calibration target is drawn. 
if EYELINKCALBEEP: self.__target_beep__.play() elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP: # show a picture self.screen.clear() self.screen.draw_text( text="calibration lost, press 'Enter' to return to menu", pos=(self.xc,self.yc), center=True, font='mono', fontsize=self.fontsize, antialias=True) self.display.fill(self.screen) self.display.show() # play beep self.__target_beep__error__.play() elif beepid == pylink.CAL_GOOD_BEEP: self.screen.clear() if self.state == "calibration": self.screen.draw_text( text="Calibration successful, press 'v' to validate", pos=(self.xc,self.yc), center=True, font='mono', fontsize=self.fontsize, antialias=True) elif self.state == "validation": self.screen.draw_text( text="Validation successful, press 'Enter' to return to menu", pos=(self.xc,self.yc), center=True, font='mono', fontsize=self.fontsize, antialias=True) else: self.screen.draw_text(text="Press 'Enter' to return to menu", pos=(self.xc,self.yc), center=True, font='mono', fontsize=self.fontsize, antialias=True) # show screen self.display.fill(self.screen) self.display.show() # play beep self.__target_beep__done__.play() else: # DC_GOOD_BEEP or DC_TARG_BEEP pass def draw_line(self, x1, y1, x2, y2, colorindex): """ Contrary to what its name suggests, this draws a single pixel, i.e. the end coordinates are always exactly one pixel away from the start coordinates. Arguments: x1 -- The starting x. y1 -- The starting y. x2 -- The end x. y2 -- The end y. colorindex -- A color index. """ x1 = int(self.scale*x1) y1 = int(self.scale*y1) x2 = int(self.scale*x2) y2 = int(self.scale*y2) pygame.draw.line(self.cam_img, self.color[colorindex], (x1, y1), (x2, y2)) def draw_lozenge(self, x, y, w, h, colorindex): """ desc: Draws a rectangle. arguments: x: desc: X coordinate. type: int y: desc: Y coordinate. type: int w: desc: A width. type: int h: desc: A height. type: int colorindex: desc: A colorindex. type: int """ x = int(self.scale*x) y = int(self.scale*y) w = int(self.scale*w) h = int(self.scale*h) pygame.draw.rect(self.cam_img, self.color[colorindex], (x, y, w, h), 2) def draw_title(self): """ desc: Draws title info. """ y = 0 for line in self.title: surf = self.font.render(line, 0, self.color['font']) self.cam_img.blit(surf, (1, y)) y += 12 def get_mouse_state(self): """ desc: Gets the mouse position and state. returns: desc: A (pos, state) tuple. type: tuple. """ button, pos, time = self.mouse.get_clicked() if button == None: button = -1 if pos == None: pos = self.mouse.get_pos() return pos, button def get_input_key(self): """ Gets an input key. Returns: A list containing a single pylink key identifier. """ # Don't try to collect key presses when the display is no longer # available. This is necessary, because pylink polls key presses during # file transfer, which generally occurs after the display has been # closed.
if not self.display_open: return None try: key, time = self.kb.get_key(keylist=None, timeout='default') except: self.esc_pressed = True key = 'q' if key == None: return None # Escape functions as a 'q' with the additional esc_pressed flag if key == 'escape': key = 'q' self.esc_pressed = True # Process regular keys if key == "return": keycode = pylink.ENTER_KEY self.state = None elif key == "space": keycode = ord(" ") elif key == "q": keycode = pylink.ESC_KEY self.state = None elif key == "c": keycode = ord("c") self.state = "calibration" elif key == "v": keycode = ord("v") self.state = "validation" elif key == "a": keycode = ord("a") elif key == "i": self.extra_info = not self.extra_info keycode = 0 elif key == "up": keycode = pylink.CURS_UP elif key == "down": keycode = pylink.CURS_DOWN elif key == "left": keycode = pylink.CURS_LEFT elif key == "right": keycode = pylink.CURS_RIGHT else: keycode = 0 # Convert key to PyLink keycode and return return [pylink.KeyInput(keycode, 0)] # 0 = pygame.KMOD_NONE def exit_image_display(self): """Exits the image display.""" self.clear_cal_display() def alert_printf(self, msg): """ Prints an alert message. Arguments: msg -- The message to be printed. """ print("eyelink_graphics.alert_printf(): %s" % msg) def setup_image_display(self, width, height): """ Initializes the buffer that will contain the camera image. Arguments: width -- The width of the image. height -- The height of the image. """ self.size = width, height self.clear_cal_display() self.last_mouse_state = -1 self.imagebuffer = self.new_array() def image_title(self, text): """ Sets the current image title. Arguments: text -- An image title. """ while ': ' in text: text = text.replace(': ', ':') self.title = text.split() def draw_image_line(self, width, line, totlines, buff): """ Draws a single eye video frame, line by line. Arguments: width -- Width of the video. line -- Line nr of current line. totlines -- Total lines in video. buff -- Frame buffer. The full image is (usually?) 192x160 px. """ # If the buffer hasn't been filled yet, add a line. for i in range(width): try: self.imagebuffer.append(self.pal[buff[i]]) except: pass # If the buffer is full, push it to the display. if line == totlines: self.scale = totlines/320. self._size = int(self.scale*self.size[0]), int( self.scale*self.size[1]) # Convert the image buffer to a pygame image, save it ... self.cam_img = pygame.image.fromstring(self.imagebuffer.tostring(), self._size, 'RGBX') if self.extra_info: self.draw_cross_hair() self.draw_title() pygame.image.save(self.cam_img, self.tmp_file) # ... and then show the image. self.screen.clear() self.screen.draw_image(self.tmp_file, scale=1.5/self.scale) self.display.fill(self.screen) self.display.show() # Clear the buffer for the next round! self.imagebuffer = self.new_array() def set_image_palette(self, r, g, b): """ Sets the image palette. TODO: What this function actually does is highly mysterious. Figure it out! Arguments: r -- The red channel. g -- The green channel. b -- The blue channel. """ self.imagebuffer = self.new_array() self.clear_cal_display() sz = len(r) i = 0 self.pal = [] while i < sz: rf = int(b[i]) gf = int(g[i]) bf = int(r[i]) self.pal.append((rf<<16) | (gf<<8) | (bf)) i += 1
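# A short, self-contained sketch of the palette packing performed in
# set_image_palette above: each palette entry is packed into one integer as
# 0xRRGGBB (the method above reads the channels in reversed order, presumably
# to match the 'RGBX' byte layout expected by pygame.image.fromstring). The
# helper names and the sample palette below are illustrative only.

import array

def pack_palette(r, g, b):
    """Packs per-channel lists into a list of 24-bit colour integers."""
    return [(int(rv) << 16) | (int(gv) << 8) | int(bv)
            for rv, gv, bv in zip(r, g, b)]

def fill_image_line(imagebuffer, palette, line_indices):
    """Appends one line of palette indices to the packed image buffer."""
    for idx in line_indices:
        imagebuffer.append(palette[idx])
    return imagebuffer

if __name__ == "__main__":
    # 4-entry grey-scale palette and a 6-pixel line of indices
    grey = [0, 85, 170, 255]
    palette = pack_palette(grey, grey, grey)
    buf = fill_image_line(array.array('L'), palette, [0, 1, 2, 3, 3, 0])
    print(["0x%06X" % v for v in buf])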
def __init__(self, display, address='192.168.71.50', udpport=49152, logfile=settings.LOGFILE, eventdetection=settings.EVENTDETECTION, saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, blink_threshold=settings.BLINKTHRESH, **args): """Initializes a TobiiProGlassesTracker instance arguments display -- a pygaze.display.Display instance keyword arguments address -- internal ipv4/ipv6 address for Tobii Pro Glasses 2 (default = '192.168.71.50', for IpV6 address use square brackets [fe80::xxxx:xxxx:xxxx:xxxx]) udpport -- UDP port number for Tobii Pro Glasses data streaming (default = 49152) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, TobiiProGlassesTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = settings.DISPSIZE # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.screendist = settings.SCREENDIST # distance between participant and screen in cm self.pixpercm = (self.dispsize[0] / float(self.screensize[0]) + self.dispsize[1] / float(self.screensize[1])) / 2.0 self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # output file properties self.outputfile = logfile self.description = "experiment" # TODO: EXPERIMENT NAME self.participant = "participant" # TODO: PP NAME # eye tracker properties self.eye_used = 0 # 0=left, 1=right, 2=binocular self.left_eye = 0 self.right_eye = 1 self.binocular = 2 self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1, -1) # validation properties self.nvalsamples = 1000 # samples for one validation point # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) self.tobiiglasses = TobiiGlassesController(udpport, address) self.triggers_values = {} self.logging = False self.current_recording_id = None self.current_participant_id = None self.current_project_id = None
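# The trackers above derive their pixel thresholds (fixation, speed,
# acceleration) from values specified in degrees of visual angle, using the
# display's pixels-per-cm and the viewing distance. Below is a minimal sketch
# of that conversion; the exact helpers used by PyGaze may differ in detail,
# the geometry assumes a flat screen viewed straight-on, and the display
# numbers are illustrative only.

import math

def deg2pix(screendist_cm, angle_deg, pixpercm):
    """Visual angle (degrees) -> on-screen size (pixels)."""
    return math.tan(math.radians(angle_deg)) * screendist_cm * pixpercm

def pix2deg(screendist_cm, size_px, pixpercm):
    """On-screen size (pixels) -> visual angle (degrees)."""
    return math.degrees(math.atan2(size_px / pixpercm, screendist_cm))

if __name__ == "__main__":
    dispsize = (1920, 1080)        # pixels (illustrative)
    screensize = (52.0, 29.0)      # cm (illustrative)
    screendist = 65.0              # cm (illustrative)
    pixpercm = (dispsize[0] / screensize[0] + dispsize[1] / screensize[1]) / 2.0
    fixtresh_px = deg2pix(screendist, 1.5, pixpercm)
    print("1.5 deg at %.0f cm ~= %.1f px" % (screendist, fixtresh_px))
    print("round trip: %.3f deg" % pix2deg(screendist, fixtresh_px, pixpercm))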
class EyeTribeTracker(BaseEyeTracker): """A class for EyeTribeTracker objects""" def __init__(self, display, logfile=settings.LOGFILE, eventdetection=settings.EVENTDETECTION, saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, blink_threshold=settings.BLINKTHRESH, **args): """Initializes the EyeTribeTracker object arguments display -- a pygaze.display.Display instance keyword arguments logfile -- logfile name (string value); note that this is the name for the eye data log file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, EyeTribeTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = settings.DISPSIZE # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # output file properties self.outputfile = logfile # eye tracker properties self.connected = False self.recording = False self.errdist = 2 # degrees; maximal error for drift correction self.pxerrdist = 30 # initial error in pixels self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1, -1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # connect to the tracker self.eyetribe = EyeTribe(logfilename=logfile) # get info on the sample rate self.samplerate = self.eyetribe._samplefreq self.sampletime = 1000.0 * self.eyetribe._intsampletime # initiation report self.log("pygaze initiation report start") self.log("display resolution: {}x{}".format(self.dispsize[0], self.dispsize[1])) self.log("display size in cm: {}x{}".format(self.screensize[0], self.screensize[1])) self.log("samplerate: {} Hz".format(self.samplerate)) self.log("sampletime: {} ms".format(self.sampletime)) self.log("fixation threshold: {} degrees".format(self.fixtresh)) self.log("speed threshold: {} degrees/second".format(self.spdtresh)) self.log("acceleration threshold: {} degrees/second**2".format( self.accthresh)) self.log("pygaze initiation report end") def calibrate(self): """Calibrates the eye tracking system arguments None keyword arguments None returns success -- returns True if calibration succeeded, or False if not; in addition a calibration log is added to the log file and some properties are updated (i.e. 
the thresholds for detection algorithms) """ # CALIBRATION # determine the calibration points calibpoints = [] for x in [0.1, 0.5, 0.9]: for y in [0.1, 0.5, 0.9]: calibpoints.append( (int(x * self.dispsize[0]), int(y * self.dispsize[1]))) random.shuffle(calibpoints) # show a message self.screen.clear() self.screen.draw_text( text="Press Space to calibrate, S to skip, and Q to quit", fontsize=20) self.disp.fill(self.screen) self.disp.show() # wait for keyboard input key, keytime = self.kb.get_key(keylist=['q', 's', 'space'], timeout=None, flush=True) if key == 's': return True if key == 'q': quited = True else: quited = False # Pause the processing of samples during the calibration. # self.eyetribe._pause_sample_processing() # run until the user is statisfied, or quits calibrated = False calibresult = None while not quited and not calibrated: # Clear the existing calibration. if self.eyetribe._tracker.get_iscalibrated(): self.eyetribe._lock.acquire(True) self.eyetribe.calibration.clear() self.eyetribe._lock.release() # Wait for a bit. clock.pause(1500) # start a new calibration if not self.eyetribe._tracker.get_iscalibrating(): self.eyetribe._lock.acquire(True) self.eyetribe.calibration.start(pointcount=len(calibpoints)) self.eyetribe._lock.release() # loop through calibration points for cpos in calibpoints: # Check whether the calibration is already done. # (Not sure how or why, but for some reason some data # can persist between calbrations, and the tracker will # simply stop allowing further pointstart requests.) if self.eyetribe._tracker.get_iscalibrated(): break # Draw a calibration target. self.draw_calibration_target(cpos[0], cpos[1]) # wait for a bit to allow participant to start looking at # the calibration point (#TODO: space press?) clock.pause(settings.EYETRIBEPRECALIBDUR) # start calibration of point self.eyetribe._lock.acquire(True) self.eyetribe.calibration.pointstart(cpos[0], cpos[1]) self.eyetribe._lock.release() # wait for a second clock.pause(settings.EYETRIBECALIBDUR) # stop calibration of this point self.eyetribe._lock.acquire(True) self.eyetribe.calibration.pointend() self.eyetribe._lock.release() # check if the Q key has been pressed if self.kb.get_key(keylist=['q'], timeout=10, flush=False)[0] == 'q': # abort calibration self.eyetribe._lock.acquire(True) self.eyetribe.calibration.abort() self.eyetribe._lock.release() # set quited variable and break this for loop quited = True break # retry option if the calibration was aborted if quited: # show retry message self.screen.clear() self.screen.draw_text( "Calibration aborted. 
Press Space to restart or 'Q' to quit", fontsize=20) self.disp.fill(self.screen) self.disp.show() # get input key, keytime = self.kb.get_key(keylist=['q', 'space'], timeout=None, flush=True) if key == 'space': # unset quited Boolean quited = False # skip further processing continue # empty display self.disp.fill() self.disp.show() # allow for a bit of calculation time # (this is waaaaaay too much) clock.pause(1000) # get the calibration result self.eyetribe._lock.acquire(True) calibresult = self.eyetribe._tracker.get_calibresult() self.eyetribe._lock.release() # results # clear the screen self.screen.clear() # draw results for each point if type(calibresult) == dict: for p in calibresult['calibpoints']: # only draw the point if data was obtained if p['state'] > 0: # draw the mean error # self.screen.draw_circle(colour=(252,233,79), # pos=(p['cpx'],p['cpy']), r=p['mepix'], pw=0, # fill=True) self.screen.draw_line(spos=(p['cpx'], p['cpy']), epos=(p['mecpx'], p['mecpy']), pw=2) # draw the point self.screen.draw_fixation(fixtype='dot', colour=(115, 210, 22), pos=(p['cpx'], p['cpy'])) # draw the estimated point self.screen.draw_fixation(fixtype='dot', colour=(32, 74, 135), pos=(p['mecpx'], p['mecpy'])) # annotate accuracy self.screen.draw_text(text="{}".format(\ round(p['acd'], ndigits=2)), pos=(p['cpx']+10,p['cpy']+10), fontsize=20) # if no data was obtained, draw the point in red else: self.screen.draw_fixation(fixtype='dot', colour=(204, 0, 0), pos=(p['cpx'], p['cpy'])) # draw box for averages # self.screen.draw_rect(colour=(238,238,236), x=int(self.dispsize[0]*0.15), y=int(self.dispsize[1]*0.2), w=400, h=200, pw=0, fill=True) # draw result if calibresult['result']: self.screen.draw_text(text="Calibration successful", colour=(0, 255, 0), pos=(int(self.dispsize[0] * 0.5), int(self.dispsize[1] * 0.25)), fontsize=20) else: self.screen.draw_text(text="Calibration failed", colour=(255, 0, 0), pos=(int(self.dispsize[0] * 0.5), int(self.dispsize[1] * 0.25)), fontsize=20) # draw average accuracy self.screen.draw_text( text="Average error = {} degrees".format(round(\ calibresult['deg'], ndigits=2)), \ pos=(int(self.dispsize[0]*0.5),int(self.dispsize[1]*0.25+30)), fontsize=20) # draw input options self.screen.draw_text( text="Press Space to continue or 'R' to restart", pos=(int(self.dispsize[0] * 0.5), int(self.dispsize[1] * 0.25 + 60)), fontsize=20) else: self.screen.draw_text( text="Calibration failed. Press 'R' to try again.", fontsize=20) # show the results self.disp.fill(self.screen) self.disp.show() # wait for input key, keytime = self.kb.get_key(keylist=['space', 'r'], timeout=None, flush=True) # process input if key == 'space': calibrated = True # Continue the processing of samples after the calibration. 
# self.eyetribe._unpause_sample_processing() # calibration failed if the user quited if quited: return False # NOISE CALIBRATION # get all error estimates (pixels) var = [] for p in calibresult['calibpoints']: # only draw the point if data was obtained if p['state'] > 0: var.append(p['mepix']) noise = sum(var) / float(len(var)) self.pxdsttresh = (noise, noise) # AFTERMATH # store some variables pixpercm = (self.dispsize[0] / float(self.screensize[0]) + self.dispsize[1] / float(self.screensize[1])) / 2 screendist = settings.SCREENDIST # calculate thresholds based on tracker settings self.accuracy = ((calibresult['Ldeg'], calibresult['Ldeg']), (calibresult['Rdeg'], calibresult['Rdeg'])) self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm) self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm) self.pxaccuracy = ((deg2pix(screendist, self.accuracy[0][0], pixpercm), deg2pix(screendist, self.accuracy[0][1], pixpercm)), (deg2pix(screendist, self.accuracy[1][0], pixpercm), deg2pix(screendist, self.accuracy[1][1], pixpercm))) self.pxspdtresh = deg2pix(screendist, self.spdtresh / 1000.0, pixpercm) # in pixels per millisecond self.pxacctresh = deg2pix(screendist, self.accthresh / 1000.0, pixpercm) # in pixels per millisecond**2 # calibration report self.log("pygaze calibration report start") self.log("accuracy (degrees): LX={}, LY={}, RX={}, RY={}".format( self.accuracy[0][0], self.accuracy[0][1], self.accuracy[1][0], \ self.accuracy[1][1])) self.log("accuracy (in pixels): LX={}, LY={}, RX={}, RY={}".format( \ self.pxaccuracy[0][0], self.pxaccuracy[0][1], \ self.pxaccuracy[1][0], self.pxaccuracy[1][1])) self.log("precision (RMS noise in pixels): X={}, Y={}".format( \ self.pxdsttresh[0], self.pxdsttresh[1])) self.log("distance between participant and display: {} cm".format( \ screendist)) self.log("fixation threshold: {} pixels".format(self.pxfixtresh)) self.log("speed threshold: {} pixels/ms".format(self.pxspdtresh)) self.log("acceleration threshold: {} pixels/ms**2".format( \ self.pxacctresh)) self.log("pygaze calibration report end") return True def close(self): """Neatly close connection to tracker arguments None returns Nothing -- saves data and sets self.connected to False """ # close connection self.eyetribe.close() self.connected = False def connected(self): """Checks if the tracker is connected arguments None returns connected -- True if connection is established, False if not; sets self.connected to the same value """ res = self.eyetribe._tracker.get_trackerstate() if res == 0: self.connected = True else: self.connected = False return self.connected def drift_correction(self, pos=None, fix_triggered=False): """Performs a drift check arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) fix_triggered -- Boolean indicating if drift check should be performed based on gaze position (fix_triggered = True) or on spacepress (fix_triggered = False) (default = False) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 if fix_triggered: return self.fix_triggered_drift_correction(pos) self.draw_drift_correction_target(pos[0], pos[1]) pressed = False while not pressed: pressed, presstime = self.kb.get_key() if pressed: if pressed == 'escape' or pressed == 'q': print( "libeyetribe.EyeTribeTracker.drift_correction: 'q' or 'escape' pressed" ) return self.calibrate() 
gazepos = self.sample() if ((gazepos[0] - pos[0])**2 + (gazepos[1] - pos[1])**2)**0.5 < self.pxerrdist: return True else: self.errorbeep.play() return False def draw_drift_correction_target(self, x, y): """ Draws the drift-correction target. arguments x -- The X coordinate y -- The Y coordinate """ self.screen.clear() self.screen.draw_fixation(fixtype='dot', colour=settings.FGC, pos=(x, y), pw=0, diameter=12) self.disp.fill(self.screen) self.disp.show() def draw_calibration_target(self, x, y): self.draw_drift_correction_target(x, y) def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30): """Performs a fixation triggered drift correction by collecting a number of samples and calculating the average distance from the fixation position arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) min_samples -- minimal amount of samples after which an average deviation is calculated (default = 10) max_dev -- maximal deviation from fixation in pixels (default = 60) reset_threshold -- if the horizontal or vertical distance in pixels between two consecutive samples is larger than this threshold, the sample collection is reset (default = 30) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ self.draw_drift_correction_target(pos[0], pos[1]) if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 # loop until we have sufficient samples lx = [] ly = [] while len(lx) < min_samples: # pressing escape enters the calibration screen if self.kb.get_key()[0] in ['escape', 'q']: print( "libeyetribe.EyeTribeTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed" ) return self.calibrate() # collect a sample x, y = self.sample() if len(lx) == 0 or x != lx[-1] or y != ly[-1]: # if present sample deviates too much from previous sample, reset counting if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold): lx = [] ly = [] # collect samples else: lx.append(x) ly.append(y) if len(lx) == min_samples: avg_x = sum(lx) / len(lx) avg_y = sum(ly) / len(ly) d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5 if d < max_dev: return True else: lx = [] ly = [] def get_eyetracker_clock_async(self): """Not supported for EyeTribeTracker (yet)""" print("function not supported yet") def log(self, msg): """Writes a message to the log file arguments ms -- a string to include in the log file returns Nothing -- uses native log function of iViewX to include a line in the log file """ self.eyetribe.log_message(msg) def prepare_drift_correction(self, pos): """Not supported for EyeTribeTracker (yet)""" print("function not supported yet") def pupil_size(self): """Return pupil size arguments None returns pupil size -- returns pupil diameter for the eye that is currently being tracked (as specified by self.eye_used) or -1 when no data is obtainable """ # get newest pupil size ps = self.eyetribe.pupil_size() # invalid data if ps == None: return -1 # check if the new pupil size is the same as the previous if ps != self.prevps: # update the pupil size self.prevps = copy.copy(ps) return self.prevps def sample(self): """Returns newest available gaze position arguments None returns sample -- an (x,y) tuple or a (-1,-1) on an error """ # get newest sample s = self.eyetribe.sample() # invalid data if s == (None, None): return (-1, -1) # check if the new sample is the same as the previous if s != 
self.prevsample: # update the current sample self.prevsample = copy.copy(s) return self.prevsample def send_command(self, cmd): """Sends a command to the eye tracker arguments cmd -- the command to be sent to the EyeTribe, which should be a list with the following information: [category, request, values] returns Nothing """ self.eyetribe._connection.request(cmd) def start_recording(self): """Starts recording eye position arguments None returns Nothing -- sets self.recording to True when recording is successfully started """ self.eyetribe.start_recording() self.recording = True def status_msg(self, msg): """Not supported for EyeTribeTracker (yet)""" print("function not supported yet") def stop_recording(self): """Stop recording eye position arguments None returns Nothing -- sets self.recording to False when recording is successfully started """ self.eyetribe.stop_recording() self.recording = False def set_detection_type(self, eventdetection): """Set the event detection type to either PyGaze algorithms, or native algorithms as provided by the manufacturer (only if available: detection type will default to PyGaze if no native functions are available) arguments eventdetection -- a string indicating which detection type should be employed: either 'pygaze' for PyGaze event detection algorithms or 'native' for manufacturers algorithms (only if available; will default to 'pygaze' if no native event detection is available) returns -- detection type for saccades, fixations and blinks in a tuple, e.g. ('pygaze','native','native') when 'native' was passed, but native detection was not available for saccade detection """ if eventdetection in ['pygaze', 'native']: self.eventdetection = eventdetection return ('pygaze', 'pygaze', 'pygaze') def wait_for_event(self, event): """Waits for event arguments event -- an integer event code, one of the following: 3 = STARTBLINK 4 = ENDBLINK 5 = STARTSACC 6 = ENDSACC 7 = STARTFIX 8 = ENDFIX returns outcome -- a self.wait_for_* method is called, depending on the specified event; the return values of corresponding method are returned """ if event == 5: outcome = self.wait_for_saccade_start() elif event == 6: outcome = self.wait_for_saccade_end() elif event == 7: outcome = self.wait_for_fixation_start() elif event == 8: outcome = self.wait_for_fixation_end() elif event == 3: outcome = self.wait_for_blink_start() elif event == 4: outcome = self.wait_for_blink_end() else: raise Exception( "Error in libeyetribe.EyeTribeTracker.wait_for_event: eventcode {} is not supported" .format(event)) return outcome def wait_for_blink_end(self): """Waits for a blink end and returns the blink ending time arguments None returns timestamp -- blink ending time in milliseconds, as measured from experiment begin time """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but EyeTribe does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = True # loop while there is a blink while blinking: # get newest sample gazepos = self.sample() # check if it's valid if self.is_valid_sample(gazepos): # if it is a valid sample, blinking has stopped blinking = False # return timestamp of blink end return clock.get_time() def wait_for_blink_start(self): """Waits for a blink start and returns the blink starting time arguments None returns timestamp -- blink starting time in milliseconds, as measured from experiment begin time """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but EyeTribe does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = False # loop until there is a blink while not blinking: # get newest sample gazepos = self.sample() # check if it's a valid sample if not self.is_valid_sample(gazepos): # get timestamp for possible blink start t0 = clock.get_time() # loop until a blink is determined, or a valid sample occurs while not self.is_valid_sample(self.sample()): # check if time has surpassed BLINKTHRESH if clock.get_time() - t0 >= self.blinkthresh: # return timestamp of blink start return t0 def wait_for_fixation_end(self): """Returns time and gaze position when a fixation has ended; function assumes that a 'fixation' has ended when a deviation of more than self.pxfixtresh from the initial fixation position has been detected (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but EyeTribe does not offer fixation detection; \ PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes that a 'fixation' has ended when a deviation of more than fixtresh # from the initial 'fixation' position has been detected # get starting time and position stime, spos = self.wait_for_fixation_start() # loop until fixation has ended while True: # get new sample npos = self.sample() # get newest sample # check if sample is valid if self.is_valid_sample(npos): # check if sample deviates to much from starting position if (npos[0] - spos[0])**2 + ( npos[1] - spos[1])**2 > self.pxfixtresh**2: # Pythagoras # break loop if deviation is too high break return clock.get_time(), spos def wait_for_fixation_start(self): """Returns starting time and position when a fixation is started; function assumes a 'fixation' has started when gaze position remains reasonably stable (i.e. 
when most deviant samples are within self.pxfixtresh) for five samples in a row (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a fixation start # detection built into their API (only ending) print("WARNING! 'native' event detection has been selected, \ but EyeTribe does not offer fixation detection; \ PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes a 'fixation' has started when gaze position # remains reasonably stable for self.fixtimetresh # get starting position spos = self.sample() while not self.is_valid_sample(spos): spos = self.sample() # get starting time t0 = clock.get_time() # wait for reasonably stable position moving = True while moving: # get new sample npos = self.sample() # check if sample is valid if self.is_valid_sample(npos): # check if new sample is too far from starting position if (npos[0] - spos[0])**2 + ( npos[1] - spos[1])**2 > self.pxfixtresh**2: # Pythagoras # if not, reset starting position and time spos = copy.copy(npos) t0 = clock.get_time() # if new sample is close to starting sample else: # get timestamp t1 = clock.get_time() # check if fixation time threshold has been surpassed if t1 - t0 >= self.fixtimetresh: # return time and starting position return t1, spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a saccade is ended; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos, endpos -- endtime in milliseconds (from expbegintime); startpos and endpos are (x,y) gaze position tuples """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but EyeTribe does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) t0, spos = self.wait_for_saccade_start() # get valid sample prevpos = self.sample() while not self.is_valid_sample(prevpos): prevpos = self.sample() # get starting time, intersample distance, and velocity t1 = clock.get_time() s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])** 2)**0.5 # = intersample distance = speed in px/sample v0 = s / (t1 - t0) # run until velocity and acceleration go below threshold saccadic = True while saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # calculate distance s = ((newpos[0] - prevpos[0])**2 + (newpos[1] - prevpos[1])** 2)**0.5 # = speed in pixels/sample # calculate velocity v1 = s / (t1 - t0) # calculate acceleration a = (v1 - v0) / ( t1 - t0 ) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample) # check if velocity and acceleration are below threshold if v1 < self.pxspdtresh and (a > -1 * self.pxacctresh and a < 0): saccadic = False epos = newpos[:] etime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return etime, spos, epos def wait_for_saccade_start(self): """Returns starting time and starting position when a saccade is started; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos -- endtime in milliseconds (from expbegintime); startpos is an (x,y) gaze position tuple """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but EyeTribe does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) newpos = self.sample() while not self.is_valid_sample(newpos): newpos = self.sample() # get starting time, position, intersampledistance, and velocity t0 = clock.get_time() prevpos = newpos[:] s = 0 v0 = 0 # get samples saccadic = False while not saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # check if distance is larger than precision error sx = newpos[0] - prevpos[0] sy = newpos[1] - prevpos[1] if (sx / self.pxdsttresh[0])**2 + ( sy / self.pxdsttresh[1] )**2 > self.weightdist: # weigthed distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise # calculate distance s = ((sx)**2 + (sy)** 2)**0.5 # intersampledistance = speed in pixels/ms # calculate velocity v1 = s / (t1 - t0) # calculate acceleration a = (v1 - v0) / (t1 - t0) # acceleration in pixels/ms**2 # check if either velocity or acceleration are above threshold values if v1 > self.pxspdtresh or a > self.pxacctresh: saccadic = True spos = prevpos[:] stime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return stime, spos def is_valid_sample(self, gazepos): """Checks if the sample provided is valid, based on EyeTribe specific criteria (for internal use) arguments gazepos -- a (x,y) gaze position tuple, as returned by self.sample() returns valid -- a Boolean: True on a valid sample, False on an invalid sample """ # return False if a sample is invalid if gazepos == (None, None) or gazepos == (-1, -1): return False # in any other case, the sample is valid return True
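# Hedged example (not part of the original module). The calibration
# aftermath above turns degree-based thresholds into pixel thresholds via
# deg2pix(screendist, angle, pixpercm). Below is a minimal, self-contained
# sketch of such a conversion; the exact formula is an assumption based on
# how the helper is used here, not a verbatim copy of pygaze's implementation.
import math

def deg2pix_sketch(cmdist, angle, pixpercm):
    """Converts a visual angle in degrees at viewing distance cmdist (cm) to
    an on-screen distance in pixels, given the display's pixels per cm."""
    return float(cmdist) * math.tan(math.radians(float(angle))) * float(pixpercm)

# example: 2 degrees at 57 cm on a display with ~35 pixels per cm
# deg2pix_sketch(57, 2, 35) -> roughly 70 pixels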
# create instances
# initialize the display
disp = Display()
# initialize a screen
scr = Screen()
# initialize a keyboard
kb = Keyboard(keylist=['space'], timeout=None)
# initialize a mouse
mouse = Mouse(mousebuttonlist=None, timeout=None)
# initialize a sound
snd = Sound(osc='sine', freq=4400, length=3000)
sounds = {
    'a sine wave (slightly oscillating)': Sound(osc='sine', freq=440, length=5000, attack=1000, decay=1000),
    'a saw wave': Sound(osc='saw', freq=880, length=5000, attack=0, decay=0),
    'a square wave': Sound(osc='square', freq=1760, length=5000, attack=0, decay=0),
    'white noise': Sound(osc='whitenoise'),
    'soundfile': Sound(soundfile=soundfile)
    }
# initialize a Timer
timer = Time()
# create a new logfile
log = Logfile(filename="test")
log.write(["test", "time"])
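# A hedged sketch (not part of the original script) of how the test above
# might exercise the sounds dict: show each label, play the sound, and wait
# for a Space press. The loop reuses objects created above (scr, disp, kb,
# log, sounds); Sound.stop() is assumed to exist alongside the Sound.play()
# used elsewhere in this document.
for label, sound in sounds.items():
    scr.clear()
    scr.draw_text(text="Now playing: %s\n\nPress Space to continue." % label)
    disp.fill(scr)
    disp.show()
    sound.play()
    key, presstime = kb.get_key()  # keylist=['space'], timeout=None (set above)
    sound.stop()
    log.write([label, presstime])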
# create instances
# initialize the display
disp = Display()
# initialize a screen
scr = Screen()
# initialize an EyeTracker
tracker = EyeTracker(disp)
# initialize a keyboard
kb = Keyboard(keylist=['space'], timeout=None)
# initialize a sound
snd = Sound(soundfile=soundfile)
# initialize a Timer
timer = Time()
# create a new logfile
log = Logfile(filename="test")
log.write(["test", "time"])

# # # # #
# welcome

scr.draw_text("Welcome to the PyGaze Supertest!\n\nYou're going to be testing \
your PyGaze installation today, using this interactive tool. Press Space \
to start!\n\n\nP.S. If you see this, the following functions work: \
\n- Screen.draw_text \
def __init__(self, libeyelink, tracker): """ Constructor. Arguments: libeyelink -- A libeyelink object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.libeyelink = libeyelink self.display = libeyelink.display self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=0) self.mouse = Mouse(timeout=0) if settings.DISPTYPE == "pygame": self.kb.set_timeout(timeout=0.001) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. # if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), "__eyelink__.jpg") # drawing properties self.xc = self.display.dispsize[0] / 2 self.yc = self.display.dispsize[1] / 2 self.extra_info = True self.ld = 40 # line distance self.fontsize = libeyelink.fontsize self.title = "" self.display_open = True self.draw_menu_screen() # beeps self.__target_beep__ = Sound(osc="sine", freq=440, length=50, attack=0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc="sine", freq=880, length=200, attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc="sine", freq=220, length=200, attack=0, decay=0, soundfile=None) # Colors self.color = { pylink.CR_HAIR_COLOR: pygame.Color("white"), pylink.PUPIL_HAIR_COLOR: pygame.Color("white"), pylink.PUPIL_BOX_COLOR: pygame.Color("green"), pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color("red"), pylink.MOUSE_CURSOR_COLOR: pygame.Color("red"), "font": pygame.Color("white"), } # Font pygame.font.init() self.font = pygame.font.SysFont("Courier New", 11) # further properties self.state = None self.pal = None self.size = (0, 0) self.set_tracker(tracker) self.last_mouse_state = -1 self.bit64 = "64bit" in platform.architecture() self.imagebuffer = self.new_array()
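# Hedged usage sketch (not part of the original module): a custom display
# like the one constructed above is normally registered with pylink so that
# camera setup and calibration graphics are routed through PyGaze. The class
# name EyelinkGraphics is hypothetical (the class defined above is unnamed in
# this excerpt); pylink.EyeLink, pylink.openGraphicsEx and doTrackerSetup are
# standard pylink calls and require an actual EyeLink connection.
import pylink

def open_custom_graphics_sketch(libeyelink, tracker_ip="100.1.1.1"):
    tracker = pylink.EyeLink(tracker_ip)
    graphics = EyelinkGraphics(libeyelink, tracker)  # hypothetical class name
    pylink.openGraphicsEx(graphics)
    tracker.doTrackerSetup()
    return tracker, graphics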
def __init__(self, display, ip='127.0.0.1', sendport=4444, receiveport=5555, logfile=settings.LOGFILE, eventdetection=settings.EVENTDETECTION, saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, **args): """Initializes the SMItracker object arguments display -- a pygaze.display.Display instance keyword arguments ip -- internal ip address for iViewX (default = '127.0.0.1') sendport -- port number for iViewX sending (default = 4444) receiveport -- port number for iViewX receiving (default = 5555) logfile -- logfile name (string value); note that this is the name for the SMI logfile, NOT the .idf file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, SMITracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = settings.DISPSIZE # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # output file properties self.outputfile = logfile self.description = "experiment" # TODO: EXPERIMENT NAME self.participant = "participant" # TODO: PP NAME # eye tracker properties self.connected = False self.recording = False self.eye_used = 0 # 0=left, 1=right, 2=binocular self.left_eye = 0 self.right_eye = 1 self.binocular = 2 self.errdist = 2 # degrees; maximal error for drift correction self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1, -1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # set logger res = iViewXAPI.iV_SetLogger(c_int(1), c_char_p(logfile + '_SMILOG.txt')) if res != 1: err = errorstring(res) raise Exception( "Error in libsmi.SMItracker.__init__: failed to set logger; %s" % err) # first logger argument is for logging type (I'm guessing these are decimal bit codes) # LOG status bitcode # 1 = LOG_LEVEL_BUG 00001 # 2 = LOG_LEVEL_iV_FCT 00010 # 4 = LOG_LEVEL_ETCOM 00100 # 8 = LOG_LEVEL_ALL 01000 # 16 = LOG_LEVEL_IV_COMMAND 10000 # these can be used together, using a bitwise or, e.g.: 1|2|4 (bitcode 00111) # connect to iViewX res = iViewXAPI.iV_Connect(c_char_p(ip), c_int(sendport), c_char_p(ip), c_int(receiveport)) if res == 1: res = iViewXAPI.iV_GetSystemInfo(byref(systemData)) self.samplerate = systemData.samplerate self.sampletime = 1000.0 / self.samplerate if res != 1: err = errorstring(res) raise Exception( "Error in libsmi.SMItracker.__init__: failed 
to get system information; %s" % err) # handle connection errors else: self.connected = False err = errorstring(res) raise Exception( "Error in libsmi.SMItracker.__init__: establishing connection failed; %s" % err) # initiation report self.log("pygaze initiation report start") self.log("experiment: %s" % self.description) self.log("participant: %s" % self.participant) self.log("display resolution: %sx%s" % (self.dispsize[0], self.dispsize[1])) self.log("display size in cm: %sx%s" % (self.screensize[0], self.screensize[1])) self.log("samplerate: %s Hz" % self.samplerate) self.log("sampletime: %s ms" % self.sampletime) self.log("fixation threshold: %s degrees" % self.fixtresh) self.log("speed threshold: %s degrees/second" % self.spdtresh) self.log("acceleration threshold: %s degrees/second**2" % self.accthresh) self.log("pygaze initiation report end")
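# The logger comment above describes iV_SetLogger's first argument as a bit
# field of log levels. A minimal illustration of combining levels with a
# bitwise OR; the constant names are assumptions for readability, only the
# numeric values come from the comment above.
LOG_LEVEL_BUG = 1      # 00001
LOG_LEVEL_iV_FCT = 2   # 00010
LOG_LEVEL_ETCOM = 4    # 00100
loglevel = LOG_LEVEL_BUG | LOG_LEVEL_iV_FCT | LOG_LEVEL_ETCOM  # == 7, bitcode 00111
# the combined value could then be passed as the first argument, e.g.:
# iViewXAPI.iV_SetLogger(c_int(loglevel), c_char_p(logfile + '_SMILOG.txt'))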
class EyeTribeTracker(BaseEyeTracker): """A class for EyeTribeTracker objects""" def __init__(self, display, logfile=LOGFILE, eventdetection=EVENTDETECTION, \ saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, \ **args): """Initializes the EyeTribeTracker object arguments display -- a pygaze.display.Display instance keyword arguments logfile -- logfile name (string value); note that this is the name for the eye data log file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, EyeTribeTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = DISPSIZE # display size in pixels self.screensize = SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw',freq=100, length=100) # output file properties self.outputfile = logfile # eye tracker properties self.connected = False self.recording = False self.errdist = 2 # degrees; maximal error for drift correction self.pxerrdist = 30 # initial error in pixels self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1,-1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # connect to the tracker self.eyetribe = EyeTribe(logfilename=logfile) # get info on the sample rate self.samplerate = self.eyetribe._samplefreq self.sampletime = 1000.0 * self.eyetribe._intsampletime # initiation report self.log("pygaze initiation report start") self.log("display resolution: %sx%s" % (self.dispsize[0],self.dispsize[1])) self.log("display size in cm: %sx%s" % (self.screensize[0],self.screensize[1])) self.log("samplerate: %.2f Hz" % self.samplerate) self.log("sampletime: %.2f ms" % self.sampletime) self.log("fixation threshold: %s degrees" % self.fixtresh) self.log("speed threshold: %s degrees/second" % self.spdtresh) self.log("acceleration threshold: %s degrees/second**2" % self.accthresh) self.log("pygaze initiation report end") def calibrate(self): """Calibrates the eye tracking system arguments None keyword arguments None returns success -- returns True if calibration succeeded, or False if not; in addition a calibration log is added to the log file and some properties are updated (i.e. 
the thresholds for detection algorithms) """ # CALIBRATION # determine the calibration points calibpoints = [] for x in [0.1,0.5,0.9]: for y in [0.1,0.5,0.9]: calibpoints.append((int(x*self.dispsize[0]),int(y*self.dispsize[1]))) random.shuffle(calibpoints) # show a message self.screen.clear() self.screen.draw_text(text="Press Space to start the calibration or Q to quit.") self.disp.fill(self.screen) self.disp.show() # wait for keyboard input key, keytime = self.kb.get_key(keylist=['q','space'], timeout=None, flush=True) if key == 'q': quited = True else: quited = False # run until the user is statisfied, or quits calibrated = False calibresult = None while not quited and not calibrated: # start a new calibration self.eyetribe.calibration.start(pointcount=len(calibpoints)) # loop through calibration points for cpos in calibpoints: self.draw_calibration_target(cpos[0], cpos[1]) # wait for a bit to allow participant to start looking at # the calibration point (#TODO: space press?) clock.pause(1000) # start calibration of point self.eyetribe.calibration.pointstart(cpos[0],cpos[1]) # wait for a second clock.pause(1000) # stop calibration of this point result = self.eyetribe.calibration.pointend() # the final calibration point returns a dict (does it?) if type(result) == dict: calibresult = copy.deepcopy(result) # check if the Q key has been pressed if self.kb.get_key(keylist=['q'],timeout=10,flush=False)[0] == 'q': # abort calibration self.eyetribe.calibration.abort() # set quited variable and break this for loop quited = True break # retry option if the calibration was aborted if quited: # show retry message self.screen.clear() self.screen.draw_text("Calibration aborted. Press Space to restart, or 'Q' to quit.") self.disp.fill(self.screen) self.disp.show() # get input key, keytime = self.kb.get_key(keylist=['q','space'], timeout=None, flush=True) if key == 'space': # unset quited Boolean quited = False # skip further processing continue # get the calibration result if it was not obtained yet if type(calibresult) != dict: # empty display self.disp.fill() self.disp.show() # allow for a bit of calculation time clock.pause(2000) # get the result calibresult = self.eyetribe._tracker.get_calibresult() # results # clear the screen self.screen.clear() # draw results for each point if type(calibresult) == dict: for p in calibresult['calibpoints']: # only draw the point if data was obtained if p['state'] > 0: # draw the mean error self.screen.draw_circle(colour=(252,233,79), pos=(p['cpx'],p['cpy']), r=p['mepix'], pw=0, fill=True) # draw the point self.screen.draw_fixation(fixtype='dot', colour=(115,210,22), pos=(p['cpx'],p['cpy'])) # draw the estimated point self.screen.draw_fixation(fixtype='dot', colour=(32,74,135), pos=(p['mecpx'],p['mecpy'])) # annotate accuracy self.screen.draw_text(text=str(p['acd']), pos=(p['cpx']+10,p['cpy']+10), fontsize=12) # if no data was obtained, draw the point in red else: self.screen.draw_fixation(fixtype='dot', colour=(204,0,0), pos=(p['cpx'],p['cpy'])) # draw box for averages self.screen.draw_rect(colour=(238,238,236), x=int(self.dispsize[0]*0.15), y=int(self.dispsize[1]*0.2), w=400, h=200, pw=0, fill=True) # draw result if calibresult['result']: self.screen.draw_text(text="calibration is successful", colour=(115,210,22), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25)), fontsize=12) else: self.screen.draw_text(text="calibration failed", colour=(204,0,0), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25)), fontsize=12) # draw average accuracy 
self.screen.draw_text(text="average error = %.2f degrees" % (calibresult['deg']), colour=(211,215,207), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25+20)), fontsize=12) # draw input options self.screen.draw_text(text="Press Space to continue, or 'R' to restart.", colour=(211,215,207), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25+40)), fontsize=12) else: self.screen.draw_text(text="Calibration failed, press 'R' to try again.") # show the results self.disp.fill(self.screen) self.disp.show() # wait for input key, keytime = self.kb.get_key(keylist=['space','r'], timeout=None, flush=True) # process input if key == 'space': calibrated = True # calibration failed if the user quited if quited: return False # NOISE CALIBRATION # get all error estimates (pixels) var = [] for p in calibresult['calibpoints']: # only draw the point if data was obtained if p['state'] > 0: var.append(p['mepix']) noise = sum(var) / float(len(var)) self.pxdsttresh = (noise, noise) # AFTERMATH # store some variables pixpercm = (self.dispsize[0]/float(self.screensize[0]) + self.dispsize[1]/float(self.screensize[1])) / 2 screendist = SCREENDIST # calculate thresholds based on tracker settings self.accuracy = ((calibresult['Ldeg'],calibresult['Ldeg']), (calibresult['Rdeg'],calibresult['Rdeg'])) self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm) self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm) self.pxaccuracy = ((deg2pix(screendist, self.accuracy[0][0], pixpercm),deg2pix(screendist, self.accuracy[0][1], pixpercm)), (deg2pix(screendist, self.accuracy[1][0], pixpercm),deg2pix(screendist, self.accuracy[1][1], pixpercm))) self.pxspdtresh = deg2pix(screendist, self.spdtresh/1000.0, pixpercm) # in pixels per millisecond self.pxacctresh = deg2pix(screendist, self.accthresh/1000.0, pixpercm) # in pixels per millisecond**2 # calibration report self.log("pygaze calibration report start") self.log("accuracy (degrees): LX=%s, LY=%s, RX=%s, RY=%s" % (self.accuracy[0][0],self.accuracy[0][1],self.accuracy[1][0],self.accuracy[1][1])) self.log("accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s" % (self.pxaccuracy[0][0],self.pxaccuracy[0][1],self.pxaccuracy[1][0],self.pxaccuracy[1][1])) self.log("precision (RMS noise in pixels): X=%s, Y=%s" % (self.pxdsttresh[0],self.pxdsttresh[1])) self.log("distance between participant and display: %s cm" % screendist) self.log("fixation threshold: %s pixels" % self.pxfixtresh) self.log("speed threshold: %s pixels/ms" % self.pxspdtresh) self.log("acceleration threshold: %s pixels/ms**2" % self.pxacctresh) self.log("pygaze calibration report end") return True def close(self): """Neatly close connection to tracker arguments None returns Nothing -- saves data and sets self.connected to False """ # close connection self.eyetribe.close() self.connected = False def connected(self): """Checks if the tracker is connected arguments None returns connected -- True if connection is established, False if not; sets self.connected to the same value """ res = self.eyetribe._tracker.get_trackerstate() if res == 0: self.connected = True else: self.connected = False return self.connected def drift_correction(self, pos=None, fix_triggered=False): """Performs a drift check arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) fix_triggered -- Boolean indicating if drift check should be performed based on gaze position (fix_triggered = True) or on spacepress (fix_triggered = False) (default = False) returns checked 
-- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 if fix_triggered: return self.fix_triggered_drift_correction(pos) self.draw_drift_correction_target(pos[0], pos[1]) pressed = False while not pressed: pressed, presstime = self.kb.get_key() if pressed: if pressed == 'escape' or pressed == 'q': print("libeyetribe.EyeTribeTracker.drift_correction: 'q' or 'escape' pressed") return self.calibrate() gazepos = self.sample() if ((gazepos[0]-pos[0])**2 + (gazepos[1]-pos[1])**2)**0.5 < self.pxerrdist: return True else: self.errorbeep.play() return False def draw_drift_correction_target(self, x, y): """ Draws the drift-correction target. arguments x -- The X coordinate y -- The Y coordinate """ self.screen.clear() self.screen.draw_fixation(fixtype='dot', colour=FGC, pos=(x,y), pw=0, diameter=12) self.disp.fill(self.screen) self.disp.show() def draw_calibration_target(self, x, y): self.draw_drift_correction_target(x, y) def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30): """Performs a fixation triggered drift correction by collecting a number of samples and calculating the average distance from the fixation position arguments None keyword arguments pos -- (x, y) position of the fixation dot or None for a central fixation (default = None) min_samples -- minimal amount of samples after which an average deviation is calculated (default = 10) max_dev -- maximal deviation from fixation in pixels (default = 60) reset_threshold -- if the horizontal or vertical distance in pixels between two consecutive samples is larger than this threshold, the sample collection is reset (default = 30) returns checked -- Boolaan indicating if drift check is ok (True) or not (False); or calls self.calibrate if 'q' or 'escape' is pressed """ self.draw_drift_correction_target(pos[0], pos[1]) if pos == None: pos = self.dispsize[0] / 2, self.dispsize[1] / 2 # loop until we have sufficient samples lx = [] ly = [] while len(lx) < min_samples: # pressing escape enters the calibration screen if self.kb.get_key()[0] in ['escape','q']: print("libeyetribe.EyeTribeTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed") return self.calibrate() # collect a sample x, y = self.sample() if len(lx) == 0 or x != lx[-1] or y != ly[-1]: # if present sample deviates too much from previous sample, reset counting if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold): lx = [] ly = [] # collect samples else: lx.append(x) ly.append(y) if len(lx) == min_samples: avg_x = sum(lx) / len(lx) avg_y = sum(ly) / len(ly) d = ((avg_x - pos[0]) ** 2 + (avg_y - pos[1]) ** 2)**0.5 if d < max_dev: return True else: lx = [] ly = [] def get_eyetracker_clock_async(self): """Not supported for EyeTribeTracker (yet)""" print("function not supported yet") def log(self, msg): """Writes a message to the log file arguments ms -- a string to include in the log file returns Nothing -- uses native log function of iViewX to include a line in the log file """ self.eyetribe.log_message(msg) def log_var(self, var, val): """Writes a variable to the log file arguments var -- variable name val -- variable value returns Nothing -- uses native log function of iViewX to include a line in the log file in a "var NAME VALUE" layout """ msg = "var %s %s" % (var, val) self.log(msg) def prepare_drift_correction(self, pos): """Not supported for EyeTribeTracker 
(yet)""" print("function not supported yet") def pupil_size(self): """Return pupil size arguments None returns pupil size -- returns pupil diameter for the eye that is currently being tracked (as specified by self.eye_used) or -1 when no data is obtainable """ # get newest pupil size ps = self.eyetribe.pupil_size() # invalid data if ps == None: return -1 # check if the new pupil size is the same as the previous if ps != self.prevps: # update the pupil size self.prevps = copy.copy(ps) return self.prevps def sample(self): """Returns newest available gaze position arguments None returns sample -- an (x,y) tuple or a (-1,-1) on an error """ # get newest sample s = self.eyetribe.sample() # invalid data if s == (None,None): return (-1,-1) # check if the new sample is the same as the previous if s != self.prevsample: # update the current sample self.prevsample = copy.copy(s) return self.prevsample def send_command(self, cmd): """Sends a command to the eye tracker arguments cmd -- the command to be sent to the EyeTribe, which should be a list with the following information: [category, request, values] returns Nothing """ self.eyetribe._connection.request(cmd) def start_recording(self): """Starts recording eye position arguments None returns Nothing -- sets self.recording to True when recording is successfully started """ self.eyetribe.start_recording() self.recording = True def status_msg(self, msg): """Not supported for EyeTribeTracker (yet)""" print("function not supported yet") def stop_recording(self): """Stop recording eye position arguments None returns Nothing -- sets self.recording to False when recording is successfully started """ self.eyetribe.stop_recording() self.recording = False def set_detection_type(self, eventdetection): """Set the event detection type to either PyGaze algorithms, or native algorithms as provided by the manufacturer (only if available: detection type will default to PyGaze if no native functions are available) arguments eventdetection -- a string indicating which detection type should be employed: either 'pygaze' for PyGaze event detection algorithms or 'native' for manufacturers algorithms (only if available; will default to 'pygaze' if no native event detection is available) returns -- detection type for saccades, fixations and blinks in a tuple, e.g. 
('pygaze','native','native') when 'native' was passed, but native detection was not available for saccade detection """ if eventdetection in ['pygaze','native']: self.eventdetection = eventdetection return ('pygaze','pygaze','pygaze') def wait_for_event(self, event): """Waits for event arguments event -- an integer event code, one of the following: 3 = STARTBLINK 4 = ENDBLINK 5 = STARTSACC 6 = ENDSACC 7 = STARTFIX 8 = ENDFIX returns outcome -- a self.wait_for_* method is called, depending on the specified event; the return values of corresponding method are returned """ if event == 5: outcome = self.wait_for_saccade_start() elif event == 6: outcome = self.wait_for_saccade_end() elif event == 7: outcome = self.wait_for_fixation_start() elif event == 8: outcome = self.wait_for_fixation_end() elif event == 3: outcome = self.wait_for_blink_start() elif event == 4: outcome = self.wait_for_blink_end() else: raise Exception("Error in libsmi.SMItracker.wait_for_event: eventcode %s is not supported" % event) return outcome def wait_for_blink_end(self): """Waits for a blink end and returns the blink ending time arguments None returns timestamp -- blink ending time in milliseconds, as measured from experiment begin time """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but EyeTribe does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = True # loop while there is a blink while blinking: # get newest sample gazepos = self.sample() # check if it's valid if self.is_valid_sample(gazepos): # if it is a valid sample, blinking has stopped blinking = False # return timestamp of blink end return clock.get_time() def wait_for_blink_start(self): """Waits for a blink start and returns the blink starting time arguments None returns timestamp -- blink starting time in milliseconds, as measured from experiment begin time """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 'native' event detection has been selected, \ but EyeTribe does not offer blink detection; PyGaze algorithm \ will be used") # # # # # # PyGaze method blinking = False # loop until there is a blink while not blinking: # get newest sample gazepos = self.sample() # check if it's a valid sample if not self.is_valid_sample(gazepos): # get timestamp for possible blink start t0 = clock.get_time() # loop until a blink is determined, or a valid sample occurs while not self.is_valid_sample(self.sample()): # check if time has surpassed 150 ms if clock.get_time()-t0 >= 150: # return timestamp of blink start return t0 def wait_for_fixation_end(self): """Returns time and gaze position when a fixation has ended; function assumes that a 'fixation' has ended when a deviation of more than self.pxfixtresh from the initial fixation position has been detected (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but EyeTribe does not offer fixation detection; \ PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes that a 'fixation' has ended when a deviation of more than fixtresh # from the initial 'fixation' position has been detected # get starting time and position stime, spos = self.wait_for_fixation_start() # loop until fixation has ended while True: # get new sample npos = self.sample() # get newest sample # check if sample is valid if self.is_valid_sample(npos): # check if sample deviates to much from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # break loop if deviation is too high break return clock.get_time(), spos def wait_for_fixation_start(self): """Returns starting time and position when a fixation is started; function assumes a 'fixation' has started when gaze position remains reasonably stable (i.e. when most deviant samples are within self.pxfixtresh) for five samples in a row (self.pxfixtresh is created in self.calibration, based on self.fixtresh, a property defined in self.__init__) arguments None returns time, gazepos -- time is the starting time in milliseconds (from expstart), gazepos is a (x,y) gaze position tuple of the position from which the fixation was initiated """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a fixation start # detection built into their API (only ending) print("WARNING! 'native' event detection has been selected, \ but EyeTribe does not offer fixation detection; \ PyGaze algorithm will be used") # # # # # # PyGaze method # function assumes a 'fixation' has started when gaze position # remains reasonably stable for self.fixtimetresh # get starting position spos = self.sample() while not self.is_valid_sample(spos): spos = self.sample() # get starting time t0 = clock.get_time() # wait for reasonably stable position moving = True while moving: # get new sample npos = self.sample() # check if sample is valid if self.is_valid_sample(npos): # check if new sample is too far from starting position if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras # if not, reset starting position and time spos = copy.copy(npos) t0 = clock.get_time() # if new sample is close to starting sample else: # get timestamp t1 = clock.get_time() # check if fixation time threshold has been surpassed if t1 - t0 >= self.fixtimetresh: # return time and starting position return t1, spos def wait_for_saccade_end(self): """Returns ending time, starting and end position when a saccade is ended; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos, endpos -- endtime in milliseconds (from expbegintime); startpos and endpos are (x,y) gaze position tuples """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but EyeTribe does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) t0, spos = self.wait_for_saccade_start() # get valid sample prevpos = self.sample() while not self.is_valid_sample(prevpos): prevpos = self.sample() # get starting time, intersample distance, and velocity t1 = clock.get_time() s = ((prevpos[0]-spos[0])**2 + (prevpos[1]-spos[1])**2)**0.5 # = intersample distance = speed in px/sample v0 = s / (t1-t0) # run until velocity and acceleration go below threshold saccadic = True while saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # calculate distance s = ((newpos[0]-prevpos[0])**2 + (newpos[1]-prevpos[1])**2)**0.5 # = speed in pixels/sample # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample) # check if velocity and acceleration are below threshold if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and a < 0): saccadic = False epos = newpos[:] etime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return etime, spos, epos def wait_for_saccade_start(self): """Returns starting time and starting position when a saccade is started; based on Dalmaijer et al. (2013) online saccade detection algorithm arguments None returns endtime, startpos -- endtime in milliseconds (from expbegintime); startpos is an (x,y) gaze position tuple """ # # # # # # EyeTribe method if self.eventdetection == 'native': # print warning, since EyeTribe does not have a blink detection # built into their API print("WARNING! 
'native' event detection has been selected, \ but EyeTribe does not offer saccade detection; PyGaze \ algorithm will be used") # # # # # # PyGaze method # get starting position (no blinks) newpos = self.sample() while not self.is_valid_sample(newpos): newpos = self.sample() # get starting time, position, intersampledistance, and velocity t0 = clock.get_time() prevpos = newpos[:] s = 0 v0 = 0 # get samples saccadic = False while not saccadic: # get new sample newpos = self.sample() t1 = clock.get_time() if self.is_valid_sample(newpos) and newpos != prevpos: # check if distance is larger than precision error sx = newpos[0]-prevpos[0]; sy = newpos[1]-prevpos[1] if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 > self.weightdist: # weigthed distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise # calculate distance s = ((sx)**2 + (sy)**2)**0.5 # intersampledistance = speed in pixels/ms # calculate velocity v1 = s / (t1-t0) # calculate acceleration a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2 # check if either velocity or acceleration are above threshold values if v1 > self.pxspdtresh or a > self.pxacctresh: saccadic = True spos = prevpos[:] stime = clock.get_time() # update previous values t0 = copy.copy(t1) v0 = copy.copy(v1) # udate previous sample prevpos = newpos[:] return stime, spos def is_valid_sample(self, gazepos): """Checks if the sample provided is valid, based on EyeTribe specific criteria (for internal use) arguments gazepos -- a (x,y) gaze position tuple, as returned by self.sample() returns valid -- a Boolean: True on a valid sample, False on an invalid sample """ # return False if a sample is invalid if gazepos == (None,None) or gazepos == (-1,-1): return False # in any other case, the sample is valid return True
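# A standalone, hedged restatement (not part of the original module) of the
# saccade-onset rule used in wait_for_saccade_start above, following
# Dalmaijer et al. (2013): the displacement must first exceed the RMS-noise-
# weighted distance, and then either velocity or acceleration must exceed
# its threshold. All values are passed in explicitly.
def is_saccade_onset(dx, dy, dt, v0, pxdsttresh, weightdist, pxspdtresh, pxacctresh):
    """Returns (saccadic, v1): whether the displacement (dx, dy) in pixels
    over dt milliseconds qualifies as a saccade onset, and the new velocity
    in pixels/ms (unchanged if the movement stayed within the noise)."""
    # movement must be larger than the weighted RMS noise
    if (dx / pxdsttresh[0])**2 + (dy / pxdsttresh[1])**2 <= weightdist:
        return False, v0
    s = (dx**2 + dy**2)**0.5    # intersample distance in pixels
    v1 = s / dt                 # velocity in pixels/ms
    a = (v1 - v0) / dt          # acceleration in pixels/ms**2
    return (v1 > pxspdtresh or a > pxacctresh), v1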
def __init__(self, display, logfile=settings.LOGFILE, \ alea_key=settings.ALEAKEY, \ animated_calibration=settings.ALEAANIMATEDCALIBRATION, \ eventdetection=settings.EVENTDETECTION, \ saccade_velocity_threshold=35, \ saccade_acceleration_threshold=9500, \ blink_threshold=settings.BLINKTHRESH, \ **args): """Initializes the AleaTracker object arguments display -- a pygaze.display.Display instance keyword arguments logfile -- logfile name (string value); note that this is the name for the eye data log file (default = LOGFILE) """ # try to copy docstrings (but ignore it if it fails, as we do # not need it for actual functioning of the code) try: copy_docstr(BaseEyeTracker, AleaTracker) except: # we're not even going to show a warning, since the copied # docstring is useful for code editors; these load the docs # in a non-verbose manner, so warning messages would be lost pass # object properties self.disp = display self.screen = Screen() self.dispsize = self.disp.dispsize # display size in pixels self.screensize = settings.SCREENSIZE # display size in cm self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1) self.errorbeep = Sound(osc='saw', freq=100, length=100) # show a message self.screen.clear() self.screen.draw_text( text="Initialising the eye tracker, please wait...", fontsize=20) self.disp.fill(self.screen) self.disp.show() # output file properties self.outputfile = logfile + '.tsv' # calibration properties self.animated_calibration = animated_calibration == True # eye tracker properties self.connected = False self.recording = False self.errdist = 2 # degrees; maximal error for drift correction self.pxerrdist = 30 # initial error in pixels self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording) self.prevsample = (-1, -1) self.prevps = -1 # event detection properties self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped) self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method self.eventdetection = eventdetection self.set_detection_type(self.eventdetection) self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected) # connect to the tracker self.alea = OGAleaTracker(alea_key, file_path=self.outputfile) # get info on the sample rate # TODO: Compute after streaming some samples? 
self.samplerate = 60.0 self.sampletime = 1000.0 / self.samplerate # initiation report self.log("pygaze initiation report start") self.log("display resolution: {}x{}".format( \ self.dispsize[0], self.dispsize[1])) self.log("display size in cm: {}x{}".format( \ self.screensize[0], self.screensize[1])) self.log("samplerate: {} Hz".format(self.samplerate)) self.log("sampletime: {} ms".format(self.sampletime)) self.log("fixation threshold: {} degrees".format(self.fixtresh)) self.log("speed threshold: {} degrees/second".format(self.spdtresh)) self.log("acceleration threshold: {} degrees/second**2".format( \ self.accthresh)) self.log("pygaze initiation report end")
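# The TODO above notes that the Alea sample rate is hard-coded to 60 Hz and
# could instead be computed after streaming some samples. A hedged sketch of
# how that might look, given a list of sample timestamps in milliseconds;
# the helper and its input are hypothetical and not part of the Alea API.
def estimate_samplerate(timestamps_ms):
    """Estimates the sampling rate in Hz from consecutive sample timestamps."""
    if len(timestamps_ms) < 2:
        raise ValueError("need at least two timestamps to estimate a rate")
    intervals = [t1 - t0 for t0, t1 in zip(timestamps_ms[:-1], timestamps_ms[1:])]
    mean_interval = sum(intervals) / float(len(intervals))  # ms per sample
    return 1000.0 / mean_interval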
def __init__(self, libeyelink, tracker): """ Constructor. Arguments: libeyelink -- A libeyelink object. tracker -- An tracker object as returned by pylink.EyeLink(). """ pylink.EyeLinkCustomDisplay.__init__(self) # objects self.libeyelink = libeyelink self.display = libeyelink.display self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False) self.kb = Keyboard(keylist=None, timeout=1) self.mouse = Mouse(timeout=1) # If we are using a DISPTYPE that cannot be used directly, we have to # save the camera image to a temporary file on each frame. #if DISPTYPE not in ('pygame', 'psychopy'): import tempfile import os self.tmp_file = os.path.join(tempfile.gettempdir(), "__eyelink__.jpg") # drawing properties self.xc = self.display.dispsize[0] / 2 self.yc = self.display.dispsize[1] / 2 self.extra_info = True self.ld = 40 # line distance self.fontsize = libeyelink.fontsize self.title = "" self.display_open = True self.draw_menu_screen() # A crosshair is drawn onto the eye image. This should be scaled in # pylink 1.1.0.5 (tested on Python 2.7) but not on pylink 1.11.0.0 # (tested on Python 3.6). I'm not sure when this change happened, so # it's quite likely we'll have to update the minor version used here. pl_version = pylink.__version__.split(".") if int(pl_version[0]) > 1 or int(pl_version[1]) >= 11: self.scale_lines_in_eye_image = False else: self.scale_lines_in_eye_image = True # Beeps self.__target_beep__ = Sound(osc="sine", freq=440, length=50, attack=0, decay=0, soundfile=None) self.__target_beep__done__ = Sound(osc="sine", freq=880, length=200, attack=0, decay=0, soundfile=None) self.__target_beep__error__ = Sound(osc="sine", freq=220, length=200, attack=0, decay=0, soundfile=None) # Colors self.color = { pylink.CR_HAIR_COLOR: pygame.Color("white"), pylink.PUPIL_HAIR_COLOR: pygame.Color("white"), pylink.PUPIL_BOX_COLOR: pygame.Color("green"), pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color("red"), pylink.MOUSE_CURSOR_COLOR: pygame.Color("red"), 'font': pygame.Color("white"), } # Font pygame.font.init() self.font = pygame.font.SysFont("Courier New", 11) # further properties self.state = None self.pal = None self.size = (0, 0) self.set_tracker(tracker) self.last_mouse_state = -1 self.bit64 = "64bit" in platform.architecture() self.imagebuffer = self.new_array()
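# The constructor above decides whether crosshair lines in the camera image
# need scaling by comparing pylink.__version__ against 1.11. The same check,
# isolated into a small helper for clarity; the function name is not part of
# the original file.
def scale_lines_in_eye_image(pl_version_string):
    """Returns False for pylink versions where lines are no longer scaled
    (major > 1, or minor >= 11), True otherwise."""
    pl_version = pl_version_string.split(".")
    return not (int(pl_version[0]) > 1 or int(pl_version[1]) >= 11)

# e.g. scale_lines_in_eye_image("1.1.0.5")  -> True
#      scale_lines_in_eye_image("1.11.0.0") -> False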