Example #1
	def __init__(self, display):

		"""Initiates an eyetracker dummy object, that simulates gaze position using the mouse
		
		arguments
		display		--	a pygaze display.Display instance
		
		keyword arguments
		None
		"""

		# try to copy docstrings (but ignore it if it fails, as we do
		# not need it for actual functioning of the code)
		try:
			copy_docstr(BaseEyeTracker, Dummy)
		except:
			# we're not even going to show a warning, since the copied
			# docstring is useful for code editors; these load the docs
			# in a non-verbose manner, so warning messages would be lost
			pass

		self.recording = False
		self.blinking = False
		self.bbpos = (settings.DISPSIZE[0]/2, settings.DISPSIZE[1]/2)
		self.resolution = settings.DISPSIZE[:]
		self.simulator = Mouse(disptype=settings.DISPTYPE, mousebuttonlist=None,
			timeout=2, visible=False)
		self.kb = Keyboard(disptype=settings.DISPTYPE, keylist=None,
			timeout=None)
		self.angrybeep = Sound(osc='saw',freq=100, length=100, attack=0,
			decay=0, soundfile=None)
		self.display = display
		self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False)
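
A minimal usage sketch (not part of the example above; it assumes the standard PyGaze wrapper API and default settings):

from pygaze.display import Display
from pygaze.eyetracker import EyeTracker

disp = Display()
# 'dummy' selects the mouse-simulated tracker shown above (assumed keyword value)
tracker = EyeTracker(disp, trackertype='dummy')
tracker.start_recording()
x, y = tracker.sample()  # the mouse position stands in for the gaze position
tracker.stop_recording()
tracker.close()
disp.close()
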
Example #2
    def confirm_abort_experiment(self):
        """
		Asks for confirmation before aborting the experiment. Displays a
		confirmation screen, collects the response, and acts accordingly.

		Exceptions:
		Raises an Exception upon confirmation.

		Returns:
		False if no confirmation was given.
		"""

        # Display the confirmation screen
        scr = Screen(disptype=DISPTYPE)
        kb = Keyboard(timeout=5000)
        yc = DISPSIZE[1] / 2
        xc = DISPSIZE[0] / 2
        ld = 40  # Line height
        scr.draw_text(u'Really abort experiment?', pos=(xc, yc - 3 * ld))
        scr.draw_text(u'Press \'Y\' to abort', pos=(xc, yc - 0.5 * ld))
        scr.draw_text(u'Press any other key or wait 5s to go to setup', \
         pos=(xc, yc+0.5*ld))
        self.display.fill(scr)
        self.display.show()
        # process the response:
        try:
            key, time = kb.get_key()
        except:
            return False
        # if confirmation, close experiment
        if key == u'y':
            raise Exception(u'The experiment was aborted')
        self.eyelink_graphics.esc_pressed = False
        return False
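
For context, this method is typically called from the calibration loop when Escape was pressed during tracker setup; the pattern below is excerpted from the calibrate() method in Example #8.

while True:
    self.eyelink_graphics.esc_pressed = False
    pylink.getEYELINK().doTrackerSetup()
    if not self.eyelink_graphics.esc_pressed:
        break
    self.confirm_abort_experiment()
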
Example #4
    def __init__(self, display):
        """Initiates a 'dumb dummy' object, that doesn't do a thing
		
		arguments
		display		--	a pygaze display.Display instance
		
		keyword arguments
		None
		"""

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, DumbDummy)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        self.recording = False
        self.blinking = False
        self.bbpos = (settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2)

        self.display = display
        self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False)
Example #6
	def draw_menu_screen(self):

		"""
		desc:
			Draws the menu screen.
		"""

		self.menuscreen = Screen(disptype=settings.DISPTYPE, mousevisible=False)
		self.menuscreen.draw_text(text="Eyelink calibration menu",
			pos=(self.xc,self.yc-6*self.ld), center=True, font='mono',
			fontsize=int(2*self.fontsize), antialias=True)
		self.menuscreen.draw_text(text="%s (pygaze %s, pylink %s)" \
			% (self.libeyelink.eyelink_model, pygaze.version,
			pylink.__version__), pos=(self.xc,self.yc-5*self.ld), center=True,
			font='mono', fontsize=int(.8*self.fontsize), antialias=True)
		self.menuscreen.draw_text(text="Press C to calibrate",
			pos=(self.xc, self.yc-3*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press V to validate",
			pos=(self.xc, self.yc-2*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press A to auto-threshold",
			pos=(self.xc,self.yc-1*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press I to toggle extra info in camera image",
			pos=(self.xc,self.yc-0*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press Enter to show camera image",
			pos=(self.xc,self.yc+1*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(
			text="(then change between images using the arrow keys)",
			pos=(self.xc, self.yc+2*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press Escape to abort experiment",
			pos=(self.xc, self.yc+4*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press Q to exit menu",
			pos=(self.xc, self.yc+5*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
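
The method above only draws into self.menuscreen; presenting it would follow the same Display pattern used in the other examples here, roughly as sketched below (assuming the surrounding class holds a pygaze Display instance as self.display):

self.display.fill(self.menuscreen)
self.display.show()
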
Example #7
class SMItracker(BaseEyeTracker):
    """A class for SMI eye tracker objects"""
    def __init__(self,
                 display,
                 ip='127.0.0.1',
                 sendport=4444,
                 receiveport=5555,
                 logfile=settings.LOGFILE,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 **args):
        """Initializes the SMItracker object
		
		arguments
		display	-- a pygaze.display.Display instance
		
		keyword arguments
		ip		-- internal ip address for iViewX (default = 
				   '127.0.0.1')
		sendport	-- port number for iViewX sending (default = 4444)
		receiveport	-- port number for iViewX receiving (default = 5555)
		logfile	-- logfile name (string value); note that this is the
				   name for the SMI logfile, NOT the .idf file
				   (default = LOGFILE)
		"""

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, SMItracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        # object properties
        self.disp = display
        self.screen = Screen()
        self.dispsize = settings.DISPSIZE  # display size in pixels
        self.screensize = settings.SCREENSIZE  # display size in cm
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # output file properties
        self.outputfile = logfile
        self.description = "experiment"  # TODO: EXPERIMENT NAME
        self.participant = "participant"  # TODO: PP NAME

        # eye tracker properties
        self.connected = False
        self.recording = False
        self.eye_used = 0  # 0=left, 1=right, 2=binocular
        self.left_eye = 0
        self.right_eye = 1
        self.binocular = 2
        self.errdist = 2  # degrees; maximal error for drift correction
        self.maxtries = 100  # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.eventdetection = eventdetection
        self.set_detection_type(self.eventdetection)
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

        # set logger
        res = iViewXAPI.iV_SetLogger(c_int(1),
                                     c_char_p(logfile + '_SMILOG.txt'))
        if res != 1:
            err = errorstring(res)
            raise Exception(
                "Error in libsmi.SMItracker.__init__: failed to set logger; %s"
                % err)
        # first logger argument is for logging type (I'm guessing these are decimal bit codes)
        # LOG status					bitcode
        # 1 = LOG_LEVEL_BUG			 00001
        # 2 = LOG_LEVEL_iV_FCT		  00010
        # 4 = LOG_LEVEL_ETCOM		   00100
        # 8 = LOG_LEVEL_ALL			 01000
        # 16 = LOG_LEVEL_IV_COMMAND	 10000
        # these can be used together, using a bitwise or, e.g.: 1|2|4 (bitcode 00111)

        # connect to iViewX
        res = iViewXAPI.iV_Connect(c_char_p(ip), c_int(sendport), c_char_p(ip),
                                   c_int(receiveport))
        if res == 1:
            res = iViewXAPI.iV_GetSystemInfo(byref(systemData))
            self.samplerate = systemData.samplerate
            self.sampletime = 1000.0 / self.samplerate
            if res != 1:
                err = errorstring(res)
                raise Exception(
                    "Error in libsmi.SMItracker.__init__: failed to get system information; %s"
                    % err)
        # handle connection errors
        else:
            self.connected = False
            err = errorstring(res)
            raise Exception(
                "Error in libsmi.SMItracker.__init__: establishing connection failed; %s"
                % err)

        # initiation report
        self.log("pygaze initiation report start")
        self.log("experiment: %s" % self.description)
        self.log("participant: %s" % self.participant)
        self.log("display resolution: %sx%s" %
                 (self.dispsize[0], self.dispsize[1]))
        self.log("display size in cm: %sx%s" %
                 (self.screensize[0], self.screensize[1]))
        self.log("samplerate: %s Hz" % self.samplerate)
        self.log("sampletime: %s ms" % self.sampletime)
        self.log("fixation threshold: %s degrees" % self.fixtresh)
        self.log("speed threshold: %s degrees/second" % self.spdtresh)
        self.log("acceleration threshold: %s degrees/second**2" %
                 self.accthresh)
        self.log("pygaze initiation report end")

    def calibrate(self, calibrate=True, validate=True):
        """Calibrates the eye tracking system
		
		arguments
		None
		
		keyword arguments
		calibrate	-- Boolean indicating if calibration should be
				   performed (default = True)
		validate	-- Boolean indicating if validation should be performed
				   (default = True)
		
		returns
		success	-- returns True if calibration succeeded, or False if
				   not; in addition a calibration log is added to the
				   log file and some properties are updated (i.e. the
				   thresholds for detection algorithms)
		"""

        # TODO:
        # add feedback for calibration (e.g. with iV_GetAccuracyImage (struct ImageStruct * imageData) for accuracy and iV_GetEyeImage for cool eye pictures)
        # example: res = iViewXAPI.iV_GetEyeImage(byref(imageData))
        # ImageStruct has four data fields:
        # imageHeight	-- int vertical size (px)
        # imageWidth	-- int horizontal size (px)
        # imageSize		-- int image data size (byte)
        # imageBuffer	-- pointer to image data (I have NO idea what format this is in)

        # configure calibration (NOT starting it)
        # CCalibration arguments: method (i.e. number of points), visualization,
        # display, speed, auto, fg, bg, shape, size, filename
        calibrationData = CCalibration(9, 1, 0, 1, 1, 0, 127, 1, 15, b"")

        # setup calibration
        res = iViewXAPI.iV_SetupCalibration(byref(calibrationData))
        if res != 1:
            err = errorstring(res)
            raise Exception(
                "Error in libsmi.SMItracker.calibrate: failed to setup calibration; %s"
                % err)

        # calibrate
        cres = iViewXAPI.iV_Calibrate()

        # validate if calibration returns success
        if cres == 1:
            cerr = None
            vres = iViewXAPI.iV_Validate()
            # handle validation errors
            if vres != 1:
                verr = errorstring(vres)
            else:
                verr = None
##				# TEST #
##				res = iViewXAPI.iV_GetAccuracyImage(byref(imageData))
##				self.log("IMAGEBUFFERSTART")
##				self.log(imageData.imageBuffer)
##				self.log("IMAGEBUFFERSTOP")
##				print("Image height: %s, image width: %s, image size: %s" % (imageData.imageHeight,imageData.imageWidth, imageData.imageSize))
##				print imageData.imageBuffer
##				########
        # handle calibration errors
        else:
            cerr = errorstring(cres)

        # return success
        if cerr == None:
            print("libsmi.SMItracker.calibrate: calibration was successful")
            if verr == None:
                print("libsmi.SMItracker.calibrate: validation was successful")

                # present instructions
                self.disp.fill()  # clear display
                self.screen.draw_text(
                    text=
                    "Noise calibration: please look at the dot\n\n(press space to start)",
                    pos=(self.dispsize[0] / 2, int(self.dispsize[1] * 0.2)),
                    center=True)
                self.screen.draw_fixation(fixtype='dot')
                self.disp.fill(self.screen)
                self.disp.show()
                self.screen.clear()  # clear screen again

                # wait for spacepress
                self.kb.get_key(keylist=['space'], timeout=None)

                # show fixation
                self.disp.fill()
                self.screen.draw_fixation(fixtype='dot')
                self.disp.fill(self.screen)
                self.disp.show()
                self.screen.clear()

                # wait for a bit, to allow participant to fixate
                clock.pause(500)

                # get samples
                sl = [
                    self.sample()
                ]  # samplelist, prefilled with 1 sample to prevent sl[-1] from producing an error; first sample will be ignored for RMS calculation
                t0 = clock.get_time()  # starting time
                while clock.get_time() - t0 < 1000:
                    s = self.sample()  # sample
                    if s != sl[-1] and s != (-1, -1) and s != (0, 0):
                        sl.append(s)
                # calculate RMS noise
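                # RMS here is the square root of the mean of the squared
                # successive-sample differences, computed separately for the
                # horizontal and vertical components; it serves as a precision
                # (noise) estimate in pixels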
                Xvar = []
                Yvar = []
                for i in range(2, len(sl)):
                    Xvar.append((sl[i][0] - sl[i - 1][0])**2)
                    Yvar.append((sl[i][1] - sl[i - 1][1])**2)
                XRMS = (sum(Xvar) / len(Xvar))**0.5
                YRMS = (sum(Yvar) / len(Yvar))**0.5
                self.pxdsttresh = (XRMS, YRMS)

                # calculate pixels per cm
                pixpercm = (self.dispsize[0] / float(self.screensize[0]) +
                            self.dispsize[1] / float(self.screensize[1])) / 2
                # get accuracy
                res = 0
                i = 0
                while res != 1 and i < self.maxtries:  # multiple tries, in case no (valid) sample is available
                    res = iViewXAPI.iV_GetAccuracy(
                        byref(accuracyData), 0)  # 0 is for 'no visualization'
                    i += 1
                    clock.pause(int(self.sampletime))  # wait for sampletime
                if res == 1:
                    self.accuracy = (
                        (accuracyData.deviationLX, accuracyData.deviationLY),
                        (accuracyData.deviationRX, accuracyData.deviationRY)
                    )  # accuracy = (left tuple, right tuple); tuple = (horizontal deviation, vertical deviation) in degrees of visual angle
                else:
                    err = errorstring(res)
                    print(
                        "WARNING libsmi.SMItracker.calibrate: failed to obtain accuracy data; %s"
                        % err)
                    self.accuracy = ((2, 2), (2, 2))
                    print(
                        "libsmi.SMItracker.calibrate: As an estimate, the intersample distance threshhold was set to it's default value of 2 degrees"
                    )
                # get distance from screen to eyes (information from tracker)
                res = 0
                i = 0
                while res != 1 and i < self.maxtries:  # multiple tries, in case no (valid) sample is available
                    res = iViewXAPI.iV_GetSample(byref(sampleData))
                    i += 1
                    clock.pause(int(self.sampletime))  # wait for sampletime
                if res == 1:
                    screendist = sampleData.leftEye.eyePositionZ / 10.0  # eyePositionZ is in mm; screendist is in cm
                else:
                    err = errorstring(res)
                    print(
                        "WARNING libsmi.SMItracker.calibrate: failed to obtain screen distance; %s"
                        % err)
                    screendist = settings.SCREENDIST
                    print(
                        "libsmi.SMItracker.calibrate: As an estimate, the screendistance was set to it's default value of 57 cm"
                    )
                # calculate thresholds based on tracker settings
                self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm)
                self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
                self.pxaccuracy = ((deg2pix(screendist, self.accuracy[0][0],
                                            pixpercm),
                                    deg2pix(screendist, self.accuracy[0][1],
                                            pixpercm)),
                                   (deg2pix(screendist, self.accuracy[1][0],
                                            pixpercm),
                                    deg2pix(screendist, self.accuracy[1][1],
                                            pixpercm)))
                self.pxspdtresh = deg2pix(
                    screendist, self.spdtresh / 1000.0,
                    pixpercm)  # in pixels per millisecond
                self.pxacctresh = deg2pix(
                    screendist, self.accthresh / 1000.0,
                    pixpercm)  # in pixels per millisecond**2

                # calibration report
                self.log("pygaze calibration report start")
                self.log("accuracy (degrees): LX=%s, LY=%s, RX=%s, RY=%s" %
                         (self.accuracy[0][0], self.accuracy[0][1],
                          self.accuracy[1][0], self.accuracy[1][1]))
                self.log("accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s" %
                         (self.pxaccuracy[0][0], self.pxaccuracy[0][1],
                          self.pxaccuracy[1][0], self.pxaccuracy[1][1]))
                self.log("precision (RMS noise in pixels): X=%s, Y=%s" %
                         (self.pxdsttresh[0], self.pxdsttresh[1]))
                self.log("distance between participant and display: %s cm" %
                         screendist)
                self.log("fixation threshold: %s pixels" % self.pxfixtresh)
                self.log("speed threshold: %s pixels/ms" % self.pxspdtresh)
                self.log("acceleration threshold: %s pixels/ms**2" %
                         self.pxacctresh)
                self.log("pygaze calibration report end")

                return True

            # validation error
            else:
                print(
                    "WARNING libsmi.SMItracker.calibrate: validation was unsuccesful %s"
                    % verr)
                return False

        # calibration error
        else:
            print(
                "WARNING libsmi.SMItracker.calibrate: calibration was unsuccesful; %s"
                % cerr)
            return False

    def close(self):
        """Neatly close connection to tracker
		
		arguments
		None
		
		returns
		Nothing	-- saves data and sets self.connected to False
		"""

        # save data
        res = iViewXAPI.iV_SaveData(str(self.outputfile),
                                    str(self.description),
                                    str(self.participant), 1)
        if res != 1:
            err = errorstring(res)
            raise Exception(
                "Error in libsmi.SMItracker.close: failed to save data; %s" %
                err)

        # close connection
        iViewXAPI.iV_Disconnect()
        self.connected = False

    def connected(self):
        """Checks if the tracker is connected
		
		arguments
		None
		
		returns
		connected	-- True if connection is established, False if not;
				   sets self.connected to the same value
		"""

        res = iViewXAPI.iV_IsConnected()

        if res == 1:
            self.connected = True
        else:
            self.connected = False

        return self.connected

    def drift_correction(self, pos=None, fix_triggered=False):
        """Performs a drift check
		
		arguments
		None
		
		keyword arguments
		pos			-- (x, y) position of the fixation dot or None for
					   a central fixation (default = None)
		fix_triggered	-- Boolean indicating if drift check should be
					   performed based on gaze position (fix_triggered
					   = True) or on spacepress (fix_triggered = 
					   False) (default = False)
		
		returns
		checked		-- Boolean indicating if drift check is ok (True)
					   or not (False); or calls self.calibrate if 'q'
					   or 'escape' is pressed
		"""

        if fix_triggered:
            return self.fix_triggered_drift_correction(pos)

        if pos == None:
            pos = self.dispsize[0] / 2, self.dispsize[1] / 2

        pressed = False
        while not pressed:
            pressed, presstime = self.kb.get_key()
            if pressed:
                if pressed == 'escape' or pressed == 'q':
                    print(
                        "libsmi.SMItracker.drift_correction: 'q' or 'escape' pressed"
                    )
                    return self.calibrate(calibrate=True, validate=True)
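                # the drift check passes if the Euclidean distance between
                # the sampled gaze position and the target is smaller than
                # self.pxerrdist (the pixel equivalent of self.errdist, set
                # during calibration)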
                gazepos = self.sample()
                if ((gazepos[0] - pos[0])**2 +
                    (gazepos[1] - pos[1])**2)**0.5 < self.pxerrdist:
                    return True
                else:
                    self.errorbeep.play()
        return False

    def fix_triggered_drift_correction(self,
                                       pos=None,
                                       min_samples=10,
                                       max_dev=60,
                                       reset_threshold=30):
        """Performs a fixation triggered drift correction by collecting
		a number of samples and calculating the average distance from the
		fixation position
		
		arguments
		None
		
		keyword arguments
		pos			-- (x, y) position of the fixation dot or None for
					   a central fixation (default = None)
		min_samples		-- minimal amount of samples after which an
					   average deviation is calculated (default = 10)
		max_dev		-- maximal deviation from fixation in pixels
					   (default = 60)
		reset_threshold	-- if the horizontal or vertical distance in
					   pixels between two consecutive samples is
					   larger than this threshold, the sample
					   collection is reset (default = 30)
		
		returns
		checked		-- Boolean indicating if drift check is ok (True)
					   or not (False); or calls self.calibrate if 'q'
					   or 'escape' is pressed
		"""

        if pos == None:
            pos = self.dispsize[0] / 2, self.dispsize[1] / 2

        # loop until we have sufficient samples
        lx = []
        ly = []
        while len(lx) < min_samples:

            # pressing escape enters the calibration screen
            if self.kb.get_key()[0] in ['escape', 'q']:
                print(
                    "libsmi.SMItracker.fix_triggered_drift_correction: 'q' or 'escape' pressed"
                )
                return self.calibrate(calibrate=True, validate=True)

            # collect a sample
            x, y = self.sample()

            if len(lx) == 0 or x != lx[-1] or y != ly[-1]:

                # if present sample deviates too much from previous sample, reset counting
                if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold
                                    or abs(y - ly[-1]) > reset_threshold):
                    lx = []
                    ly = []

                # collect samples
                else:
                    lx.append(x)
                    ly.append(y)

            if len(lx) == min_samples:

                avg_x = sum(lx) / len(lx)
                avg_y = sum(ly) / len(ly)
                d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5

                if d < max_dev:
                    return True
                else:
                    lx = []
                    ly = []

    def get_eyetracker_clock_async(self):
        """Not supported for SMItracker (yet)"""

        print("function not supported yet")

    def log(self, msg):
        """Writes a message to the log file
		
		arguments
		msg		-- a string to include in the log file
		
		returns
		Nothing	-- uses native log function of iViewX to include a line
				   in the log file
		"""

        res = iViewXAPI.iV_Log(c_char_p(msg))
        if res != 1:
            err = errorstring(res)
            print(
                "WARNING libsmi.SMItracker.log: failed to log message '%s'; %s"
                % (msg, err))

    def log_var(self, var, val):
        """Writes a variable to the log file
		
		arguments
		var		-- variable name
		val		-- variable value
		
		returns
		Nothing	-- uses native log function of iViewX to include a line
				   in the log file in a "var NAME VALUE" layout
		"""

        msg = "var %s %s" % (var, val)

        res = iViewXAPI.iV_Log(c_char_p(msg))
        if res != 1:
            err = errorstring(res)
            print(
                "WARNING libsmi.SMItracker.log_var: failed to log variable '%s' with value '%s'; %s"
                % (var, val, err))

    def prepare_backdrop(self):
        """Not supported for SMItracker (yet)"""

        print("function not supported yet")

    def prepare_drift_correction(self, pos):
        """Not supported for SMItracker (yet)"""

        print("function not supported yet")

    def pupil_size(self):
        """Return pupil size
		
		arguments
		None
		
		returns
		pupil size	-- returns pupil diameter for the eye that is currently
				   being tracked (as specified by self.eye_used) or -1
				   when no data is obtainable
		"""

        res = iViewXAPI.iV_GetSample(byref(sampleData))

        # if a new sample exists
        if res == 1:
            # left eye
            if self.eye_used == self.left_eye:
                ps = sampleData.leftEye.diam
            # right eye
            else:
                ps = sampleData.rightEye.diam
            # set previous pupil size to newest pupil size
            self.prevps = ps

            return ps

        # no new sample available
        elif res == 2:

            return self.prevps

        # invalid data
        else:
            # print warning to interpreter
            err = errorstring(res)
            print(
                "WARNING libsmi.SMItracker.pupil_size: failed to obtain sample; %s"
                % err)

            return -1

    def sample(self):
        """Returns newest available gaze position
		
		arguments
		None
		
		returns
		sample	-- an (x,y) tuple or a (-1,-1) on an error
		"""

        res = iViewXAPI.iV_GetSample(byref(sampleData))

        if self.eye_used == self.right_eye:
            newsample = sampleData.rightEye.gazeX, sampleData.rightEye.gazeY
        else:
            newsample = sampleData.leftEye.gazeX, sampleData.leftEye.gazeY

        if res == 1:
            self.prevsample = newsample[:]
            return newsample
        elif res == 2:
            return self.prevsample
        else:
            err = errorstring(res)
            print(
                "WARNING libsmi.SMItracker.sample: failed to obtain sample; %s"
                % err)
            return (-1, -1)

    def send_command(self, cmd):
        """Sends a command to the eye tracker
		
		arguments
		cmd		-- the command (a string value) to be sent to iViewX
		
		returns
		Nothing
		"""

        try:
            iViewXAPI.iV_SendCommand(c_char_p(cmd))
        except:
            raise Exception(
                "Error in libsmi.SMItracker.send_command: failed to send remote command to iViewX (iV_SendCommand might be deprecated)"
            )

    def set_backdrop(self):
        """Not supported for SMItracker (yet)"""

        print("function not supported yet")

    def set_eye_used(self):
        """Logs the eye_used variable, based on which eye was specified
		(if both eyes are being tracked, the left eye is used)
		
		arguments
		None
		
		returns
		Nothing	-- logs which eye is used by calling self.log_var, e.g.
				   self.log_var("eye_used", "right")
		"""

        if self.eye_used == self.right_eye:
            self.log_var("eye_used", "right")
        else:
            self.log_var("eye_used", "left")

    def start_recording(self):
        """Starts recording eye position
		
		arguments
		None
		
		returns
		Nothing	-- sets self.recording to True when recording is
				   successfully started
		"""

        res = 0
        i = 0
        while res != 1 and i < self.maxtries:
            res = iViewXAPI.iV_StartRecording()
            i += 1

        if res == 1:
            self.recording = True
        else:
            self.recording = False
            err = errorstring(res)
            raise Exception("Error in libsmi.SMItracker.start_recording: %s" %
                            err)

    def status_msg(self, msg):
        """Not supported for SMItracker (yet)"""

        print("function not supported yet")

    def stop_recording(self):
        """Stop recording eye position
		
		arguments
		None
		
		returns
		Nothing	-- sets self.recording to False when recording is
				   successfully stopped
		"""

        res = 0
        i = 0
        while res != 1 and i < self.maxtries:
            res = iViewXAPI.iV_StopRecording()
            i += 1

        if res == 1:
            self.recording = False
        else:
            self.recording = False
            err = errorstring(res)
            raise Exception("Error in libsmi.SMItracker.stop_recording: %s" %
                            err)

    def set_detection_type(self, eventdetection):
        """Set the event detection type to either PyGaze algorithms, or
		native algorithms as provided by the manufacturer (only if
		available: detection type will default to PyGaze if no native
		functions are available)
		
		arguments
		eventdetection	--	a string indicating which detection type
						should be employed: either 'pygaze' for
						PyGaze event detection algorithms or
						'native' for the manufacturer's algorithms (only
						if available; will default to 'pygaze' if no
						native event detection is available)
		returns		--	detection type for saccades, fixations and
						blinks in a tuple, e.g. 
						('pygaze','native','native') when 'native'
						was passed, but native detection was not
						available for saccade detection
		"""

        if eventdetection in ['pygaze', 'native']:
            self.eventdetection = eventdetection
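
        # SMI natively provides only fixation detection (see the 'native'
        # branch in wait_for_fixation_end), so saccade and blink detection
        # always fall back to the PyGaze algorithms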

        return ('pygaze', 'native', 'pygaze')

    def wait_for_event(self, event):
        """Waits for event
		
		arguments
		event		-- an integer event code, one of the following:
					3 = STARTBLINK
					4 = ENDBLINK
					5 = STARTSACC
					6 = ENDSACC
					7 = STARTFIX
					8 = ENDFIX
		
		returns
		outcome	-- a self.wait_for_* method is called, depending on the
				   specified event; the return values of corresponding
				   method are returned
		"""

        if event == 5:
            outcome = self.wait_for_saccade_start()
        elif event == 6:
            outcome = self.wait_for_saccade_end()
        elif event == 7:
            outcome = self.wait_for_fixation_start()
        elif event == 8:
            outcome = self.wait_for_fixation_end()
        elif event == 3:
            outcome = self.wait_for_blink_start()
        elif event == 4:
            outcome = self.wait_for_blink_end()
        else:
            raise Exception(
                "Error in libsmi.SMItracker.wait_for_event: eventcode %s is not supported"
                % event)

        return outcome

    def wait_for_blink_end(self):
        """Waits for a blink end and returns the blink ending time
		
		arguments
		None
		
		returns
		timestamp		--	blink ending time in milliseconds, as
						measured from experiment begin time
		"""

        # # # # #
        # SMI method

        if self.eventdetection == 'native':

            # print warning, since SMI does not have a blink detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer blink detection; PyGaze algorithm \
				will be used")

        # # # # #
        # PyGaze method

        blinking = True

        # loop while there is a blink
        while blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's valid
            if self.is_valid_sample(gazepos):
                # if it is a valid sample, blinking has stopped
                blinking = False

        # return timestamp of blink end
        return clock.get_time()

    def wait_for_blink_start(self):
        """Waits for a blink start and returns the blink starting time
		
		arguments
		None
		
		returns
		timestamp		--	blink starting time in milliseconds, as
						measured from experiment begin time
		"""

        # # # # #
        # SMI method

        if self.eventdetection == 'native':

            # print warning, since SMI does not have a blink detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer blink detection; PyGaze algorithm \
				will be used")

        # # # # #
        # PyGaze method

        blinking = False

        # loop until there is a blink
        while not blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's a valid sample
            if not self.is_valid_sample(gazepos):
                # get timestamp for possible blink start
                t0 = clock.get_time()
                # loop until a blink is determined, or a valid sample occurs
                while not self.is_valid_sample(self.sample()):
                    # check if time has surpassed 150 ms
                    if clock.get_time() - t0 >= 150:
                        # return timestamp of blink start
                        return t0

    def wait_for_fixation_end(self):
        """Returns time and gaze position when a fixation has ended;
		function assumes that a 'fixation' has ended when a deviation of
		more than self.pxfixtresh from the initial fixation position has
		been detected (self.pxfixtresh is created in self.calibration,
		based on self.fixtresh, a property defined in self.__init__)
		
		arguments
		None
		
		returns
		time, gazepos	-- time is the ending time in milliseconds (from
					   expstart), gazepos is a (x,y) gaze position
					   tuple of the position from which the fixation
					   was initiated
		"""

        # # # # #
        # SMI method

        if self.eventdetection == 'native':

            moving = True
            while moving:
                # get newest event
                res = 0
                while res != 1:
                    res = iViewXAPI.iV_GetEvent(byref(eventData))
                    stime = clock.get_time()
                # check if event is a fixation (SMI only supports
                # fixations at the moment)
                if eventData.eventType == 'F':
                    # get timestamp and starting position
                    timediff = stime - (int(eventData.startTime) / 1000.0)
                    etime = timediff + (int(eventData.endTime) / 1000.0
                                        )  # time is in microseconds
                    fixpos = (eventData.positionX, eventData.positionY)
                    # return ending time and position
                    return etime, fixpos

        # # # # #
        # PyGaze method

        else:

            # function assumes that a 'fixation' has ended when a deviation of more than fixtresh
            # from the initial 'fixation' position has been detected

            # get starting time and position
            stime, spos = self.wait_for_fixation_start()

            # loop until fixation has ended
            while True:
                # get new sample
                npos = self.sample()  # get newest sample
                # check if sample is valid
                if self.is_valid_sample(npos):
                # check if sample deviates too much from starting position
                    if (npos[0] - spos[0])**2 + (
                            npos[1] -
                            spos[1])**2 > self.pxfixtresh**2:  # Pythagoras
                        # break loop if deviation is too high
                        break

            return clock.get_time(), spos

    def wait_for_fixation_start(self):
        """Returns starting time and position when a fixation is started;
		function assumes a 'fixation' has started when gaze position
		remains reasonably stable (i.e. within self.pxfixtresh of the starting
		position) for self.fixtimetresh milliseconds (self.pxfixtresh
		is created in self.calibration, based on self.fixtresh, a property
		defined in self.__init__)
		
		arguments
		None
		
		returns
		time, gazepos	-- time is the starting time in milliseconds (from
					   expstart), gazepos is a (x,y) gaze position
					   tuple of the position from which the fixation
					   was initiated
		"""

        # # # # #
        # SMI method

        if self.eventdetection == 'native':

            # print warning, since SMI does not have a fixation start
            # detection built into their API (only ending)

            print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer fixation START detection (only \
				fixation ENDING); PyGaze algorithm will be used")

        # # # # #
        # PyGaze method

        # function assumes a 'fixation' has started when gaze position
        # remains reasonably stable for self.fixtimetresh

        # get starting position
        spos = self.sample()
        while not self.is_valid_sample(spos):
            spos = self.sample()

        # get starting time
        t0 = clock.get_time()

        # wait for reasonably stable position
        moving = True
        while moving:
            # get new sample
            npos = self.sample()
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if new sample is too far from starting position
                if (npos[0] - spos[0])**2 + (
                        npos[1] -
                        spos[1])**2 > self.pxfixtresh**2:  # Pythagoras
                    # if not, reset starting position and time
                    spos = copy.copy(npos)
                    t0 = clock.get_time()
                # if new sample is close to starting sample
                else:
                    # get timestamp
                    t1 = clock.get_time()
                    # check if fixation time threshold has been surpassed
                    if t1 - t0 >= self.fixtimetresh:
                        # return time and starting position
                        return t1, spos

    def wait_for_saccade_end(self):
        """Returns ending time, starting and end position when a saccade is
		ended; based on Dalmaijer et al. (2013) online saccade detection
		algorithm
		
		arguments
		None
		
		returns
		endtime, startpos, endpos	-- endtime in milliseconds (from 
							   expbegintime); startpos and endpos
							   are (x,y) gaze position tuples
		"""

        # # # # #
        # SMI method

        if self.eventdetection == 'native':

            # print warning, since SMI does not have saccade detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer saccade detection; PyGaze \
				algorithm will be used")

        # # # # #
        # PyGaze method

        # get starting position (no blinks)
        t0, spos = self.wait_for_saccade_start()
        # get valid sample
        prevpos = self.sample()
        while not self.is_valid_sample(prevpos):
            prevpos = self.sample()
        # get starting time, intersample distance, and velocity
        t1 = clock.get_time()
        s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])**
             2)**0.5  # = intersample distance = speed in px/sample
        v0 = s / (t1 - t0)
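        # v0 is the initial velocity in pixels per millisecond: the pixel
        # distance divided by the elapsed time reported by clock.get_time()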

        # run until velocity and acceleration go below threshold
        saccadic = True
        while saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # calculate distance
                s = ((newpos[0] - prevpos[0])**2 + (newpos[1] - prevpos[1])**
                     2)**0.5  # = speed in pixels/sample
                # calculate velocity
                v1 = s / (t1 - t0)
                # calculate acceleration
                a = (v1 - v0) / (
                    t1 - t0
                )  # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample)
                # check if velocity and acceleration are below threshold
                if v1 < self.pxspdtresh and (a > -1 * self.pxacctresh
                                             and a < 0):
                    saccadic = False
                    epos = newpos[:]
                    etime = clock.get_time()
                # update previous values
                t0 = copy.copy(t1)
                v0 = copy.copy(v1)
            # update previous sample
            prevpos = newpos[:]

        return etime, spos, epos

    def wait_for_saccade_start(self):
        """Returns starting time and starting position when a saccade is
		started; based on Dalmaijer et al. (2013) online saccade detection
		algorithm
		
		arguments
		None
		
		returns
		starttime, startpos	-- starttime in milliseconds (from expbegintime);
					   startpos is an (x,y) gaze position tuple
		"""

        # # # # #
        # SMI method

        if self.eventdetection == 'native':

            # print warning, since SMI does not have saccade detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer saccade detection; PyGaze \
				algorithm will be used")

        # # # # #
        # PyGaze method

        # get starting position (no blinks)
        newpos = self.sample()
        while not self.is_valid_sample(newpos):
            newpos = self.sample()
        # get starting time, position, intersampledistance, and velocity
        t0 = clock.get_time()
        prevpos = newpos[:]
        s = 0
        v0 = 0

        # get samples
        saccadic = False
        while not saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # check if distance is larger than precision error
                sx = newpos[0] - prevpos[0]
                sy = newpos[1] - prevpos[1]
                if (sx / self.pxdsttresh[0])**2 + (
                        sy / self.pxdsttresh[1]
                )**2 > self.weightdist:  # weigthed distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise
                    # calculate distance
                    s = ((sx)**2 + (sy)**
                         2)**0.5  # intersampledistance = speed in pixels/ms
                    # calculate velocity
                    v1 = s / (t1 - t0)
                    # calculate acceleration
                    a = (v1 - v0) / (t1 - t0)  # acceleration in pixels/ms**2
                    # check if either velocity or acceleration are above threshold values
                    if v1 > self.pxspdtresh or a > self.pxacctresh:
                        saccadic = True
                        spos = prevpos[:]
                        stime = clock.get_time()
                    # update previous values
                    t0 = copy.copy(t1)
                    v0 = copy.copy(v1)

                # update previous sample
                prevpos = newpos[:]

        return stime, spos

    def is_valid_sample(self, gazepos):
        """Checks if the sample provided is valid, based on SMI specific
		criteria (for internal use)
		
		arguments
		gazepos		--	a (x,y) gaze position tuple, as returned by
						self.sample()
		
		returns
		valid			--	a Boolean: True on a valid sample, False on
						an invalid sample
		"""

        # return False if a sample is invalid
        if gazepos == (-1, -1):
            return False
        # sometimes, on SMI devices, invalid samples can actually contain
        # numbers; these cluster very close to (0, 0), which is why a sample
        # with a 0.0 coordinate and a very low coordinate sum is rejected here
        elif sum(gazepos) < 10 and 0.0 in gazepos:
            return False

        # in any other case, the sample is valid
        return True
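
A minimal usage sketch for the class above (illustrative only: the import path is an assumption, and a running iViewX server must be reachable at the given address):

from pygaze.display import Display
from pygaze._eyetracker.libsmi import SMItracker  # assumed module path

disp = Display()
tracker = SMItracker(disp, ip='127.0.0.1', sendport=4444, receiveport=5555)
tracker.calibrate()
tracker.start_recording()
x, y = tracker.sample()
tracker.stop_recording()
tracker.close()
disp.close()
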
Example #8
class libeyelink(BaseEyeTracker):

    MAX_TRY = 100

    def __init__(self,
                 display,
                 resolution=DISPSIZE,
                 data_file=LOGFILENAME + ".edf",
                 fg_color=FGC,
                 bg_color=BGC,
                 eventdetection=EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 force_drift_correct=True,
                 pupil_size_mode=EYELINKPUPILSIZEMODE,
                 **args):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, libeyelink)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        global _eyelink

        # Make sure that we have a valid data file. The local_data_file may
        # contain a folder. The eyelink_data_file is only a basename, i.e.
        # without folder. The eyelink_data_file must be at most eight characters
        # and end with a `.edf` extension.
        self.local_data_file = data_file
        self.eyelink_data_file = os.path.basename(data_file)
        stem, ext = os.path.splitext(self.eyelink_data_file)
        if len(stem) > 8 or ext.lower() != '.edf':
            raise Exception(
                "The EyeLink requires a data file with an '.edf' extension and "
                "a basename of at most eight characters (excluding the extension).")

        # properties
        self.display = display
        self.fontsize = 18
        self.scr = Screen(disptype=DISPTYPE, mousevisible=False)
        self.kb = Keyboard(keylist=["escape", "q"], timeout=1)
        self.resolution = resolution
        self.recording = False
        self.saccade_velocity_treshold = saccade_velocity_threshold
        self.saccade_acceleration_treshold = saccade_acceleration_threshold
        self.eye_used = None
        self.left_eye = 0
        self.right_eye = 1
        self.binocular = 2
        self.pupil_size_mode = pupil_size_mode
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        # degrees; maximal distance from fixation start (if gaze wanders beyond
        # this, fixation has stopped)
        self.fixtresh = 1.5
        # milliseconds; amount of time gaze has to linger within self.fixtresh
        # to be marked as a fixation
        self.fixtimetresh = 100
        # degrees per second; saccade velocity threshold
        self.spdtresh = self.saccade_velocity_treshold
        # degrees per second**2; saccade acceleration threshold
        self.accthresh = self.saccade_acceleration_treshold
        self.set_detection_type(eventdetection)
        # weighted distance, used for determining whether a movement is due to
        # measurement error (1 is ok, higher is more conservative and will
        # result in only larger saccades to be detected)
        self.weightdist = 10
        # distance between participant and screen in cm
        self.screendist = SCREENDIST
        # physical screen size in cm
        self.screensize = SCREENSIZE
        self.pixpercm = (self.resolution[0]/float(self.screensize[0]) + \
         self.resolution[1]/float(self.screensize[1])) / 2.0
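        # pixpercm is the mean of the horizontal and vertical pixels-per-cm
        # ratios; deg2pix() uses it below to convert the degree-based
        # thresholds into pixel values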
        # only initialize eyelink once
        if _eyelink == None:
            try:
                _eyelink = pylink.EyeLink()
            except:
                raise Exception(
                    "Error in libeyelink.libeyelink.__init__(): Failed to "
                    "connect to the tracker!")
        # determine software version of tracker
        self.tracker_software_ver = 0
        self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
        if self.eyelink_ver == 3:
            tvstr = pylink.getEYELINK().getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            self.tracker_software_ver = int(float(tvstr[(vindex + \
             len("EYELINK CL")):].strip()))
        if self.eyelink_ver == 1:
            self.eyelink_model = 'EyeLink I'
        elif self.eyelink_ver == 2:
            self.eyelink_model = 'EyeLink II'
        elif self.eyelink_ver == 3:
            self.eyelink_model = 'EyeLink 1000'
        else:
            self.eyelink_model = 'EyeLink (model unknown)'
        # Open graphics
        self.eyelink_graphics = EyelinkGraphics(self, _eyelink)
        pylink.openGraphicsEx(self.eyelink_graphics)
        # Optionally force drift correction. For some reason this must be done
        # as (one of) the first things, otherwise a segmentation fault occurs.
        if force_drift_correct:
            self.send_command('driftcorrect_cr_disable = OFF')
        # Set pupil-size mode
        if self.pupil_size_mode == 'area':
            pylink.getEYELINK().setPupilSizeDiameter(False)
        elif self.pupil_size_mode == 'diameter':
            pylink.getEYELINK().setPupilSizeDiameter(True)
        else:
            raise Exception(
             "pupil_size_mode should be 'area' or 'diameter', not %s" \
             % self.pupil_size_mode)
        pylink.getEYELINK().openDataFile(self.eyelink_data_file)
        pylink.flushGetkeyQueue()
        pylink.getEYELINK().setOfflineMode()
        # notify eyelink of display resolution
        self.send_command("screen_pixel_coords = 0 0 %d %d" % \
         (self.resolution[0], self.resolution[1]))
        # get some configuration stuff
        if self.eyelink_ver >= 2:
            self.send_command("select_parser_configuration 0")
            if self.eyelink_ver == 2:  # turn off scenelink camera stuff
                self.send_command("scene_camera_gazemap = NO")
        # set EDF file contents (this specifies which data is written to the EDF
        # file)
        self.send_command(
            "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
        )
        if self.tracker_software_ver >= 4:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        # set link data (this specifies which data is sent through the link and
        # thus can be used in gaze contingent displays)
        self.send_command(
            "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if self.tracker_software_ver >= 4:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
        # not quite sure what this means (according to Sebastiaan Mathot, it
        # might be the button that is used to end drift correction?)
        self.send_command("button_function 5 'accept_target_fixation'")

        if not self.connected():
            raise Exception(
                "Error in libeyelink.libeyelink.__init__(): Failed to connect "
                "to the eyetracker!")

    def send_command(self, cmd):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        pylink.getEYELINK().sendCommand(cmd)

    def log(self, msg):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        pylink.getEYELINK().sendMessage(msg)

    def log_var(self, var, val):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        pylink.getEYELINK().sendMessage("var %s %s" % (var, val))

    def status_msg(self, msg):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        print('status message: %s' % msg)
        pylink.getEYELINK().sendCommand("record_status_message '%s'" % msg)

    def connected(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        return pylink.getEYELINK().isConnected()

    def calibrate(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.calibrate(): Trying to "
                "calibrate after recording has started!")

        # # # # #
        # EyeLink calibration and validation

        # attempt calibrate; confirm abort when esc pressed
        while True:
            self.eyelink_graphics.esc_pressed = False
            pylink.getEYELINK().doTrackerSetup()
            if not self.eyelink_graphics.esc_pressed:
                break
            self.confirm_abort_experiment()

        # If we are using the built-in EyeLink event detection, we don't need
        # the RMS calibration routine.
        if self.eventdetection == 'native':
            return

        # # # # #
        # RMS calibration

        # present instructions
        self.display.fill()  # clear display
        self.scr.draw_text(text= \
         "Noise calibration: please look at the dot\n\n(press space to start)",
         pos=(self.resolution[0]/2, int(self.resolution[1]*0.2)),
         center=True, fontsize=self.fontsize)
        self.scr.draw_fixation(fixtype='dot')
        self.display.fill(self.scr)
        self.display.show()
        self.scr.clear()  # clear screen again

        # wait for spacepress
        self.kb.get_key(keylist=['space'], timeout=None)

        # start recording
        self.log("PYGAZE RMS CALIBRATION START")
        self.start_recording()

        # show fixation
        self.display.fill()
        self.scr.draw_fixation(fixtype='dot')
        self.display.fill(self.scr)
        self.display.show()
        self.scr.clear()

        # wait for a bit, to allow participant to fixate
        clock.pause(500)

        # get samples
        # samplelist, prefilled with 1 sample to prevent sl[-1] from producing
        # an error; first sample will be ignored for RMS calculation
        sl = [self.sample()]
        t0 = clock.get_time()  # starting time
        while clock.get_time() - t0 < 1000:
            s = self.sample()  # sample
            if s != sl[-1] and s != (-1, -1) and s != (0, 0):
                sl.append(s)

        # stop recording
        self.log("PYGAZE RMS CALIBRATION END")
        self.stop_recording()

        # calculate RMS noise
        Xvar = []
        Yvar = []
        for i in range(2, len(sl)):
            Xvar.append((sl[i][0] - sl[i - 1][0])**2)
            Yvar.append((sl[i][1] - sl[i - 1][1])**2)
        XRMS = (sum(Xvar) / len(Xvar))**0.5
        YRMS = (sum(Yvar) / len(Yvar))**0.5
        self.pxdsttresh = (XRMS, YRMS)

        # recalculate thresholds (degrees to pixels)
        self.pxfixtresh = deg2pix(self.screendist, self.fixtresh,
                                  self.pixpercm)
        self.pxspdtresh = deg2pix(
            self.screendist, self.spdtresh,
            self.pixpercm) / 1000.0  # in pixels per millisecond
        self.pxacctresh = deg2pix(
            self.screendist, self.accthresh,
            self.pixpercm) / 1000.0  # in pixels per millisecond**2
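
    # The pixel thresholds above come from a degrees-to-pixels conversion. A
    # minimal self-contained sketch of that geometry is given below (a
    # hypothetical helper, not necessarily identical to the actual pygaze
    # deg2pix implementation): the visual angle is projected onto the screen
    # at the viewing distance and scaled by the pixels-per-cm estimate.
    @staticmethod
    def _deg2pix_sketch(screendist_cm, angle_deg, pixpercm):
        import math
        # size (in cm) subtended on the screen by angle_deg at screendist_cm
        size_cm = screendist_cm * math.tan(math.radians(angle_deg))
        return size_cm * pixpercm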

    def drift_correction(self, pos=None, fix_triggered=False):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.drift_correction(): Trying to "
                "perform drift correction after recording has started!")
        if not self.connected():
            raise Exception(
                "Error in libeyelink.libeyelink.drift_correction(): The "
                "eyelink is not connected!")
        if pos == None:
            pos = self.resolution[0] / 2, self.resolution[1] / 2
        if fix_triggered:
            return self.fix_triggered_drift_correction(pos)
        return self.manual_drift_correction(pos)

    def manual_drift_correction(self, pos):
        """
		Performs a manual, i.e. spacebar-triggered drift correction.

		Arguments:
		pos		--	The position for the drift-correction target.

		Returns:
		True if drift correction was successful, False otherwise.
		"""

        self.draw_drift_correction_target(pos[0], pos[1])
        self.eyelink_graphics.esc_pressed = False
        try:
            # The 0 parameters indicate that the display should not be cleared
            # and we should not be allowed to fall back to the set-up screen.
            error = pylink.getEYELINK().doDriftCorrect(pos[0], pos[1], 0, 0)
        except:
            error = -1
        # A 0 exit code means successful drift correction
        if error == 0:
            return True
        # If escape was pressed, we present the confirm abort screen
        if self.eyelink_graphics.esc_pressed:
            self.confirm_abort_experiment()
        # If 'q' was pressed, we drop back to the calibration screen
        else:
            self.calibrate()
        return False
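
    # Because manual_drift_correction() returns False on failure (after either
    # the abort-confirmation screen or a re-calibration), experiment scripts
    # typically retry it in a loop, e.g.
    #   while not tracker.drift_correction():
    #       pass
    # (assumed typical usage; `tracker` here is a pygaze EyeTracker instance).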

    def prepare_drift_correction(self, pos):
        """Puts the tracker in drift correction mode"""

        # start collecting samples in drift correction mode
        self.send_command("heuristic_filter = ON")
        self.send_command("drift_correction_targets = %d %d" % pos)
        self.send_command("start_drift_correction data = 0 0 1 0")
        pylink.msecDelay(50)
        # wait for a bit until samples start coming in (again, not sure if this
        # is indeed what's going on)
        if not pylink.getEYELINK().waitForBlockStart(100, 1, 0):
            print(
                "WARNING libeyelink.libeyelink.prepare_drift_correction(): "
                "Failed to perform drift correction (waitForBlockStart error)")

    def fix_triggered_drift_correction(self,
                                       pos=None,
                                       min_samples=30,
                                       max_dev=60,
                                       reset_threshold=10):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.fix_triggered_drift_correction(): "
                "Trying to perform drift correction after recording has started!"
            )

        self.recording = True
        if pos == None:
            pos = self.resolution[0] / 2, self.resolution[1] / 2
        self.prepare_drift_correction(pos)
        self.draw_drift_correction_target(pos[0], pos[1])

        # loop until we have enough samples
        lx = []
        ly = []
        while len(lx) < min_samples:

            # Check whether the EyeLink is put into set-up mode on the EyeLink
            # PC and, if so, jump to the calibration menu.
            if pylink.getEYELINK().getCurrentMode() == pylink.IN_SETUP_MODE:
                self.recording = False
                self.calibrate()
                print(
                    "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                    "'q' pressed")
                return False

            # pressing escape enters the calibration screen
            resp = self.kb.get_key(keylist=["escape", "q"], timeout=1)[0]
            if resp == 'escape':
                self.recording = False
                self.confirm_abort_experiment()
                print(
                    "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                    "'escape' pressed")
                return False
            elif resp == 'q':
                self.recording = False
                self.calibrate()
                print(
                    "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                    "'q' pressed")
                return False
            # collect a sample
            x, y = self.sample()
            if len(lx) == 0 or x != lx[-1] or y != ly[-1]:
                # if present sample deviates too much from previous sample,
                # start from scratch.
                if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or \
                 abs(y - ly[-1]) > reset_threshold):
                    lx = []
                    ly = []
                # Collect a sample
                else:
                    lx.append(x)
                    ly.append(y)
            # If we have enough samples to perform a drift correction ...
            if len(lx) == min_samples:
                avg_x = sum(lx) / len(lx)
                avg_y = sum(ly) / len(ly)
                d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5
                # emulate spacebar press on success
                pylink.getEYELINK().sendKeybutton(32, 0, pylink.KB_PRESS)
                # getCalibrationResult() returns 0 on success and an exception
                # or a non-zero value otherwise
                result = -1
                try:
                    result = pylink.getEYELINK().getCalibrationResult()
                except:
                    lx = []
                    ly = []
                    print(
                        "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                        "try again")
                if result != 0:
                    try:
                        result = pylink.getEYELINK().getCalibrationResult()
                    except:
                        lx = []
                        ly = []
                        print(
                            "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                            "try again")
        # apply drift correction
        pylink.getEYELINK().applyDriftCorrect()
        self.recording = False
        print(
            "libeyelink.libeyelink.fix_triggered_drift_correction(): success")
        return True

    def start_recording(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.recording = True
        i = 0
        while True:
            # params: write samples, write event, send samples, send events
            print(u'starting recording ...')
            error = pylink.getEYELINK().startRecording(1, 1, 1, 1)
            print(u'returned %s' % error)
            if not error:
                break
            if i > self.MAX_TRY:
                self.close()
                clock.expend()
                raise Exception(
                    "Error in libeyelink.libeyelink.start_recording(): Failed "
                    "to start recording!")
            i += 1
            print(
                ("WARNING libeyelink.libeyelink.start_recording(): Failed to "
                 "start recording (attempt %d of %d)") % (i, self.MAX_TRY))
            pylink.msecDelay(100)
        # don't know what this is
        print(u'Start realtime mode ...')
        pylink.msecDelay(100)
        pylink.beginRealTimeMode(100)
        # wait a bit until samples start coming in
        print(u'Wait for block start ...')
        pylink.msecDelay(100)
        if not pylink.getEYELINK().waitForBlockStart(100, 1, 0):
            raise Exception(
                "Error in libeyelink.libeyelink.start_recording(): Failed to "
                "start recording (waitForBlockStart error)!")
        print(u'done ...')

    def stop_recording(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        print(u'stopping recording ...')
        self.recording = False
        pylink.endRealTimeMode()
        pylink.getEYELINK().setOfflineMode()
        pylink.msecDelay(500)
        print(u'done ...')

    def close(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.eyelink_graphics.close()
        if self.recording:
            self.stop_recording()
        # close data file and transfer it to the experimental PC
        print("libeyelink.libeyelink.close(): Closing data file")
        pylink.getEYELINK().closeDataFile()
        pylink.msecDelay(500)
        print("libeyelink.libeyelink.close(): Transferring %s to %s" \
         % (self.eyelink_data_file, self.local_data_file))
        pylink.getEYELINK().receiveDataFile(self.eyelink_data_file,
                                            self.local_data_file)
        pylink.msecDelay(500)
        print("libeyelink.libeyelink.close(): Closing eyelink")
        pylink.getEYELINK().close()
        pylink.msecDelay(500)

    def set_eye_used(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.eye_used = pylink.getEYELINK().eyeAvailable()
        if self.eye_used == self.right_eye:
            self.log_var("eye_used", "right")
        elif self.eye_used == self.left_eye or self.eye_used == self.binocular:
            self.log_var("eye_used", "left")
            self.eye_used = self.left_eye
        else:
            print("WARNING libeyelink.libeyelink.set_eye_used(): Failed to "
                  "determine which eye is being recorded")

    def pupil_size(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if not self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.pupil_size(): Recording was "
                "not started before collecting eyelink data!")
        if self.eye_used == None:
            self.set_eye_used()
        # get newest sample
        s = pylink.getEYELINK().getNewestSample()
        # check if sample is new
        if s != None:
            # right eye
            if self.eye_used == self.right_eye and s.isRightSample():
                ps = s.getRightEye().getPupilSize()
            # left eye
            elif self.eye_used == self.left_eye and s.isLeftSample():
                ps = s.getLeftEye().getPupilSize()
            # invalid
            else:
                ps = -1
            # set new pupil size as previous pupil size
            self.prevps = ps
        # if no new sample is available, use old data
        else:
            ps = self.prevps
        return ps

    def sample(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if not self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.sample(): Recording was not "
                "started before collecting eyelink data!")
        if self.eye_used == None:
            self.set_eye_used()
        s = pylink.getEYELINK().getNewestSample()
        if s != None:
            if self.eye_used == self.right_eye and s.isRightSample():
                gaze = s.getRightEye().getGaze()
            elif self.eye_used == self.left_eye and s.isLeftSample():
                gaze = s.getLeftEye().getGaze()
            else:
                gaze = (-1, -1)
            self.prevsample = gaze[:]
        else:
            gaze = self.prevsample[:]
        return gaze

    def set_detection_type(self, eventdetection):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if eventdetection in ['pygaze', 'native']:
            self.eventdetection = eventdetection

        return (self.eventdetection, self.eventdetection, self.eventdetection)

    def _get_eyelink_clock_async(self):
        """
		Retrieves the time difference between tracker timestamps and the
		current clock time maintained in the PyGaze environment.

		Note that this is not guaranteed to be a static time difference; the
		clocks might run at different speeds. Therefore, you should consider
		re-running this function every time you rely on this time difference.

		Returns:
		The tracker time minus the clock time
		"""
        return pylink.getEYELINK().trackerTime() - clock.time()
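
    # A minimal sketch (hypothetical helper, not part of the PyGaze API) of
    # how the offset above is used: subtracting it from an EyeLink timestamp
    # gives the corresponding time on the experiment clock, as done in
    # wait_for_event() below.
    def _tracker_time_to_clock_time_sketch(self, tracker_timestamp):
        return tracker_timestamp - self._get_eyelink_clock_async()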

    def wait_for_event(self, event):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if not self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.wait_for_event(): Recording "
                "was not started before collecting eyelink data!")

        if self.eye_used == None:
            self.set_eye_used()
        if self.eventdetection == 'native':
            # since the link buffer has not been polled, old data may have
            # accumulated in the buffer -- so ignore events that are too old:
            t0 = clock.time()  # time of call
            while True:
                d = pylink.getEYELINK().getNextData()
                if d == event:
                    float_data = pylink.getEYELINK().getFloatData()
                    # corresponding clock_time
                    tc = float_data.getTime() - self._get_eyelink_clock_async()
                    if tc > t0:
                        return tc, float_data

        if event == 5:
            outcome = self.wait_for_saccade_start()
        elif event == 6:
            outcome = self.wait_for_saccade_end()
        elif event == 7:
            outcome = self.wait_for_fixation_start()
        elif event == 8:
            outcome = self.wait_for_fixation_end()
        elif event == 3:
            outcome = self.wait_for_blink_start()
        elif event == 4:
            outcome = self.wait_for_blink_end()
        else:
            raise Exception(
                ("Error in libeyelink.libeyelink.wait_for_event: eventcode %s "
                 "is not supported") % event)
        return outcome
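
    # Note on the numeric event codes used above: they appear to mirror the
    # EyeLink/pylink event constants (an assumption based on the mapping):
    # 3 = STARTBLINK, 4 = ENDBLINK, 5 = STARTSACC, 6 = ENDSACC,
    # 7 = STARTFIX, 8 = ENDFIX.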

    def wait_for_saccade_start(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.STARTSACC)
            return t, d.getStartGaze()

        # # # # #
        # PyGaze method

        else:

            # get starting position (no blinks)
            newpos = self.sample()
            while not self.is_valid_sample(newpos):
                newpos = self.sample()
            # get starting time, position, intersampledistance, and velocity
            t0 = clock.get_time()
            prevpos = newpos[:]
            s = 0
            v0 = 0

            # get samples
            saccadic = False
            while not saccadic:
                # get new sample
                newpos = self.sample()
                t1 = clock.get_time()
                if self.is_valid_sample(newpos) and newpos != prevpos:
                    # check if distance is larger than precision error
                    sx = newpos[0] - prevpos[0]
                    sy = newpos[1] - prevpos[1]
                    # weighted distance: (sx/tx)**2 + (sy/ty)**2 > 1 means
                    # movement larger than RMS noise
                    if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 \
                     > self.weightdist:
                        # calculate intersample distance in pixels
                        s = ((sx)**2 + (sy)**2)**0.5
                        # calculate velocity
                        v1 = s / (t1 - t0)
                        # calculate acceleration (in pixels/ms**2)
                        a = (v1 - v0) / (t1 - t0)
                        # check if either velocity or acceleration are above
                        # threshold values
                        if v1 > self.pxspdtresh or a > self.pxacctresh:
                            saccadic = True
                            spos = prevpos[:]
                            stime = clock.get_time()
                        # update previous values
                        t0 = copy.copy(t1)
                        v0 = copy.copy(v1)
                    # update previous sample
                    prevpos = newpos[:]
            return stime, spos
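
    # A self-contained sketch (hypothetical helper, not part of the PyGaze
    # API) of the velocity/acceleration test applied above: speed is the
    # inter-sample distance divided by the elapsed time (px/ms), and
    # acceleration is the change in speed over the elapsed time (px/ms**2).
    @staticmethod
    def _saccade_onset_test_sketch(prevpos, newpos, t0, t1, v0,
                                   spdtresh, acctresh):
        dist = ((newpos[0] - prevpos[0]) ** 2 +
                (newpos[1] - prevpos[1]) ** 2) ** 0.5
        v1 = dist / float(t1 - t0)
        a = (v1 - v0) / float(t1 - t0)
        return (v1 > spdtresh or a > acctresh), v1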

    def wait_for_saccade_end(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.ENDSACC)
            return t, d.getStartGaze(), d.getEndGaze()

        # # # # #
        # PyGaze method

        else:

            # get starting position (no blinks)
            t0, spos = self.wait_for_saccade_start()
            # get valid sample
            prevpos = self.sample()
            while not self.is_valid_sample(prevpos):
                prevpos = self.sample()
            # get starting time, intersample distance, and velocity
            t1 = clock.get_time()
            # intersample distance in pixels
            s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])**2)**0.5
            v0 = s / (t1 - t0)
            # run until velocity and acceleration go below threshold
            saccadic = True
            while saccadic:
                # get new sample
                newpos = self.sample()
                t1 = clock.get_time()
                if self.is_valid_sample(newpos) and newpos != prevpos:
                    # calculate intersample distance in pixels
                    s = ((newpos[0]-prevpos[0])**2 + \
                     (newpos[1]-prevpos[1])**2)**0.5
                    # calculate velocity
                    v1 = s / (t1 - t0)
                    # calculate acceleration in pixels/ms**2
                    a = (v1 - v0) / (t1 - t0)
                    # check if velocity and acceleration are below threshold
                    if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and \
                     a < 0):
                        saccadic = False
                        epos = newpos[:]
                        etime = clock.get_time()
                    # update previous values
                    t0 = copy.copy(t1)
                    v0 = copy.copy(v1)
                # update previous sample
                prevpos = newpos[:]

            return etime, spos, epos

    def wait_for_fixation_start(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.STARTFIX)
            return t, d.getTime(), d.getStartGaze()

        # # # # #
        # PyGaze method

        else:

            # function assumes a 'fixation' has started when gaze position
            # remains reasonably stable for self.fixtimetresh

            # get starting position
            spos = self.sample()
            while not self.is_valid_sample(spos):
                spos = self.sample()

            # get starting time
            t0 = clock.get_time()

            # wait for reasonably stable position
            moving = True
            while moving:
                # get new sample
                npos = self.sample()
                # check if sample is valid
                if self.is_valid_sample(npos):
                    # check if new sample is too far from starting position
                    if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > \
                     self.pxfixtresh**2: # Pythagoras
                        # if so, reset starting position and time
                        spos = copy.copy(npos)
                        t0 = clock.get_time()
                    # if new sample is close to starting sample
                    else:
                        # get timestamp
                        t1 = clock.get_time()
                        # check if fixation time threshold has been surpassed
                        if t1 - t0 >= self.fixtimetresh:
                            # return time and starting position
                            return t1, spos

    def wait_for_fixation_end(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.ENDFIX)
            return t, d.getTime(), d.getStartGaze()

        # # # # #
        # PyGaze method

        else:

            # function assumes that a 'fixation' has ended when a deviation of
            # more than fixtresh from the initial 'fixation' position has been
            # detected

            # get starting time and position
            stime, spos = self.wait_for_fixation_start()

            # loop until fixation has ended
            while True:
                # get new sample
                npos = self.sample()  # get newest sample
                # check if sample is valid
                if self.is_valid_sample(npos):
                    # check if sample deviates too much from starting position
                    if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > \
                     self.pxfixtresh**2: # Pythagoras
                        # break loop if deviation is too high
                        break

            return clock.get_time(), spos

    def wait_for_blink_start(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.STARTBLINK)
            return t, d.getTime()

        # # # # #
        # PyGaze method

        else:

            blinking = False

            # loop until there is a blink
            while not blinking:
                # get newest sample
                gazepos = self.sample()
                # check if it's a valid sample
                if not self.is_valid_sample(gazepos):
                    # get timestamp for possible blink start
                    t0 = clock.get_time()
                    # loop until a blink is determined, or a valid sample occurs
                    while not self.is_valid_sample(self.sample()):
                        # check if time has surpassed 150 ms
                        if clock.get_time() - t0 >= 150:
                            # return timestamp of blink start
                            return t0

    def wait_for_blink_end(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.ENDBLINK)
            return t

        # # # # #
        # PyGaze method

        else:

            blinking = True

            # loop while there is a blink
            while blinking:
                # get newest sample
                gazepos = self.sample()
                # check if it's valid
                if self.is_valid_sample(gazepos):
                    # if it is a valid sample, blinking has stopped
                    blinking = False

            # return timestamp of blink end
            return clock.get_time()

    def set_draw_calibration_target_func(self, func):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.eyelink_graphics.draw_cal_target = func

    # ***
    #
    # Internal functions below
    #
    # ***

    def is_valid_sample(self, gazepos):
        """
		Checks if the sample provided is valid, based on EyeLink specific
		criteria.
		
		arguments
		gazepos		--	an (x,y) gaze position tuple, as returned by
						self.sample()
		
		returns
		valid			--	a Boolean: True on a valid sample, False on
						an invalid sample
		"""

        # return False if a sample is invalid
        if gazepos == (-1, -1):
            return False

        # in any other case, the sample is valid
        return True

    def confirm_abort_experiment(self):
        """
		Asks for confirmation before aborting the experiment. Displays a
		confirmation screen, collects the response, and acts accordingly.

		Exceptions:
		Raises a response_error upon confirmation.

		Returns:
		False if no confirmation was given.
		"""

        # Display the confirmation screen
        scr = Screen(disptype=DISPTYPE)
        kb = Keyboard(timeout=5000)
        yc = DISPSIZE[1] / 2
        xc = DISPSIZE[0] / 2
        ld = 40  # Line height
        scr.draw_text(
            u'Really abort experiment?',
            pos=(xc, yc - 3 * ld),
            fontsize=self.fontsize)
        scr.draw_text(
            u'Press \'Y\' to abort',
            pos=(xc, yc - 0.5 * ld),
            fontsize=self.fontsize)
        scr.draw_text(
            u'Press any other key or wait 5s to go to setup',
            pos=(xc, yc + 0.5 * ld),
            fontsize=self.fontsize)
        self.display.fill(scr)
        self.display.show()
        # process the response:
        try:
            key, time = kb.get_key()
        except:
            return False
        # if confirmation, close experiment
        if key == u'y':
            raise Exception(u'The experiment was aborted')
        self.eyelink_graphics.esc_pressed = False
        return False

    def draw_drift_correction_target(self, x, y):
        """
		Draws the drift-correction target.
		
		arguments
		
		x		--	The X coordinate
		y		--	The Y coordinate
		"""

        self.scr.clear()
        self.scr.draw_fixation(fixtype='dot', colour=FGC, pos=(x,y), pw=0, \
         diameter=12)
        self.display.fill(self.scr)
        self.display.show()
Example #9
0
# # # # #
# directory stuff

DIR = os.path.split(os.path.abspath(__file__))[0]
soundfile = os.path.join(DIR, 'bark.ogg')
imagefile = os.path.join(DIR, 'kitten.png')

# # # # #
# create instances

# initialize the display
disp = Display()

# initialize a screen
scr = Screen()

# initialize an EyeTracker
tracker = EyeTracker(disp)

# initialize a keyboard
kb = Keyboard(keylist=['space'], timeout=None)

# initialize a sound
snd = Sound(soundfile=soundfile)

# initialize a Timer
timer = Time()

# create a new logfile
log = Logfile(filename="test")
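
# A hedged sketch of how such a script typically continues (assumed flow, not
# part of the original example): calibrate the tracker, record one trial, and
# clean up. The column names in the log header are hypothetical.
log.write(["trialnr", "fixonset", "gazex", "gazey"])
tracker.calibrate()
tracker.start_recording()
scr.draw_fixation(fixtype='dot')
disp.fill(scr)
fixonset = disp.show()
kb.get_key()  # wait for a space press (see keylist above)
gazex, gazey = tracker.sample()
log.write([1, fixonset, gazex, gazey])
tracker.stop_recording()
tracker.close()
log.close()
disp.close()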
Example #10
0
from pygaze.display import Display
from pygaze.screen import Screen
from pygaze.logfile import Logfile
from pygaze.eyetracker import EyeTracker
import pygaze.libtime as timer

from scansync.mri import MRITriggerBox


##############
# INITIALISE #
##############

# Initialise a new Display instance.
disp = Display()

# Present a start-up screen.
scr = Screen()
scr.draw_text("Loading, please wait...", fontsize=24)
disp.fill(scr)
disp.show()

# Open a new log file.
log = Logfile()
# Write the log header.
log.write(["trialnr", "block", "run", "stim", "keypress", "go_nogo",
    "face_onset", "signal_onset", "resp_onset", "RT", "accuracy", "respmap",
    "block_type"])

# Open a new log file to log events.
event_log = Logfile(filename=EVENT_LOG)
event_log.write(["time", "event"])

# Initialise the eye tracker.
tracker = EyeTracker(disp)
Example #11
0
class EyelinkGraphics(custom_display):

	"""
	Implements the EyeLink graphics that are shown on the experimental PC, such
	as the camera image, and the calibration dots. This class only implements
	the drawing operations, and little to none of the logic behind the set-up,
	which is implemented in PyLink.
	"""

	def __init__(self, display, tracker):

		"""
		Constructor.

		Arguments:
		display		--	A PyGaze Display object.
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

		pylink.EyeLinkCustomDisplay.__init__(self)

		# objects
		self.display = display
		self.screen = Screen(disptype=DISPTYPE, mousevisible=False)
		self.kb = Keyboard(keylist=None, timeout=1)
		if DISPTYPE == 'pygame':
			self.kb.set_timeout(timeout=0.001)
		# If we are using a DISPTYPE that cannot be used directly, we have to
		# save the camera image to a temporary file on each frame.
		#if DISPTYPE not in ('pygame', 'psychopy'):
		import tempfile
		import os
		self.tmp_file = os.path.join(tempfile.gettempdir(), \
			'__eyelink__.jpg')
		# drawing properties
		self.xc = self.display.dispsize[0]/2
		self.yc = self.display.dispsize[1]/2
		self.ld = 40 # line distance
		# menu
		self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False)
		self.menuscreen.draw_text(text="== Eyelink calibration menu ==", pos= \
			(self.xc,self.yc-5*self.ld), center=True, font='mono', fontsize= \
			12, antialias=True)
		self.menuscreen.draw_text(text="Press C to calibrate", pos=(self.xc, \
			self.yc-3*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press V to validate", pos=(self.xc, \
			self.yc-2*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press A to auto-threshold", pos=( \
			self.xc,self.yc-1*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press Enter to show camera image", \
			pos=(self.xc,self.yc+1*self.ld), center=True, font='mono', \
			fontsize=12, antialias=True)
		self.menuscreen.draw_text(text= \
			"(then change between images using the arrow keys)", pos=(self.xc, \
			self.yc+2*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press Q to exit menu", pos=(self.xc, \
			self.yc+5*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		# beeps
		self.__target_beep__ = Sound(osc='sine', freq=440, length=50, attack= \
			0, decay=0, soundfile=None)
		self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200, \
			attack=0, decay=0, soundfile=None)
		self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200, \
			attack=0, decay=0, soundfile=None)
		# further properties
		self.state = None
		self.imagebuffer = array.array('l')
		self.pal = None
		self.size = (0,0)
		self.set_tracker(tracker)
		self.last_mouse_state = -1

	def set_tracker(self, tracker):

		"""
		Connects the tracker to the graphics environment.

		Arguments:
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

		self.tracker = tracker
		self.tracker_version = tracker.getTrackerVersion()
		if self.tracker_version >= 3:
			self.tracker.sendCommand("enable_search_limits=YES")
			self.tracker.sendCommand("track_search_limits=YES")
			self.tracker.sendCommand("autothreshold_click=YES")
			self.tracker.sendCommand("autothreshold_repeat=YES")
			self.tracker.sendCommand("enable_camera_position_detect=YES")

	def setup_cal_display(self):

		"""
		Sets up the initial calibration display, which contains a menu with
		instructions.
		"""
		
		# show instructions
		self.display.fill(self.menuscreen)
		self.display.show()

	def exit_cal_display(self):

		"""Exits calibration display."""

		self.clear_cal_display()

	def record_abort_hide(self):

		"""TODO: What does this do?"""

		pass

	def clear_cal_display(self):

		"""Clears the calibration display"""

		self.display.fill()
		self.display.show()

	def erase_cal_target(self):

		"""TODO: What does this do?"""

		self.clear_cal_display()

	def draw_cal_target(self, x, y):

		"""
		Draws calibration target.

		Arguments:
		x		--	The X coordinate of the target.
		y		--	The Y coordinate of the target.
		"""

		self.play_beep(pylink.CAL_TARG_BEEP)
		self.screen.clear()		
		self.screen.draw_fixation(fixtype='dot', pos=(x,y))
		self.display.fill(screen=self.screen)
		self.display.show()

	def play_beep(self, beepid):

		"""
		Plays a sound.

		Arguments:
		beepid		--	A number that identifies the sound.
		"""

		if beepid == pylink.CAL_TARG_BEEP:
			# For some reason, playing the beep here doesn't work, so we have
			# to play it when the calibration target is drawn.
			if EYELINKCALBEEP:
				self.__target_beep__.play()			
		elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
			# show a picture
			self.screen.clear()
			self.screen.draw_text(text= \
				"calibration lost, press 'q' to return to menu", pos= \
				(self.xc,self.yc), center=True, font='mono', fontsize=12, \
				antialias=True)
			self.display.fill(self.screen)
			self.display.show()
			# play beep
			self.__target_beep__error__.play()
		elif beepid == pylink.CAL_GOOD_BEEP:
			self.screen.clear()
			if self.state == "calibration":
				self.screen.draw_text(text= \
					"Calibration succesfull, press 'v' to validate", pos= \
					(self.xc,self.yc), center=True, font='mono', fontsize=12, \
					antialias=True)
				pass
			elif self.state == "validation":
				self.screen.draw_text(text= \
					"Validation succesfull, press 'q' to return to menu", \
					pos=(self.xc,self.yc), center=True, font='mono', fontsize= \
					12, antialias=True)
				pass
			else:
				self.screen.draw_text(text="Press 'q' to return to menu", pos= \
					(self.xc,self.yc), center=True, font='mono', fontsize=12, \
					antialias=True)
				pass
			# show screen
			self.display.fill(self.screen)
			self.display.show()
			# play beep
			self.__target_beep__done__.play()
		else: #	DC_GOOD_BEEP	or DC_TARG_BEEP
			pass

	def getColorFromIndex(self, i):

		"""
		Maps a PyLink color code onto a color-name string.

		Arguments:
		i		--	A PyLink color code.

		Returns:
		A color-name string.
		"""

		print('getColorFromIndex(%s)' % i)
		if i == pylink.CR_HAIR_COLOR:
			return 'white'
		if i == pylink.PUPIL_HAIR_COLOR:
			return 'yellow'
		if i == pylink.PUPIL_BOX_COLOR:
			return 'green'
		if i == pylink.SEARCH_LIMIT_BOX_COLOR:
			return 'red'
		if i == pylink.MOUSE_CURSOR_COLOR:
			return 'blue'
		return 'black'

	def draw_line(self, x1, y1, x2, y2, colorindex):

		"""Unused"""

		# Find out how this can be used
		print('draw_line() %s %s %s %s' % (x1, y1, x2, y2))
		
	def draw_lozenge(self, x, y, width, height, colorindex):

		"""Unused"""

		# Find out how this can be used
		print('draw_lozenge() %s %s %s %s' % (x, y, width, height))

	def get_mouse_state(self):

		"""Unused"""

		pass

	def get_input_key(self):

		"""
		Gets an input key.

		Returns:
		A list containing a single pylink key identifier.
		"""

		try:
			key, time = self.kb.get_key(keylist=None, timeout='default')
		except:
			self.esc_pressed = True
			key = 'q'
		if key == None:
			return None
		# Escape functions as a 'q' with the additional esc_pressed flag
		if key == 'escape':
			key = 'q'
			self.esc_pressed = True
		# Process regular keys
		if key == "return":
			keycode = pylink.ENTER_KEY
			self.state = None
		elif key == "space":
			keycode = ord(" ")
		elif key == "q":
			keycode = pylink.ESC_KEY
			self.state = None
		elif key == "c":
			keycode = ord("c")
			self.state = "calibration"
		elif key == "v":
			keycode = ord("v")
			self.state = "validation"
		elif key == "a":
			keycode = ord("a")
		elif key == "up":
			keycode = pylink.CURS_UP
		elif key == "down":
			keycode = pylink.CURS_DOWN
		elif key == "left":
			keycode = pylink.CURS_LEFT
		elif key == "right":
			keycode = pylink.CURS_RIGHT
		else:
			keycode = 0
		# Convert key to PyLink keycode and return
		return [pylink.KeyInput(keycode, 0)] # 0 = pygame.KMOD_NONE

	def exit_image_display(self):

		"""Exits the image display."""

		self.clear_cal_display()

	def alert_printf(self,msg):

		"""
		Prints alert message.

		Arguments:
		msg		--	The message to be printed.
		"""

		print "eyelink_graphics.alert_printf(): %s" % msg

	def setup_image_display(self, width, height):

		"""
		Initializes the buffer that will contain the camera image.

		Arguments:
		width		--	The width of the image.
		height		--	The height of the image.
		"""

		self.size = (width,height)
		self.clear_cal_display()
		self.last_mouse_state = -1
		self.imagebuffer = array.array('l')

	def image_title(self, text):

		"""
		TODO: What does this do?

		Arguments:
		text	--	Unknown.
		"""

		pass

	def draw_image_line(self, width, line, totlines, buff):

		"""
		Draws a single eye video frame, line by line.

		Arguments:

		width		--	Width of the video.
		line		--	Line nr of current line.
		totlines	--	Total lines in video.
		buff		--	Frame buffer. The full image is usually 192x160 px.
		"""

		# If the buffer hasn't been filled yet, add a line.
		for i in range(width):
			try:
				self.imagebuffer.append(self.pal[buff[i]])
			except:
				pass
		# If the buffer is full, push it to the display.
		if line == totlines:
			# First create a PIL image, then convert it to a PyGame image, and
			# then save it to a temporary file on disk. This juggling with
			# formats is necessary to show the image without distortions under
			# (so far) all conditions. Surprisingly, it doesn't cause any
			# appreciable delays, relative to directly invoking PyGame or
			# PsychoPy functions.
			bufferv = self.imagebuffer.tostring()
			img = Image.new("RGBX", self.size)
			imgsz = self.xc, self.yc
			img.fromstring(bufferv)
			img = img.resize(imgsz)
			img = pygame.image.fromstring(img.tostring(), imgsz, 'RGBX')
			pygame.image.save(img, self.tmp_file)
			# ... and then show the image.
			self.screen.clear()
			self.screen.draw_image(self.tmp_file)
			self.display.fill(self.screen)
			self.display.show()
			# Clear the buffer for the next round!
			self.imagebuffer = array.array('l')

	def set_image_palette(self, r, g, b):

		"""
		Sets the image palette.

		TODO: What this function actually does is highly mysterious. Figure it
		out!

		Arguments:
		r		--	The red channel.
		g		--	The green channel.
		b		--	The blue channel.
		"""

		self.imagebuffer = array.array('l')
		self.clear_cal_display()
		sz = len(r)
		i = 0
		self.pal = []
		while i < sz:
			rf = int(b[i])
			gf = int(g[i])
			bf = int(r[i])
			self.pal.append((rf<<16) | (gf<<8) | (bf))
			i += 1
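		# Each palette entry packs three 8-bit channels into a single integer
		# (high byte << 16 | middle byte << 8 | low byte). Note that the r and
		# b inputs are swapped above, presumably to match the byte order that
		# draw_image_line() expects when it interprets the buffer as 'RGBX'.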
Example #12
0
class libeyelink(BaseEyeTracker):

    MAX_TRY = 100

    def __init__(self,
                 display,
                 resolution=settings.DISPSIZE,
                 data_file=settings.LOGFILENAME + ".edf",
                 fg_color=settings.FGC,
                 bg_color=settings.BGC,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 blink_threshold=settings.BLINKTHRESH,
                 force_drift_correct=True,
                 pupil_size_mode=settings.EYELINKPUPILSIZEMODE,
                 **args):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # try to import copy docstring (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, libeyelink)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        global _eyelink

        # Make sure that we have a valid data file. The local_data_file may
        # contain a folder. The eyelink_data_file is only a basename, i.e.
        # without folder. The eyelink_data_file must be at most eight characters
        # and end with a `.edf` extension.

        self.local_data_file = data_file
        self.eyelink_data_file = os.path.basename(data_file)
        stem, ext = os.path.splitext(self.eyelink_data_file)
        if len(stem) > 8 or ext.lower() != '.edf':
            raise Exception(
                "The EyeLink cannot handle filenames longer than eight "
                "characters (excluding '.edf' extension).")

        # properties
        self.display = display
        self.fontsize = 18
        self.scr = Screen(disptype=settings.DISPTYPE, mousevisible=False)
        self.kb = Keyboard(keylist=["escape", "q"], timeout=1)
        self.resolution = resolution
        self.recording = False
        self.saccade_velocity_treshold = saccade_velocity_threshold
        self.saccade_acceleration_treshold = saccade_acceleration_threshold
        self.blink_threshold = blink_threshold
        self.eye_used = None
        self.left_eye = 0
        self.right_eye = 1
        self.binocular = 2
        self.pupil_size_mode = pupil_size_mode
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        # degrees; maximal distance from fixation start (if gaze wanders beyond
        # this, fixation has stopped)
        self.fixtresh = 1.5
        # milliseconds; amount of time gaze has to linger within self.fixtresh
        # to be marked as a fixation
        self.fixtimetresh = 100
        # degrees per second; saccade velocity threshold
        self.spdtresh = self.saccade_velocity_treshold
        # degrees per second**2; saccade acceleration threshold
        self.accthresh = self.saccade_acceleration_treshold
        self.set_detection_type(eventdetection)
        # weighted distance, used for determining whether a movement is due to
        # measurement error (1 is ok, higher is more conservative and will
        # result in only larger saccades to be detected)
        self.weightdist = 10
        # distance between participant and screen in cm
        self.screendist = settings.SCREENDIST
        # physical size of the screen in cm
        self.screensize = settings.SCREENSIZE
        self.pixpercm = (self.resolution[0]/float(self.screensize[0]) + \
         self.resolution[1]/float(self.screensize[1])) / 2.0
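        # (pixels-per-cm is estimated separately from the horizontal and
        # vertical resolution/size and then averaged; the two estimates only
        # differ if the display's pixels are not square)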
        # only initialize eyelink once
        if _eyelink == None:
            try:
                _eyelink = pylink.EyeLink()
            except:
                raise Exception(
                    "Error in libeyelink.libeyelink.__init__(): Failed to "
                    "connect to the tracker!")
        # determine software version of tracker
        self.tracker_software_ver = 0
        self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
        if self.eyelink_ver == 3:
            tvstr = pylink.getEYELINK().getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            self.tracker_software_ver = int(float(tvstr[(vindex + \
             len("EYELINK CL")):].strip()))
        if self.eyelink_ver == 1:
            self.eyelink_model = 'EyeLink I'
        elif self.eyelink_ver == 2:
            self.eyelink_model = 'EyeLink II'
        elif self.eyelink_ver == 3:
            self.eyelink_model = 'EyeLink 1000'
        else:
            self.eyelink_model = 'EyeLink (model unknown)'
        # Open graphics
        self.eyelink_graphics = EyelinkGraphics(self, _eyelink)
        pylink.openGraphicsEx(self.eyelink_graphics)
        # Optionally force drift correction. For some reason this must be done
        # as (one of) the first things, otherwise a segmentation fault occurs.
        if force_drift_correct:
            try:
                self.send_command('driftcorrect_cr_disable = OFF')
            except:
                print('Failed to force drift correction (EyeLink 1000 only)')
        # Set pupil-size mode
        if self.pupil_size_mode == 'area':
            pylink.getEYELINK().setPupilSizeDiameter(False)
        elif self.pupil_size_mode == 'diameter':
            pylink.getEYELINK().setPupilSizeDiameter(True)
        else:
            raise Exception(
             "pupil_size_mode should be 'area' or 'diameter', not %s" \
             % self.pupil_size_mode)
        pylink.getEYELINK().openDataFile(self.eyelink_data_file)
        pylink.flushGetkeyQueue()
        pylink.getEYELINK().setOfflineMode()
        # notify eyelink of display resolution
        self.send_command("screen_pixel_coords = 0 0 %d %d" % \
         (self.resolution[0], self.resolution[1]))
        # get some configuration stuff
        if self.eyelink_ver >= 2:
            self.send_command("select_parser_configuration 0")
            if self.eyelink_ver == 2:  # turn off scenelink camera stuff
                self.send_command("scene_camera_gazemap = NO")
        # set EDF file contents (this specifies which data is written to the EDF
        # file)
        self.send_command(
            "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
        )
        if self.tracker_software_ver >= 4:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        # set link data (this specifies which data is sent through the link and
        # thus can be used in gaze contingent displays)
        self.send_command(
            "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if self.tracker_software_ver >= 4:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
        # not quite sure what this means (according to Sebastiaan Mathot, it
        # might be the button that is used to end drift correction?)
        self.send_command("button_function 5 'accept_target_fixation'")

        if not self.connected():
            raise Exception(
                "Error in libeyelink.libeyelink.__init__(): Failed to connect "
                "to the eyetracker!")

    def send_command(self, cmd):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        pylink.getEYELINK().sendCommand(cmd)

    def log(self, msg):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        pylink.getEYELINK().sendMessage(msg)

    def status_msg(self, msg):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        print('status message: %s' % msg)
        pylink.getEYELINK().sendCommand("record_status_message '%s'" % msg)

    def connected(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        return pylink.getEYELINK().isConnected()

    def calibrate(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        while True:
            if self.recording:
                raise Exception(
                    "Error in libeyelink.libeyelink.calibrate(): Trying to "
                    "calibrate after recording has started!")

            # # # # #
            # EyeLink calibration and validation

            # attempt calibrate; confirm abort when esc pressed
            while True:
                self.eyelink_graphics.esc_pressed = False
                pylink.getEYELINK().doTrackerSetup()
                if not self.eyelink_graphics.esc_pressed:
                    break
                self.confirm_abort_experiment()

            # If we are using the built-in EyeLink event detection, we don't need
            # the RMS calibration routine.
            if self.eventdetection == 'native':
                return

            # # # # #
            # RMS calibration
            while True:
                # present instructions
                self.display.fill()  # clear display
                self.scr.draw_text(text= \
                 "Noise calibration: please look at the dot\n\n(press space to start)",
                 pos=(self.resolution[0]/2, int(self.resolution[1]*0.2)),
                 center=True, fontsize=self.fontsize)
                self.scr.draw_fixation(fixtype='dot')
                self.display.fill(self.scr)
                self.display.show()
                self.scr.clear()  # clear screen again

                # wait for spacepress
                self.kb.get_key(keylist=['space'], timeout=None)

                # start recording
                self.log("PYGAZE RMS CALIBRATION START")
                self.start_recording()

                # show fixation
                self.display.fill()
                self.scr.draw_fixation(fixtype='dot')
                self.display.fill(self.scr)
                self.display.show()
                self.scr.clear()

                # wait for a bit, to allow participant to fixate
                clock.pause(500)

                # get samples
                # samplelist, prefilled with 1 sample to prevent sl[-1] from producing
                # an error; first sample will be ignored for RMS calculation
                sl = [self.sample()]
                t0 = clock.get_time()  # starting time
                while clock.get_time() - t0 < 1000:
                    s = self.sample()  # sample
                    if s != sl[-1] and s != (-1, -1) and s != (0, 0):
                        sl.append(s)

                # stop recording
                self.log("PYGAZE RMS CALIBRATION END")
                self.stop_recording()

                # calculate RMS noise
                Xvar = []
                Yvar = []
                for i in range(2, len(sl)):
                    Xvar.append((sl[i][0] - sl[i - 1][0])**2)
                    Yvar.append((sl[i][1] - sl[i - 1][1])**2)
                if Xvar and Yvar:  # check if properly recorded to avoid risk of division by zero error
                    XRMS = (sum(Xvar) / len(Xvar))**0.5
                    YRMS = (sum(Yvar) / len(Yvar))**0.5
                    self.pxdsttresh = (XRMS, YRMS)

                    # recalculate thresholds (degrees to pixels)
                    self.pxfixtresh = deg2pix(self.screendist, self.fixtresh,
                                              self.pixpercm)
                    self.pxspdtresh = deg2pix(
                        self.screendist, self.spdtresh,
                        self.pixpercm) / 1000.0  # in pixels per millisecond
                    self.pxacctresh = deg2pix(
                        self.screendist, self.accthresh,
                        self.pixpercm) / 1000.0  # in pixels per millisecond**2
                    return
                else:  # if nothing recorded, display message saying so
                    self.display.fill()
                    self.scr.draw_text(text = \
                     "Noise calibration failed.\n\nPress r to retry,\nor press space to return to calibration screen.", \
                     pos=(self.resolution[0]/2, int(self.resolution[1]*0.2)), \
                     center=True, fontsize=self.fontsize)
                    self.display.fill(self.scr)
                    self.display.show()
                    self.scr.clear()
                    # wait for space or r press, if r restart noise calibration, if space return to calibration menu
                    keypressed = self.kb.get_key(keylist=['space', 'r'],
                                                 timeout=None)
                    if keypressed[0] == 'space':
                        break

    def drift_correction(self, pos=None, fix_triggered=False):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.drift_correction(): Trying to "
                "perform drift correction after recording has started!")
        if not self.connected():
            raise Exception(
                "Error in libeyelink.libeyelink.drift_correction(): The "
                "eyelink is not connected!")
        if pos == None:
            pos = self.resolution[0] / 2, self.resolution[1] / 2
        if fix_triggered:
            return self.fix_triggered_drift_correction(pos)
        return self.manual_drift_correction(pos)

    def manual_drift_correction(self, pos):
        """
		Performs a manual, i.e. spacebar-triggered drift correction.

		Arguments:
		pos		--	The position for the drift-correction target.

		Returns:
		True if drift correction was successful, False otherwise.
		"""

        self.draw_drift_correction_target(pos[0], pos[1])
        self.eyelink_graphics.esc_pressed = False
        try:
            # The 0 parameters indicate that the display should not be cleared
            # and we should not be allowed to fall back to the set-up screen.
            error = pylink.getEYELINK().doDriftCorrect(pos[0], pos[1], 0, 0)
        except:
            error = -1
        # A 0 exit code means successful drift correction
        if error == 0:
            return True
        # If escape was pressed, we present the confirm abort screen
        if self.eyelink_graphics.esc_pressed:
            self.confirm_abort_experiment()
        # If 'q' was pressed, we drop back to the calibration screen
        else:
            self.calibrate()
        return False

    def prepare_drift_correction(self, pos):
        """Puts the tracker in drift correction mode"""

        # start collecting samples in drift correction mode
        self.send_command("heuristic_filter = ON")
        self.send_command("drift_correction_targets = %d %d" % pos)
        self.send_command("start_drift_correction data = 0 0 1 0")
        pylink.msecDelay(50)
        # wait for a bit until samples start coming in (again, not sure if this
        # is indeed what's going on)
        if not pylink.getEYELINK().waitForBlockStart(100, 1, 0):
            print(
                "WARNING libeyelink.libeyelink.prepare_drift_correction(): "
                "Failed to perform drift correction (waitForBlockStart error)")

    def fix_triggered_drift_correction(self,
                                       pos=None,
                                       min_samples=30,
                                       max_dev=60,
                                       reset_threshold=10):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.fix_triggered_drift_correction(): "
                "Trying to perform drift correction after recording has started!"
            )

        self.recording = True
        if pos is None:
            pos = self.resolution[0] / 2, self.resolution[1] / 2
        self.prepare_drift_correction(pos)
        self.draw_drift_correction_target(pos[0], pos[1])

        # loop until we have enough samples
        lx = []
        ly = []
        while len(lx) < min_samples:

            # Check whether the EyeLink is put into set-up mode on the EyeLink
            # PC and, if so, jump to the calibration menu.
            if pylink.getEYELINK().getCurrentMode() == pylink.IN_SETUP_MODE:
                self.recording = False
                self.calibrate()
                print(
                    "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                    "'q' pressed")
                return False

            # pressing escape enters the calibration screen
            resp = self.kb.get_key(keylist=["escape", "q"], timeout=1)[0]
            if resp == 'escape':
                self.recording = False
                self.confirm_abort_experiment()
                print(
                    "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                    "'escape' pressed")
                return False
            elif resp == 'q':
                self.recording = False
                self.calibrate()
                print(
                    "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                    "'q' pressed")
                return False
            # collect a sample
            x, y = self.sample()
            if len(lx) == 0 or x != lx[-1] or y != ly[-1]:
                # if present sample deviates too much from previous sample,
                # start from scratch.
                if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or \
                 abs(y - ly[-1]) > reset_threshold):
                    lx = []
                    ly = []
                # Collect a sample
                else:
                    lx.append(x)
                    ly.append(y)
            # If we have enough samples to perform a drift correction ...
            if len(lx) == min_samples:
                avg_x = sum(lx) / len(lx)
                avg_y = sum(ly) / len(ly)
                d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5
                # emulate spacebar press on success
                pylink.getEYELINK().sendKeybutton(32, 0, pylink.KB_PRESS)
                # getCalibrationResult() returns 0 on success and an exception
                # or a non-zero value otherwise
                result = -1
                try:
                    result = pylink.getEYELINK().getCalibrationResult()
                except:
                    lx = []
                    ly = []
                    print(
                        "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                        "try again")
                if result != 0:
                    try:
                        result = pylink.getEYELINK().getCalibrationResult()
                    except:
                        lx = []
                        ly = []
                        print(
                            "libeyelink.libeyelink.fix_triggered_drift_correction(): "
                            "try again")
        # apply drift correction
        pylink.getEYELINK().applyDriftCorrect()
        self.recording = False
        print(
            "libeyelink.libeyelink.fix_triggered_drift_correction(): success")
        return True
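
The sample-collection loop above can be summarised in isolation: restart whenever consecutive samples jump by more than a reset threshold, average the first min_samples positions, and compare the mean to the target. Note that the method above computes the deviation d and then lets the tracker accept or reject the emulated spacebar press; the self-contained sketch below (all names hypothetical) folds that acceptance into a simple distance check.

def stable_fixation(samples, target, min_samples=30, max_dev=60,
                    reset_threshold=10):
    """Return True once `samples` (an iterable of (x, y) tuples) contains a
    run of `min_samples` positions whose mean lies within `max_dev` pixels
    of `target`, restarting the run on jumps larger than `reset_threshold`."""
    lx, ly = [], []
    for x, y in samples:
        if lx and (abs(x - lx[-1]) > reset_threshold or
                   abs(y - ly[-1]) > reset_threshold):
            lx, ly = [], []  # gaze jumped: start the run over
        lx.append(x)
        ly.append(y)
        if len(lx) >= min_samples:
            avg_x = sum(lx) / len(lx)
            avg_y = sum(ly) / len(ly)
            d = ((avg_x - target[0]) ** 2 + (avg_y - target[1]) ** 2) ** 0.5
            return d <= max_dev
    return False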

    def start_recording(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.recording = True
        i = 0
        while True:
            # params: write samples, write event, send samples, send events
            print(u'starting recording ...')
            error = pylink.getEYELINK().startRecording(1, 1, 1, 1)
            print(u'returned %s' % error)
            if not error:
                break
            if i > self.MAX_TRY:
                # give up: close the connection, end timing, and raise
                self.close()
                clock.expend()
                raise Exception(
                    "Error in libeyelink.libeyelink.start_recording(): Failed "
                    "to start recording!")
            i += 1
            print(
                ("WARNING libeyelink.libeyelink.start_recording(): Failed to "
                 "start recording (attempt %d of %d)") % (i, self.MAX_TRY))
            pylink.msecDelay(100)
        # switch to real-time mode to give time-critical operations higher priority
        print(u'Start realtime mode ...')
        pylink.msecDelay(100)
        pylink.beginRealTimeMode(100)
        # wait a bit until samples start coming in
        print(u'Wait for block start ...')
        pylink.msecDelay(100)
        if not pylink.getEYELINK().waitForBlockStart(100, 1, 0):
            raise Exception(
                "Error in libeyelink.libeyelink.start_recording(): Failed to "
                "start recording (waitForBlockStart error)!")
        print(u'done ...')

    def stop_recording(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        print(u'stopping recording ...')
        self.recording = False
        pylink.endRealTimeMode()
        pylink.getEYELINK().setOfflineMode()
        pylink.msecDelay(500)
        print(u'done ...')
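
A minimal sketch of the intended recording life cycle, using the standard-library time module instead of the library's own clock to stay self-contained; `tracker` is assumed to be an initialised instance of the class above and the two-second duration is arbitrary:

import time

def record_gaze(tracker, duration=2.0):
    """Record gaze samples for `duration` seconds and return them."""
    samples = []
    tracker.start_recording()
    try:
        t0 = time.time()
        while time.time() - t0 < duration:
            samples.append(tracker.sample())  # (x, y) in pixels; (-1, -1) if invalid
    finally:
        tracker.stop_recording()
    return samples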

    def close(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.eyelink_graphics.close()
        if self.recording:
            self.stop_recording()
        # close data file and transfer it to the experimental PC
        print("libeyelink.libeyelink.close(): Closing data file")
        pylink.getEYELINK().closeDataFile()
        pylink.msecDelay(500)
        print("libeyelink.libeyelink.close(): Transferring %s to %s" \
         % (self.eyelink_data_file, self.local_data_file))
        # During data transfer, suppress output
        _out = sys.stdout
        with open(os.devnull, 'w') as fd:
            sys.stdout = fd
            try:
                pylink.getEYELINK().receiveDataFile(self.eyelink_data_file,
                                                    self.local_data_file)
            finally:
                sys.stdout = _out
        pylink.msecDelay(500)
        print("libeyelink.libeyelink.close(): Closing eyelink")
        pylink.getEYELINK().close()
        pylink.msecDelay(500)

    def set_eye_used(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.eye_used = pylink.getEYELINK().eyeAvailable()
        if self.eye_used == self.right_eye:
            self.log_var("eye_used", "right")
        elif self.eye_used == self.left_eye or self.eye_used == self.binocular:
            self.log_var("eye_used", "left")
            self.eye_used = self.left_eye
        else:
            print("WARNING libeyelink.libeyelink.set_eye_used(): Failed to "
                  "determine which eye is being recorded")

    def pupil_size(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if not self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.pupil_size(): Recording was "
                "not started before collecting eyelink data!")
        if self.eye_used is None:
            self.set_eye_used()
        # get newest sample
        s = pylink.getEYELINK().getNewestSample()
        # check if sample is new
        if s is not None:
            # right eye
            if self.eye_used == self.right_eye and s.isRightSample():
                ps = s.getRightEye().getPupilSize()
            # left eye
            elif self.eye_used == self.left_eye and s.isLeftSample():
                ps = s.getLeftEye().getPupilSize()
            # invalid
            else:
                ps = -1
            # set new pupil size as previous pupil size
            self.prevps = ps
        # if no new sample is available, use old data
        else:
            ps = self.prevps
        return ps

    def sample(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if not self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.sample(): Recording was not "
                "started before collecting eyelink data!")
        if self.eye_used is None:
            self.set_eye_used()
        s = pylink.getEYELINK().getNewestSample()
        if s is not None:
            if self.eye_used == self.right_eye and s.isRightSample():
                gaze = s.getRightEye().getGaze()
            elif self.eye_used == self.left_eye and s.isLeftSample():
                gaze = s.getLeftEye().getGaze()
            else:
                gaze = (-1, -1)
            self.prevsample = gaze[:]
        else:
            gaze = self.prevsample[:]
        return gaze

    def set_detection_type(self, eventdetection):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if eventdetection in ['pygaze', 'native']:
            self.eventdetection = eventdetection

        return (self.eventdetection, self.eventdetection, self.eventdetection)

    def _get_eyelink_clock_async(self):
        """
		Retrieve time differenece between tracker timestamps and
		current clock time upheld in the pygaze environment.

		Note that this is not guaranteed to be a static time difference, the
		clocks might run at different speeds. Therefore you should consider
		running this function every time you utilize on this time difference.

		Returns:
		The tracker time minus the clock time
		"""
        return pylink.getEYELINK().trackerTime() - clock.get_time()
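
The offset returned here is typically subtracted from a tracker timestamp to express it on the local PyGaze clock; because the clocks may drift, the offset should be measured close to where it is used. A tiny illustration with made-up numbers:

def tracker_to_local_time(tracker_timestamp, clock_offset):
    """Convert a tracker timestamp to local clock time, given the offset
    `tracker_time - local_time` measured just beforehand."""
    return tracker_timestamp - clock_offset

# e.g. an offset of 1200000 ms and an event stamped at 1203500 ms on the
# tracker clock correspond to 3500 ms on the local clock
assert tracker_to_local_time(1203500, 1200000) == 3500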

    def wait_for_event(self, event):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        if not self.recording:
            raise Exception(
                "Error in libeyelink.libeyelink.wait_for_event(): Recording "
                "was not started before collecting eyelink data!")

        if self.eye_used is None:
            self.set_eye_used()
        if self.eventdetection == 'native':
            # since the link buffer may not have been polled for a while, old
            # data may have accumulated in the buffer -- so ignore events that
            # are old:
            t0 = clock.get_time()  # time of call
            while True:
                d = pylink.getEYELINK().getNextData()
                if d == event:
                    float_data = pylink.getEYELINK().getFloatData()
                    # corresponding clock_time
                    tc = float_data.getTime() - self._get_eyelink_clock_async()
                    if tc > t0:
                        return tc, float_data

        if event == 5:    # pylink.STARTSACC
            outcome = self.wait_for_saccade_start()
        elif event == 6:  # pylink.ENDSACC
            outcome = self.wait_for_saccade_end()
        elif event == 7:  # pylink.STARTFIX
            outcome = self.wait_for_fixation_start()
        elif event == 8:  # pylink.ENDFIX
            outcome = self.wait_for_fixation_end()
        elif event == 3:  # pylink.STARTBLINK
            outcome = self.wait_for_blink_start()
        elif event == 4:  # pylink.ENDBLINK
            outcome = self.wait_for_blink_end()
        else:
            raise Exception(
                ("Error in libeyelink.libeyelink.wait_for_event: eventcode %s "
                 "is not supported") % event)
        return outcome

    def wait_for_saccade_start(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.STARTSACC)
            return t, d.getStartGaze()

        # # # # #
        # PyGaze method

        else:

            # get starting position (no blinks)
            newpos = self.sample()
            while not self.is_valid_sample(newpos):
                newpos = self.sample()
            # get starting time, position, intersampledistance, and velocity
            t0 = clock.get_time()
            prevpos = newpos[:]
            s = 0
            v0 = 0

            # get samples
            saccadic = False
            while not saccadic:
                # get new sample
                newpos = self.sample()
                t1 = clock.get_time()
                if self.is_valid_sample(newpos) and newpos != prevpos:
                    # check if distance is larger than precision error
                    sx = newpos[0] - prevpos[0]
                    sy = newpos[1] - prevpos[1]
                    # weighted distance: (sx/tx)**2 + (sy/ty)**2 > 1 means
                    # movement larger than RMS noise
                    if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 \
                     > self.weightdist:
                        # calculate distance
                        # intersampledistance = speed in pixels/ms
                        s = ((sx)**2 + (sy)**2)**0.5
                        # calculate velocity
                        v1 = s / (t1 - t0)
                        # calculate acceleration
                        a = (v1 - v0) / (t1 - t0
                                         )  # acceleration in pixels/ms**2
                        # check if either velocity or acceleration are above
                        # threshold values
                        if v1 > self.pxspdtresh or a > self.pxacctresh:
                            saccadic = True
                            spos = prevpos[:]
                            stime = clock.get_time()
                        # update previous values
                        t0 = copy.copy(t1)
                        v0 = copy.copy(v1)
                    # update previous sample
                    prevpos = newpos[:]
            return stime, spos
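
The PyGaze branch above flags a saccade as soon as sample-to-sample velocity or acceleration exceeds a threshold. The same idea applied to a pre-recorded list of (t, x, y) samples, as a self-contained sketch with hypothetical threshold names:

def detect_saccade_onset(samples, speed_thresh, accel_thresh):
    """Return the (time, position) of the first sample at which velocity or
    acceleration exceeds its threshold, or None if no saccade is found.
    `samples` is a list of (t, x, y) with t in ms and x, y in pixels."""
    v0 = 0.0
    for (t0, x0, y0), (t1, x1, y1) in zip(samples, samples[1:]):
        dt = t1 - t0
        if dt <= 0:
            continue
        dist = ((x1 - x0) ** 2 + (y1 - y0) ** 2) ** 0.5
        v1 = dist / dt              # velocity in pixels per ms
        a = (v1 - v0) / dt          # acceleration in pixels per ms**2
        if v1 > speed_thresh or a > accel_thresh:
            return t1, (x0, y0)     # onset position taken from the previous sample, as above
        v0 = v1
    return None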

    def wait_for_saccade_end(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.ENDSACC)
            return t, d.getStartGaze(), d.getEndGaze()

        # # # # #
        # PyGaze method

        else:

            # get starting position (no blinks)
            t0, spos = self.wait_for_saccade_start()
            # get valid sample
            prevpos = self.sample()
            while not self.is_valid_sample(prevpos):
                prevpos = self.sample()
            # get starting time, intersample distance, and velocity
            t1 = clock.get_time()
            # = intersample distance = speed in px/sample
            s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])**2)**0.5
            v0 = s / (t1 - t0)
            # run until velocity and acceleration go below threshold
            saccadic = True
            while saccadic:
                # get new sample
                newpos = self.sample()
                t1 = clock.get_time()
                if self.is_valid_sample(newpos) and newpos != prevpos:
                    # calculate distance
                    # = speed in pixels/sample
                    s = ((newpos[0]-prevpos[0])**2 + \
                     (newpos[1]-prevpos[1])**2)**0.5
                    # calculate velocity
                    v1 = s / (t1 - t0)
                    # calculate acceleration
                    # acceleration in pixels/sample**2 (actually is
                    # v1-v0 / t1-t0; but t1-t0 = 1 sample)
                    a = (v1 - v0) / (t1 - t0)
                    # check if velocity and acceleration are below threshold
                    if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and \
                     a < 0):
                        saccadic = False
                        epos = newpos[:]
                        etime = clock.get_time()
                    # update previous values
                    t0 = copy.copy(t1)
                    v0 = copy.copy(v1)
                # update previous sample
                prevpos = newpos[:]

            return etime, spos, epos

    def wait_for_fixation_start(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.STARTFIX)
            return t, d.getTime(), d.getStartGaze()

        # # # # #
        # PyGaze method

        else:

            # function assumes a 'fixation' has started when gaze position
            # remains reasonably stable for self.fixtimetresh

            # get starting position
            spos = self.sample()
            while not self.is_valid_sample(spos):
                spos = self.sample()

            # get starting time
            t0 = clock.get_time()

            # wait for reasonably stable position
            moving = True
            while moving:
                # get new sample
                npos = self.sample()
                # check if sample is valid
                if self.is_valid_sample(npos):
                    # check if new sample is too far from starting position
                    if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > \
                     self.pxfixtresh**2: # Pythagoras
                        # if not, reset starting position and time
                        spos = copy.copy(npos)
                        t0 = clock.get_time()
                    # if new sample is close to starting sample
                    else:
                        # get timestamp
                        t1 = clock.get_time()
                        # check if fixation time threshold has been surpassed
                        if t1 - t0 >= self.fixtimetresh:
                            # return time and starting position
                            return t1, spos

    def wait_for_fixation_end(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.ENDFIX)
            return t, d.getTime(), d.getStartGaze()

        # # # # #
        # PyGaze method

        else:

            # function assumes that a 'fixation' has ended when a deviation of
            # more than fixtresh from the initial 'fixation' position has been
            # detected

            # get starting time and position
            stime, spos = self.wait_for_fixation_start()

            # loop until fixation has ended
            while True:
                # get new sample
                npos = self.sample()  # get newest sample
                # check if sample is valid
                if self.is_valid_sample(npos):
                    # check if sample deviates too much from starting position
                    if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > \
                     self.pxfixtresh**2: # Pythagoras
                        # break loop if deviation is too high
                        break

            return clock.get_time(), spos

    def wait_for_blink_start(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.STARTBLINK)
            return t, d.getTime()

        # # # # #
        # PyGaze method

        else:

            blinking = False

            # loop until there is a blink
            while not blinking:
                # get newest sample
                gazepos = self.sample()
                # check if it's a valid sample
                if not self.is_valid_sample(gazepos):
                    # get timestamp for possible blink start
                    t0 = clock.get_time()
                    # loop until a blink is determined, or a valid sample occurs
                    while not self.is_valid_sample(self.sample()):
                        # check if the blink threshold has been exceeded
                        if clock.get_time() - t0 >= self.blink_threshold:
                            # return timestamp of blink start
                            return t0

    def wait_for_blink_end(self):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # # # # #
        # EyeLink method

        if self.eventdetection == 'native':
            t, d = self.wait_for_event(pylink.ENDBLINK)
            return t

        # # # # #
        # PyGaze method

        else:

            blinking = True

            # loop while there is a blink
            while blinking:
                # get newest sample
                gazepos = self.sample()
                # check if it's valid
                if self.is_valid_sample(gazepos):
                    # if it is a valid sample, blinking has stopped
                    blinking = False

            # return timestamp of blink end
            return clock.get_time()

    def set_draw_calibration_target_func(self, func):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.eyelink_graphics.draw_cal_target = func

    # ***
    #
    # Internal functions below
    #
    # ***

    def is_valid_sample(self, gazepos):
        """
		Checks if the sample provided is valid, based on EyeLink specific
		criteria.

		arguments
		gazepos		--	a (x,y) gaze position tuple, as returned by
						self.sample()

		returns
		valid			--	a Boolean: True on a valid sample, False on
						an invalid sample
		"""

        # return False if a sample is invalid
        if gazepos == (-1, -1):
            return False

        # in any other case, the sample is valid
        return True

    def confirm_abort_experiment(self):
        """
		Asks for confirmation before aborting the experiment. Displays a
		confirmation screen, collects the response, and acts accordingly.

		Exceptions:
		Raises a response_error upon confirmation.

		Returns:
		False if no confirmation was given.
		"""

        # Display the confirmation screen
        scr = Screen(disptype=settings.DISPTYPE)
        kb = Keyboard(timeout=5000)
        yc = settings.DISPSIZE[1] / 2
        xc = settings.DISPSIZE[0] / 2
        ld = 40  # Line height
        scr.draw_text(u'Really abort experiment?',
                      pos=(xc, yc - 3 * ld),
                      fontsize=self.fontsize)
        scr.draw_text(u'Press \'Y\' to abort',
                      pos=(xc, yc - 0.5 * ld),
                      fontsize=self.fontsize)
        scr.draw_text(u'Press any other key or wait 5s to go to setup',
                      pos=(xc, yc + 0.5 * ld),
                      fontsize=self.fontsize)
        self.display.fill(scr)
        self.display.show()
        # process the response:
        try:
            key, time = kb.get_key()
        except:
            return False
        # if confirmation, close experiment
        if key == u'y':
            raise Exception(u'The experiment was aborted')
        self.eyelink_graphics.esc_pressed = False
        return False

    def draw_drift_correction_target(self, x, y):
        """
		Draws the drift-correction target.

		arguments

		x		--	The X coordinate
		y		--	The Y coordinate
		"""

        self.scr.clear()
        self.scr.draw_fixation(fixtype='dot',
                               colour=settings.FGC,
                               pos=(x, y),
                               pw=0,
                               diameter=12)
        self.display.fill(self.scr)
        self.display.show()
Example #13
class EyeLogicTracker(BaseEyeTracker):

## Initializes the EyeTracker object.
    def __init__(self, display,
        logfile=settings.LOGFILE, \
        eventdetection=settings.EVENTDETECTION, \
        saccade_velocity_threshold=35, \
        saccade_acceleration_threshold=9500, \
        blink_threshold=settings.BLINKTHRESH, \
        **args):

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, EyeLogicTracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        self.disp = display
        self.screen = Screen()
        self.dispsize = self.disp.dispsize # display size in pixels
        self.screensize = settings.SCREENSIZE # display size in cm
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # show a message
        self.screen.clear()
        self.screen.draw_text(
            text="Initialising the eye tracker, please wait...",
            fontsize=20)
        self.disp.fill(self.screen)
        self.disp.show()

        # output file properties
        self.logfile = logfile

        # eye tracker properties
        self._recording = Event()
        self._recording.clear()
        self._calibrated = Event()
        self._calibrated.clear()
        self.eye_used = 2 # 0=left, 1=right, 2=binocular
        self.sampleLock = Lock()
        self.lastSample = None
        self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)

        # event detection properties
        self.pxfixtresh = 50  # pixels; recomputed from self.fixtresh during calibration
        self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation

        self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection

        self._log_vars = [ \
            "timestampMicroSec", \
            "index", \
            "porFilteredX", \
            "porFilteredY", \
            "porLeftX", \
            "porLeftY", \
            "pupilRadiusLeft", \
            "porRightX", \
            "porRightY", \
            "pupilRadiusRight", \
            ]
        # Open a new log file.
        dir_name = os.path.dirname(logfile)
        file_name = os.path.basename(logfile)
        name, ext = os.path.splitext(file_name)
        self._data_file_path = os.path.join(dir_name, name+".eyelogic.csv")
        self._log_file = open(self._data_file_path, "w")
        # Write a header to the log.
        header = ["TYPE"]
        header.extend(self._log_vars)
        self._sep = ";"
        self._log_file.write("Sep="+self._sep+"\n")
        self._log_file.write(self._sep.join(map(str, header)))
        # Create a queue so that all writes to the log file go through a
        # single logging thread.
        self._logging_queue = Queue()
        self._logging_queue_empty = Event()
        self._logging_queue_empty.set()
        self._connected = Event()
        self._connected.set()
        self._log_counter = 0
        self._log_consolidation_freq = 60
        
        self._logging_thread = Thread( target=self.loggingThread, \
                name='PyGaze_EyeLogic_Logging', args=[])

        global g_api
        g_api = self

        # log
        self.log("pygaze initiation")
        #self.log("experiment = {}".format(self.description))
        #self.log("participant = {}".format(self.participant))
        self.log("display resolution = {}x{}".format(self.dispsize[0], \
            self.dispsize[1]))
        self.log("display size in cm = {}x{}".format(self.screensize[0], \
            self.screensize[1]))
        self.log("fixation threshold = {} degrees".format(self.fixtresh))
        self.log("speed threshold = {} degrees/second".format(self.spdtresh))
        self.log("acceleration threshold = {} degrees/second**2".format( \
            self.accthresh))

        # connect
        self.api = ELApi( "PyGaze" )
        self.api.registerGazeSampleCallback( gazeSampleCallback )
        self.api.registerEventCallback( eventCallback )

        resultConnect = self.api.connect()
        if (resultConnect != ELApi.ReturnConnect.SUCCESS):
            self._connected.clear()
            raise Exception("Cannot connect to EyeLogic server = {}".format(errorstringConnect(resultConnect)))
        self._connected.set()

        screenConfig = self.api.getScreenConfig()
        self.log("eye tracker is mounted on screen {}".format(screenConfig.id))
        self.rawResolution = (screenConfig.resolutionX, screenConfig.resolutionY)
        self.log("raw screen resolution = {}x{}".format(
            self.rawResolution[0], self.rawResolution[1]))
        self.log("end pygaze initiation")

        deviceConfig = self.api.getDeviceConfig()
        if (deviceConfig.deviceSerial == 0):
            raise Exception("no eye tracking device connected")
        if (len(deviceConfig.frameRates) == 0):
            raise Exception("failed to read out device configuration")
        g_api.sampleRate = deviceConfig.frameRates[0]
        g_api.sampleTime = 1000.0 / g_api.sampleRate
        g_api.log("samplerate = {} Hz".format(g_api.sampleRate))
        g_api.log("sampletime = {} ms".format(g_api.sampleTime))
        self._logging_thread.start()

        self.screen.clear()
        self.disp.fill(self.screen)
        self.disp.show()


    def loggingThread(self):
        while self._connected.is_set():
            
            # Check if the sample Queue is empty.
            if self._logging_queue.empty():
                # Signal to other Threads that the logging Queue is empty.
                if not self._logging_queue_empty.is_set():
                    self._logging_queue_empty.set()
            
            # Process data from the Queue.
            else:
                # Signal to other Threads that the Queue isn't empty.
                if self._logging_queue_empty.is_set():
                    self._logging_queue_empty.clear()
                # Get the next object from the Queue.
                sample = self._logging_queue.get()
                # Log the message string and/or the sample.
                if type(sample) in [tuple, list]:
                    self._write_tuple(sample)
                elif type(sample) == ELGazeSample:
                    self._write_sample(sample)
                else:
                    print("WARNING = Unrecognised object in log queue = '{}'".format( \
                        sample))
                # Increment the log counter.
                self._log_counter += 1
                # Check if the log file needs to be consolidated.
                if self._log_counter % self._log_consolidation_freq == 0:
                    # Internal buffer to RAM.
                    self._log_file.flush()
                    # RAM to disk.
                    os.fsync(self._log_file.fileno())
    
    def _write_sample(self, sample):
        # Construct a list with the sample data.
        line = ["DAT"]
        for var in self._log_vars:
            line.append(sample.__getattribute__(var))
        # Log the sample to the log file.
        self._log_file.write("\n" + self._sep.join(map(str, line)))

    def _write_tuple(self, tup):

        # Construct a list values that need to be logged.
        line = []
        # Add the values that need to be logged. Usually this will be ("MSG",
        # timestamp, message).
        line.extend(tup)
        # Pad the list so that it will be of equal length to the sample
        # lines, which makes it easier to be read into a spreadsheet editor
        # and by some read_csv functions.
        line.extend([""] * (len(self._log_vars) - len(line) - 1))

        # Log the line to the log file.
        self._log_file.write("\n" + self._sep.join(map(str, line)))
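
The logging machinery above follows a standard producer/consumer pattern: callbacks put samples or message tuples on a Queue, and a background Thread drains it and writes to disk, flushing every few entries. A stripped-down, self-contained sketch of that pattern using the Python 3 standard library (the file name, flush frequency, and all other names are arbitrary):

import os
from queue import Empty, Queue
from threading import Event, Thread

def writer_loop(path, queue, running, flush_every=60):
    """Append every queued item to `path` while `running` is set."""
    count = 0
    with open(path, "w") as f:
        while running.is_set() or not queue.empty():
            try:
                item = queue.get(timeout=0.1)
            except Empty:
                continue
            f.write(str(item) + "\n")
            count += 1
            if count % flush_every == 0:
                f.flush()               # internal buffer to RAM
                os.fsync(f.fileno())    # RAM to disk

running = Event()
running.set()
q = Queue()
writer = Thread(target=writer_loop, args=("log.csv", q, running))
writer.start()
q.put(("MSG", 0, "hello"))              # producer side (e.g. a sample callback)
running.clear()
writer.join()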

## Calibrates the eye tracking system.
    def calibrate(self):
        #self.screen.clear()
        #self.screen.draw_text(
        #    text="Calibrate EyeTracker",
        #    fontsize=20)
        #self.disp.fill(self.screen)
        #self.disp.show()

        if (not self._recording.is_set()):
            resultTracking = self.api.requestTracking(0)
            if (resultTracking != ELApi.ReturnStart.SUCCESS):
                raise Exception("unable to start eye tracker")

        resultCalibrate = self.api.calibrate(0)
        if (resultCalibrate != ELApi.ReturnCalibrate.SUCCESS):
            self.api.unrequestTracking()
            self.errorbeep.play()
            raise Exception("Calibration failed = {}".format(errorstringCalibrate(resultCalibrate)))
        self._calibrated.set()

        # NOISE CALIBRATION
        self.screen.clear()
        self.screen.draw_text(
            text="Noise calibration. Please look at the dot, and press any key to start.",
            fontsize=20, \
            pos=(int(self.dispsize[0]/2),int(self.dispsize[1]*0.3)))
        x = int(float(self.dispsize[0]) / 2.0)
        y = int(float(self.dispsize[1]) / 2.0)
        self.screen.draw_fixation(fixtype="dot", pos=(x,y))
        self.disp.fill(self.screen)
        self.disp.show()
        self.kb.get_key(keylist=None, timeout=None, flush=True)

        # wait for a bit, to allow participant to fixate
        clock.pause(500)

        # get distance to screen
        screendist = 0
        i = 0
        while screendist == 0 and i < self.maxtries:
            i = i+1
            self.sampleLock.acquire()
            if (self.lastSample is not None):
                if self.eye_used != 1 and self.lastSample.eyePositionLeftZ != ELInvalidValue:
                    screendist = self.lastSample.eyePositionLeftZ / 10.0 # eyePositionZ is in mm; screendist is in cm
                elif self.eye_used != 0 and self.lastSample.eyePositionRightZ != ELInvalidValue:
                    screendist = self.lastSample.eyePositionRightZ / 10.0
            self.sampleLock.release()
            clock.pause(int(self.sampleTime))
        if i >= self.maxtries:
            self.api.unrequestTracking()
            self.errorbeep.play()
            raise Exception("unable to receive gaze data for noise calibration")

        # get samples
        sl = [self.sample()] # samplelist, prefilled with 1 sample to prevent sl[-1] from producing an error; first sample will be ignored for RMS calculation
        t0 = clock.get_time() # starting time
        while clock.get_time() - t0 < 1000:
            s = self.sample() # sample
            if s[0] != -1 and s[1] != -1 and s[0] != ELInvalidValue and s[1] != ELInvalidValue:
                sl.append(s)
            clock.pause(int(self.sampleTime))
        if (len(sl) < 2):
            if (not self._recording.is_set()):
                self.api.unrequestTracking()
            return False

        # calculate RMS noise
        Xvar = []
        Yvar = []
        Xmean = 0.
        Ymean = 0.
        for i in range(2,len(sl)):
            Xvar.append((sl[i][0]-sl[i-1][0])**2)
            Yvar.append((sl[i][1]-sl[i-1][1])**2)
            Xmean += sl[i][0]
            Ymean += sl[i][1]
        XRMS = (sum(Xvar) / len(Xvar))**0.5
        YRMS = (sum(Yvar) / len(Yvar))**0.5
        Xmean = Xmean / (len(sl)-2)
        Ymean = Ymean / (len(sl)-2)
        self.pxdsttresh = (XRMS, YRMS)

        # calculate pixels per cm
        pixpercm = (self.dispsize[0]/float(self.screensize[0]) + self.dispsize[1]/float(self.screensize[1])) / 2

        # get accuracy
        accuracyPxX = abs( Xmean - x )
        accuracyPxY = abs( Ymean - y )
        self.accuracy = ( pix2deg(screendist, accuracyPxX, pixpercm), \
                          pix2deg(screendist, accuracyPxY, pixpercm) )

        # calculate thresholds based on tracker settings
        self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
        self.pxaccuracy = (accuracyPxX, accuracyPxY )
        self.pxspdtresh = deg2pix(screendist, self.spdtresh/1000.0, pixpercm) # in pixels per millisecond
        self.pxacctresh = deg2pix(screendist, self.accthresh/1000.0, pixpercm) # in pixels per millisecond**2

        ## log
        self.log("pygaze calibration")
        self.log("accuracy (degrees) = X={}, Y={}".format( \
            self.accuracy[0], self.accuracy[1] ))
        self.log("accuracy (in pixels) = X={}, Y={}".format( \
            self.pxaccuracy[0], self.pxaccuracy[1]))
        self.log("precision (RMS noise in pixels) = X={}, Y={}".format( \
            self.pxdsttresh[0], self.pxdsttresh[1]))
        self.log("distance between participant and display = {} cm".format(screendist))
        self.log("fixation threshold = {} pixels".format(self.pxfixtresh))
        self.log("speed threshold = {} pixels/ms".format(self.pxspdtresh))
        self.log("acceleration threshold = {} pixels/ms**2".format(self.pxacctresh))
        
        if (not self._recording.is_set()):
            self.api.unrequestTracking()
        return True
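
The noise-calibration block above estimates precision as the root-mean-square of successive sample-to-sample displacements and accuracy as the offset of the mean gaze position from the fixation target. The same computation in isolation (the sample list and target are hypothetical, and at least two samples are required):

def rms_noise_and_accuracy(samples, target):
    """Return ((rms_x, rms_y), (offset_x, offset_y)) for a list of (x, y)
    gaze samples collected while the participant fixated `target`."""
    dx2 = [(x1 - x0) ** 2 for (x0, _), (x1, _) in zip(samples, samples[1:])]
    dy2 = [(y1 - y0) ** 2 for (_, y0), (_, y1) in zip(samples, samples[1:])]
    rms = ((sum(dx2) / len(dx2)) ** 0.5, (sum(dy2) / len(dy2)) ** 0.5)
    mean_x = sum(x for x, _ in samples) / float(len(samples))
    mean_y = sum(y for _, y in samples) / float(len(samples))
    return rms, (abs(mean_x - target[0]), abs(mean_y - target[1]))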

## Neatly closes connection to tracker.
    def close(self):
        if self._recording.is_set():
            self.stop_recording()
            
        # Wait until the Queue is empty, or until 15 seconds have passed.
        queue_empty = self._logging_queue_empty.wait(timeout=15.0)
        if not queue_empty:
            print("WARNING = Logging Thread timeout occurred; something might have gone wrong!")
        
        # Signal to the Threads to stop.
        self._connected.clear()
        
        # Close the log file.
        self._log_file.close()
        
        # Close the connection.
        self.api.disconnect()

## Checks if the tracker is connected.
    def connected(self):
        isConnected = self.api.isConnected()
        if isConnected:
            self._connected.set()
        else:
            self._connected.clear()
        return isConnected

## Performs a drift check
    def drift_correction(self, pos=None, fix_triggered=False):
        return True

## Performs a fixation triggered drift correction by collecting
#  a number of samples and calculating the average distance from the
#  fixation position.
    def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30):
        pass

## Returns the difference between tracker time and PyGaze time,
#  which can be used to synchronize timing
    def get_eyetracker_clock_async(self):
        return 0

## Writes a message to the log file.
    def log(self, msg):
        # Get current timestamp.
        self.sampleLock.acquire()
        if self.lastSample is None:
            t = 0
        else:
            t = self.lastSample.timestampMicroSec
        self.sampleLock.release()

        # Construct a tuple, and add it to the queue.
        self._logging_queue.put(("MSG", t, msg))

## Writes a variable's name and value to the log file
    def log_var(self, var, val):
        pass

## Returns the newest pupil size sample
    def pupil_size(self):
        self.sampleLock.acquire()
        pupilSize = -1
        if (self.lastSample is not None):
            if self.eye_used == 0:
                pupilSize = 2. * self.lastSample.pupilRadiusLeft
            elif self.eye_used == 1:
                pupilSize = 2. * self.lastSample.pupilRadiusRight
            elif self.eye_used == 2:
                pupilSize = self.lastSample.pupilRadiusLeft + self.lastSample.pupilRadiusRight
        self.sampleLock.release()
        return pupilSize

## Returns newest available gaze position.
    def sample(self):
        self.sampleLock.acquire()
        por = (-1, -1)
        if (self.lastSample is not None):
            if self.eye_used == 0:
                por = (self.lastSample.porLeftX, self.lastSample.porLeftY)
            elif self.eye_used == 1:
                por = (self.lastSample.porRightX, self.lastSample.porRightY)
            elif self.eye_used == 2:
                por = (self.lastSample.porFilteredX, self.lastSample.porFilteredY)
        self.sampleLock.release()
        return por
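
sample() and pupil_size() above read the most recent callback-delivered sample under a Lock. The same pattern in miniature, with every name hypothetical; a tracker callback would call update() and the experiment loop would call latest():

from threading import Lock

class LatestSampleStore(object):
    """Thread-safe holder for the most recent gaze sample."""

    def __init__(self):
        self._lock = Lock()
        self._sample = None

    def update(self, sample):
        # called from the tracker's callback thread
        with self._lock:
            self._sample = sample

    def latest(self, default=(-1, -1)):
        # called from the experiment thread
        with self._lock:
            return self._sample if self._sample is not None else default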

# Directly sends a command to the eye tracker.
    def send_command(self, cmd):
        pass

## Set the event detection type to either PyGaze algorithms, or
#  native algorithms.
    def set_detection_type(self, eventdetection):
        # detection type for saccades, fixations, blinks (pygaze or native)
        return ('pygaze','pygaze','pygaze')

## Specifies a custom function to draw the calibration target.
    def set_draw_calibration_target_func(self, func):
        pass

## Specifies a custom function to draw the drift-correction target.
    def set_draw_drift_correction_target_func(self, func):
        pass

## Logs the eye_used variable, based on which eye was specified
#  (if both eyes are being tracked, the left eye is used)
    def set_eye_used(self):
        pass

## Starts recording.
    def start_recording(self):
        resultTracking = self.api.requestTracking(0)
        if (resultTracking != ELApi.ReturnStart.SUCCESS):
            raise Exception("unable to start eye tracker")
        self._recording.set()

## Sends a status message to the eye tracker, which is displayed in the tracker's GUI
    def status_msg(self, msg):
        pass

## Stops recording.
    def stop_recording(self):
        self.api.unrequestTracking()
        self._recording.clear()

## Waits for an event.
    def wait_for_event(self, event):
        print("waitforevent", flush=True)

        if event == 3: # STARTBLINK
            return self.wait_for_blink_start()
        elif event == 4: # ENDBLINK
            return self.wait_for_blink_end()
        elif event == 5: # STARTSACC
            return self.wait_for_saccade_start()
        elif event == 6: # ENDSACC
            return self.wait_for_saccade_end()
        elif event == 7: # STARTFIX
            return self.wait_for_fixation_start()
        elif event == 8: # ENDFIX
            return self.wait_for_fixation_end()
        else:
            raise Exception("wait_for_event({}) is not supported".format(event))

## Waits for a blink end and returns the blink ending time.
    def wait_for_blink_end(self):
        blinking = True

        # loop while there is a blink
        while blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's valid
            if self.is_valid_sample(gazepos):
                # if it is a valid sample, blinking has stopped
                blinking = False

        # return timestamp of blink end
        return clock.get_time()

## Waits for a blink start and returns the blink starting time.
    def wait_for_blink_start(self):
        blinking = False

        # loop until there is a blink
        while not blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's a valid sample
            if not self.is_valid_sample(gazepos):
                # get timestamp for possible blink start
                t0 = clock.get_time()
                # loop until a blink is determined, or a valid sample occurs
                while not self.is_valid_sample(self.sample()):
                    # check if time has surpassed BLINKTHRESH
                    if clock.get_time()-t0 >= self.blinkthresh:
                        # return timestamp of blink start
                        return t0

## Returns time and gaze position when a fixation has ended.
    def wait_for_fixation_end(self):
        print("wait_for_fixation_end", flush=True)
        # function assumes that a 'fixation' has ended when a deviation of more than fixtresh
        # from the initial 'fixation' position has been detected

        # get starting time and position
        stime, spos = self.wait_for_fixation_start()

        # loop until fixation has ended
        while True:
            # get new sample
            npos = self.sample() # get newest sample
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if sample deviates too much from starting position
                if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras
                    # break loop if deviation is too high
                    break

        return clock.get_time(), spos

## Returns starting time and position when a fixation is started.
    def wait_for_fixation_start(self):
        print("wait_for_fixation_start", flush=True)

        # function assumes a 'fixation' has started when gaze position
        # remains reasonably stable for self.fixtimetresh

        # get starting position
        spos = self.sample()
        while not self.is_valid_sample(spos):
            spos = self.sample()

        # get starting time
        t0 = clock.get_time()

        # wait for reasonably stable position
        moving = True
        while moving:
            # get new sample
            npos = self.sample()
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if new sample is too far from starting position
                if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras
                    # if not, reset starting position and time
                    spos = copy.copy(npos)
                    t0 = clock.get_time()
                # if new sample is close to starting sample
                else:
                    # get timestamp
                    t1 = clock.get_time()
                    # check if fixation time threshold has been surpassed
                    if t1 - t0 >= self.fixtimetresh:
                        # return time and starting position
                        return t1, spos

## Returns ending time, starting and end position when a saccade is
#  ended.
    def wait_for_saccade_end(self):
        # get starting position (no blinks)
        t0, spos = self.wait_for_saccade_start()
        # get valid sample
        prevpos = self.sample()
        while not self.is_valid_sample(prevpos):
            prevpos = self.sample()
        # get starting time, intersample distance, and velocity
        t1 = clock.get_time()
        s = ((prevpos[0]-spos[0])**2 + (prevpos[1]-spos[1])**2)**0.5 # = intersample distance = speed in px/sample
        v0 = s / (t1-t0)

        # run until velocity and acceleration go below threshold
        saccadic = True
        while saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # calculate distance
                s = ((newpos[0]-prevpos[0])**2 + (newpos[1]-prevpos[1])**2)**0.5 # = speed in pixels/sample
                # calculate velocity
                v1 = s / (t1-t0)
                # calculate acceleration
                a = (v1-v0) / (t1-t0) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample)
                # check if velocity and acceleration are below threshold
                if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and a < 0):
                    saccadic = False
                    epos = newpos[:]
                    etime = clock.get_time()
                # update previous values
                t0 = copy.copy(t1)
                v0 = copy.copy(v1)
            # update previous sample
            prevpos = newpos[:]

        return etime, spos, epos

## Returns starting time and starting position when a saccade is started.
    def wait_for_saccade_start(self):
        # get starting position (no blinks)
        newpos = self.sample()
        while not self.is_valid_sample(newpos):
            newpos = self.sample()
        # get starting time, position, intersampledistance, and velocity
        t0 = clock.get_time()
        prevpos = newpos[:]
        s = 0
        v0 = 0

        # get samples
        saccadic = False
        while not saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # check if distance is larger than precision error
                sx = newpos[0]-prevpos[0]; sy = newpos[1]-prevpos[1]
                if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 > self.weightdist: # weighted distance = (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise
                    # calculate distance
                    s = ((sx)**2 + (sy)**2)**0.5 # intersampledistance = speed in pixels/ms
                    # calculate velocity
                    v1 = s / (t1-t0)
                    # calculate acceleration
                    a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2
                    # check if either velocity or acceleration are above threshold values
                    if v1 > self.pxspdtresh or a > self.pxacctresh:
                        saccadic = True
                        spos = prevpos[:]
                        stime = clock.get_time()
                    # update previous values
                    t0 = copy.copy(t1)
                    v0 = copy.copy(v1)

                # update previous sample
                prevpos = newpos[:]

        return stime, spos
Example #14
    def __init__(self,
                 display,
                 address='192.168.71.50',
                 udpport=49152,
                 logfile=settings.LOGFILE,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 blink_threshold=settings.BLINKTHRESH,
                 **args):
        """Initializes a TobiiProGlassesTracker instance

		arguments
		display	--	a pygaze.display.Display instance

		keyword arguments
		address	-- internal ipv4/ipv6 address for Tobii Pro Glasses 2 (default =
				   '192.168.71.50', for IpV6 address use square brackets [fe80::xxxx:xxxx:xxxx:xxxx])
		udpport	-- UDP port number for Tobii Pro Glasses data streaming (default = 49152)
		"""

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, TobiiProGlassesTracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        # object properties
        self.disp = display
        self.screen = Screen()
        self.dispsize = settings.DISPSIZE  # display size in pixels
        self.screensize = settings.SCREENSIZE  # display size in cm
        self.screendist = settings.SCREENDIST  # distance between participant and screen in cm
        self.pixpercm = (self.dispsize[0] / float(self.screensize[0]) +
                         self.dispsize[1] / float(self.screensize[1])) / 2.0
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # output file properties
        self.outputfile = logfile
        self.description = "experiment"  # TODO: EXPERIMENT NAME
        self.participant = "participant"  # TODO: PP NAME

        # eye tracker properties
        self.eye_used = 0  # 0=left, 1=right, 2=binocular
        self.left_eye = 0
        self.right_eye = 1
        self.binocular = 2

        self.maxtries = 100  # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
        self.prevsample = (-1, -1)

        # validation properties
        self.nvalsamples = 1000  # samples for one validation point

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold  # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection
        self.set_detection_type(self.eventdetection)
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

        self.tobiiglasses = TobiiGlassesController(udpport, address)

        self.triggers_values = {}

        self.logging = False
        self.current_recording_id = None
        self.current_participant_id = None
        self.current_project_id = None
Example #15
    def __init__(self, display, logfile=settings.LOGFILE,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 blink_threshold=settings.BLINKTHRESH, **args):
        """Initializes a TobiiProTracker instance

        arguments
        display	--	a pygaze.display.Display instance

        keyword arguments
        None
        """
        self.gaze = []

        self.disp = display

        # initialize a screen
        self.screen = Screen()

        # initialize keyboard
        self.kb = Keyboard(keylist=['space', 'escape', 'q','enter'], timeout=1)

        self.recording = False

        self.screendist = settings.SCREENDIST

        if hasattr(settings, 'TRACKERSERIALNUMBER'):
            # Search for a specific eye tracker
            self.eyetrackers = [t for t in tr.find_all_eyetrackers() if t.serial_number == settings.TRACKERSERIALNUMBER]
        else:
            # Search for all eye trackers (The first one found will be selected)
            self.eyetrackers = tr.find_all_eyetrackers()

        if self.eyetrackers:
            self.eyetracker = self.eyetrackers[0]
        else:
            print("WARNING! libtobii.TobiiProTracker.__init__: no eye trackers found!")


        self.LEFT_EYE = 0
        self.RIGHT_EYE = 1
        self.BINOCULAR = 2
        self.eye_used = 0  # 0=left, 1=right, 2=binocular

        # calibration and validation points
        lb = 0.1  # left bound
        xc = 0.5  # horizontal center
        rb = 0.9  # right bound
        ub = 0.1  # upper bound
        yc = 0.5  # vertical center
        bb = 0.9  # bottom bound

        self.points_to_calibrate = [self._norm_2_px(p) for p in [(lb, ub), (xc,ub), (rb, ub), (lb,yc), (xc, yc),(rb,yc), (lb, bb),(xc,bb),(rb, bb)]]

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold  # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

        self.screensize = settings.SCREENSIZE  # display size in cm
        self.pixpercm = (self.disp.dispsize[0] / float(self.screensize[0]) + self.disp.dispsize[1] / float(self.screensize[1])) / 2.0
        self.errdist = 2  # degrees; maximal error for drift correction
        self.pxerrdist = self._deg2pix(self.screendist, self.errdist, self.pixpercm)

        self.event_data = []

        self.t0 = None
        self._write_enabled = True

        self.datafile = open("{0}_TOBII_output.tsv".format(logfile), 'w')

        # initiation report
        self.datafile.write("pygaze initiation report start\n")
        self.datafile.write("display resolution: %sx%s\n" % (self.disp.dispsize[0], self.disp.dispsize[1]))
        self.datafile.write("display size in cm: %sx%s\n" % (self.screensize[0], self.screensize[1]))
        self.datafile.write("fixation threshold: %s degrees\n" % self.fixtresh)
        self.datafile.write("speed threshold: %s degrees/second\n" % self.spdtresh)
        self.datafile.write("acceleration threshold: %s degrees/second**2\n" % self.accthresh)
        self.datafile.write("pygaze initiation report end\n")
Example #16
class TobiiProTracker(BaseEyeTracker):
    """A class for Tobii Pro EyeTracker objects"""

    def __init__(self, display, logfile=settings.LOGFILE,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 blink_threshold=settings.BLINKTHRESH, **args):
        """Initializes a TobiiProTracker instance

        arguments
        display	--	a pygaze.display.Display instance

        keyword arguments
        logfile    -- name for the eye data log file (default = LOGFILE)
        eventdetection    -- event detection mode (default = EVENTDETECTION)
        saccade_velocity_threshold    -- in degrees/second (default = 35)
        saccade_acceleration_threshold    -- in degrees/second**2
                   (default = 9500)
        blink_threshold    -- in milliseconds (default = BLINKTHRESH)
        """
        self.gaze = []

        self.disp = display

        # initialize a screen
        self.screen = Screen()

        # initialize keyboard
        self.kb = Keyboard(keylist=['space', 'escape', 'q','enter'], timeout=1)

        self.recording = False

        self.screendist = settings.SCREENDIST

        if hasattr(settings, 'TRACKERSERIALNUMBER'):
            # Search for a specific eye tracker
            self.eyetrackers = [t for t in tr.find_all_eyetrackers() if t.serial_number == settings.TRACKERSERIALNUMBER]
        else:
            # Search for all eye trackers (The first one found will be selected)
            self.eyetrackers = tr.find_all_eyetrackers()

        if self.eyetrackers:
            self.eyetracker = self.eyetrackers[0]
        else:
            print("WARNING! libtobii.TobiiProTracker.__init__: no eye trackers found!")


        self.LEFT_EYE = 0
        self.RIGHT_EYE = 1
        self.BINOCULAR = 2
        self.eye_used = 0  # 0=left, 1=right, 2=binocular

        # calibration and validation points
        lb = 0.1  # left bound
        xc = 0.5  # horizontal center
        rb = 0.9  # right bound
        ub = 0.1  # upper bound
        yc = 0.5  # vertical center
        bb = 0.9  # bottom bound

        self.points_to_calibrate = [self._norm_2_px(p) for p in [(lb, ub), (xc,ub), (rb, ub), (lb,yc), (xc, yc),(rb,yc), (lb, bb),(xc,bb),(rb, bb)]]
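        # i.e. a 3 x 3 grid of calibration targets at 10%, 50% and 90% of the
        # display width and height, converted to pixel coordinates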

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold  # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

        self.screensize = settings.SCREENSIZE  # display size in cm
        self.pixpercm = (self.disp.dispsize[0] / float(self.screensize[0]) + self.disp.dispsize[1] / float(self.screensize[1])) / 2.0
        self.errdist = 2  # degrees; maximal error for drift correction
        self.pxerrdist = self._deg2pix(self.screendist, self.errdist, self.pixpercm)

        self.event_data = []

        self.t0 = None
        self._write_enabled = True

        self.datafile = open("{0}_TOBII_output.tsv".format(logfile), 'w')

        # initiation report
        self.datafile.write("pygaze initiation report start\n")
        self.datafile.write("display resolution: %sx%s\n" % (self.disp.dispsize[0], self.disp.dispsize[1]))
        self.datafile.write("display size in cm: %sx%s\n" % (self.screensize[0], self.screensize[1]))
        self.datafile.write("fixation threshold: %s degrees\n" % self.fixtresh)
        self.datafile.write("speed threshold: %s degrees/second\n" % self.spdtresh)
        self.datafile.write("acceleration threshold: %s degrees/second**2\n" % self.accthresh)
        self.datafile.write("pygaze initiation report end\n")

    def _norm_2_px(self, normalized_point):
        """Converts an (x, y) point from Tobii's normalized Active Display
        Area coordinates ((0, 0) = top-left, (1, 1) = bottom-right) to
        screen pixels."""
        return (round(normalized_point[0] * self.disp.dispsize[0], 0), round(normalized_point[1] * self.disp.dispsize[1], 0))

    def _px_2_norm(self, pixelized_point):
        """Converts an (x, y) point from screen pixels to normalized Active
        Display Area coordinates."""
        return (pixelized_point[0] / self.disp.dispsize[0], pixelized_point[1] / self.disp.dispsize[1])

    def _mean(self, array):
        """Returns the mean of the non-None values in array (None if the
        array is empty)."""
        if array:
            a = [s for s in array if s is not None]
            return sum(a) / float(len(a))

    def _deg2pix(self, cmdist, angle, pixpercm):
        """Converts a visual angle in degrees to pixels:
        pixels = pixpercm * tan(radians(angle)) * cmdist."""
        return pixpercm * math.tan(math.radians(angle)) * float(cmdist)
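
    # Illustrative example for _deg2pix (values assumed, not from the source):
    # at a 60 cm viewing distance and roughly 37 px/cm,
    #     _deg2pix(60, 2, 37) = 37 * tan(radians(2)) * 60, which is about 77.5 px;
    # this is the conversion used for self.pxerrdist in __init__.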

    def log_var(self, var, val):
        """Writes a variable to the log file

        arguments
        var		-- variable name
        val		-- variable value

        returns
        Nothing	-- uses native log function to include a line
                    in the log file in a "var NAME VALUE" layout
        """
        self.log("var %s %s" % (var, val))

    def set_eye_used(self):
        """Logs the eye_used variable, based on which eye was specified.

        arguments
        None

        returns
        Nothing	-- logs which eye is used by calling self.log_var, e.g.
                   self.log_var("eye_used", "right")
        """
        if self.eye_used == self.BINOCULAR:
            self.log_var("eye_used", "binocular")
        elif self.eye_used == self.RIGHT_EYE:
            self.log_var("eye_used", "right")
        else:
            self.log_var("eye_used", "left")

    def is_valid_sample(self, sample):
        """Checks if the sample provided is valid, based on Tobii specific
        criteria (for internal use)

        arguments
        sample		--	a (x,y) gaze position tuple, as returned by
                        self.sample()

        returns
        valid			--	a Boolean: True on a valid sample, False on
                        an invalid sample
        """
        return sample != (-1, -1)

    def _on_gaze_data(self, gaze_data):
        self.gaze.append(gaze_data)
        if self._write_enabled:
            self._write_sample(gaze_data)

    def start_recording(self):
        """Starts recording eye position

        arguments
        None

        returns
        None		-- sets self.recording to True when recording is
                   successfully started
        """
        if not self.t0 and self._write_enabled:
            self.t0 = tr.get_system_time_stamp()
            self._write_header()

        if self.recording:
            print("WARNING! libtobii.TobiiProTracker.start_recording: Recording already started!")
            self.gaze = []
        else:
            self.gaze = []
            self.eyetracker.subscribe_to(tr.EYETRACKER_GAZE_DATA, self._on_gaze_data, as_dictionary=True)
            time.sleep(1)
            self.recording = True

    def stop_recording(self):
        """Stop recording eye position

        arguments
        None

        returns
        Nothing	-- sets self.recording to False when recording is
                   successfully stopped
        """
        if self.recording:
            self.eyetracker.unsubscribe_from(tr.EYETRACKER_GAZE_DATA)
            self.recording = False
            self.event_data = []
        else:
            print("WARNING! libtobii.TobiiProTracker.stop_recording: A recording has not been started!")

    def sample(self):
        """Returns newest available gaze position

        The gaze position is relative to the self.eye_used currently selected.
        If both eyes are selected, the gaze position is averaged from the data of both eyes.

        arguments
        None

        returns
        sample	-- an (x,y) tuple or a (-1,-1) on an error
        """

        if not self.gaze:
            return (-1, -1)

        gaze_sample = copy.copy(self.gaze[-1])

        if self.eye_used == self.LEFT_EYE and gaze_sample["left_gaze_point_validity"]:
            return self._norm_2_px(gaze_sample["left_gaze_point_on_display_area"])
        if self.eye_used == self.RIGHT_EYE and gaze_sample["right_gaze_point_validity"]:
            return self._norm_2_px(gaze_sample["right_gaze_point_on_display_area"])

        if self.eye_used == self.BINOCULAR:
            if gaze_sample["left_gaze_point_validity"] and gaze_sample["right_gaze_point_validity"]:
                left_sample = self._norm_2_px(gaze_sample["left_gaze_point_on_display_area"])
                right_sample = self._norm_2_px(gaze_sample["right_gaze_point_on_display_area"])
                return (self._mean([left_sample[0], right_sample[0]]), self._mean([left_sample[1], right_sample[1]]))

            if gaze_sample["left_gaze_point_validity"]:
                return self._norm_2_px(gaze_sample["left_gaze_point_on_display_area"])

            if gaze_sample["right_gaze_point_validity"]:
                return self._norm_2_px(gaze_sample["right_gaze_point_on_display_area"])

        return (-1, -1)

    def pupil_size(self):
        """Returns newest available pupil size

        arguments
        None

        returns
        pupilsize	-- a float if only one eye is selected or only one eye has valid data.
                    -- a tuple with two floats if both eyes are selected.
                    -- -1 if there is no valid pupil data available.
        """
        if self.gaze:
            gaze_sample = copy.copy(self.gaze[-1])
            if self.eye_used == self.BINOCULAR:
                pupil_data = [-1, -1]
                if gaze_sample["left_pupil_validity"]:
                    pupil_data[0] = gaze_sample["left_pupil_diameter"]
                if gaze_sample["right_pupil_validity"]:
                    pupil_data[1] = gaze_sample["right_pupil_diameter"]
                return tuple(pupil_data)
            if self.eye_used == self.LEFT_EYE and gaze_sample["left_pupil_validity"]:
                return gaze_sample["left_pupil_diameter"]
            if self.eye_used == self.RIGHT_EYE and gaze_sample["right_pupil_validity"]:
                return gaze_sample["right_pupil_diameter"]
        return -1


    def ReduceBall(self, point, facteur, colour):
        """Shows a calibration target at the given position and gradually
        shrinks it, to help the participant fixate its centre; the radius
        starts at dispsize[0] / facteur and decreases on every iteration."""
        self.showPoint(point, colour, facteur)
        for i in range(0, 200, 3):
            self.screen.clear()
            self.screen.draw_circle(colour=colour, pos=point, r=int(self.disp.dispsize[0] / (facteur + i)), pw=5, fill=True)
            self.disp.fill(self.screen)
            self.disp.show()

    def showPoint(self, point, colour, facteur):
        """Draws a single filled circle of radius dispsize[0] / facteur at
        the given position and updates the display."""
        self.screen.clear()
        self.screen.draw_circle(colour=colour, pos=point, r=int(self.disp.dispsize[0] / facteur), pw=5, fill=True)
        self.disp.fill(self.screen)
        self.disp.show()


    def calibrate(self, calibrate=True, validate=True):
        """Calibrates the eye tracker.

        arguments
        None

        keyword arguments
        calibrate	--	Boolean indicating if calibration should be
                    performed (default = True).
        validate	--	Boolean indicating if validation should be performed
                    (default = True).

        returns
        success	--	returns True if calibration succeeded, or False if
                    not; in addition a calibration log is added to the
                    log file and some properties are updated (i.e. the
                    thresholds for detection algorithms)
        """
        self._write_enabled = False
        self.start_recording()
        self.screen.set_background_colour(colour=(0, 0, 0))

        if calibrate:
            origin = (int(self.disp.dispsize[0] / 4), int(self.disp.dispsize[1] / 4))
            size = (int(2 * self.disp.dispsize[0] / 4), int(2 * self.disp.dispsize[1] / 4))

            while not self.kb.get_key(keylist=['space'], flush=False)[0]:
                gaze_sample = copy.copy(self.gaze[-1])

                self.screen.clear()

                validity_colour = (255, 0, 0)

                if gaze_sample['right_gaze_origin_validity'] and gaze_sample['left_gaze_origin_validity']:
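                    # the third trackbox coordinate is the normalized depth of
                    # the eye inside the head box (0 = front edge, 1 = back
                    # edge), so the indicator only turns green while both eyes
                    # stay well within the box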
                    left_validity = 0.15 < gaze_sample['left_gaze_origin_in_trackbox_coordinate_system'][2] < 0.85
                    right_validity = 0.15 < gaze_sample['right_gaze_origin_in_trackbox_coordinate_system'][2] < 0.85
                    if left_validity and right_validity:
                        validity_colour = (0, 255, 0)

                self.screen.draw_text(text="When correctly positioned press \'space\' to start the calibration.", pos=(int(self.disp.dispsize[0] / 2), int(self.disp.dispsize[1] * 0.1)), colour=(255, 255, 255), fontsize=20)
                self.screen.draw_line(colour=validity_colour, spos=origin, epos=(origin[0] + size[0], origin[1]), pw=1)
                self.screen.draw_line(colour=validity_colour, spos=origin, epos=(origin[0], origin[1] + size[1]), pw=1)
                self.screen.draw_line(colour=validity_colour, spos=(origin[0], origin[1] + size[1]), epos=(origin[0] + size[0], origin[1] + size[1]), pw=1)
                self.screen.draw_line(colour=validity_colour, spos=(origin[0] + size[0], origin[1] + size[1]), epos=(origin[0] + size[0], origin[1]), pw=1)

                right_eye, left_eye, distance = None, None, []
                if gaze_sample['right_gaze_origin_validity']:
                    distance.append(round(gaze_sample['right_gaze_origin_in_user_coordinate_system'][2] / 10, 1))
                    right_eye = ((1 - gaze_sample['right_gaze_origin_in_trackbox_coordinate_system'][0]) * size[0] + origin[0],
                                gaze_sample['right_gaze_origin_in_trackbox_coordinate_system'][1] * size[1] + origin[1])
                    self.screen.draw_circle(colour=validity_colour, pos=right_eye, r=int(self.disp.dispsize[0] / 100), pw=5, fill=True)

                if gaze_sample['left_gaze_origin_validity']:
                    distance.append(round(gaze_sample['left_gaze_origin_in_user_coordinate_system'][2] / 10, 1))
                    left_eye = ((1 - gaze_sample['left_gaze_origin_in_trackbox_coordinate_system'][0]) * size[0] + origin[0],
                                gaze_sample['left_gaze_origin_in_trackbox_coordinate_system'][1] * size[1] + origin[1])
                    self.screen.draw_circle(colour=validity_colour, pos=left_eye, r=int(self.disp.dispsize[0] / 100), pw=5, fill=True)

                self.screen.draw_text(text="Current distance to the eye tracker: {0} cm.".format(self._mean(distance)), pos=(int(self.disp.dispsize[0] / 2), int(self.disp.dispsize[1] * 0.9)), colour=(255, 255, 255), fontsize=20)

                self.disp.fill(self.screen)
                self.disp.show()

            # # # # # #
            # # calibration

            if not self.eyetracker:
                print("WARNING! libtobii.TobiiProTracker.calibrate: no eye trackers found for the calibration!")
                self.stop_recording()
                return False

            calibration = tr.ScreenBasedCalibration(self.eyetracker)

            calibrating = True


            while calibrating:
                calibration.enter_calibration_mode()
                for point in self.points_to_calibrate:
                    self.screen.clear()
                    # CDP: colour change
                    #self.screen.draw_circle(colour='yellow', pos=point, r=int(self.disp.dispsize[0] / 100.0), pw=5, fill=True)
                    #self.screen.draw_circle(colour=(255, 0, 0), pos=point, r=int(self.disp.dispsize[0] / 400.0), pw=5, fill=True)
                    #self.disp.fill(self.screen)
                    #self.disp.show()

                    # Wait a little for user to focus.
                    # CDP: added keyboard input instead of a fixed pause
                    #clock.pause(1000)

                    self.ReduceBall(point, 30, 'yellow')

                    pressed_key = self.kb.get_key(keylist=['space', 'r'], flush=True, timeout=None)


                    normalized_point = self._px_2_norm(point)

                    if calibration.collect_data(normalized_point[0], normalized_point[1]) != tr.CALIBRATION_STATUS_SUCCESS:
                        # Try again if it didn't go well the first time.
                        # Not all eye tracker models will fail at this point, but instead fail on ComputeAndApply.
                        calibration.collect_data(normalized_point[0], normalized_point[1])

                self.screen.clear()
                self.screen.draw_text("Calculating calibration result....", colour=(255, 255, 255), fontsize=20)
                self.disp.fill(self.screen)
                self.disp.show()

                calibration_result = calibration.compute_and_apply()


                calibration.leave_calibration_mode()

                print "Compute and apply returned {0} and collected at {1} points.".\
                    format(calibration_result.status, len(calibration_result.calibration_points))

                if calibration_result.status != tr.CALIBRATION_STATUS_SUCCESS:
                    self.stop_recording()
                    print("WARNING! libtobii.TobiiProTracker.calibrate: Calibration was unsuccessful!")
                    return False

                self.screen.clear()
                for point in calibration_result.calibration_points:
                    self.screen.draw_circle(colour=(255, 255, 255), pos=self._norm_2_px(point.position_on_display_area), r=self.disp.dispsize[0] / 200, pw=1, fill=False)
                    for sample in point.calibration_samples:
                        if sample.left_eye.validity == tr.VALIDITY_VALID_AND_USED:
                            self.screen.draw_circle(colour=(255, 0, 0), pos=self._norm_2_px(sample.left_eye.position_on_display_area), r=self.disp.dispsize[0] / 450, pw=self.disp.dispsize[0] / 450, fill=False)
                            self.screen.draw_line(colour=(255, 0, 0), spos=self._norm_2_px(point.position_on_display_area), epos=self._norm_2_px(sample.left_eye.position_on_display_area), pw=1)
                        if sample.right_eye.validity == tr.VALIDITY_VALID_AND_USED:
                            self.screen.draw_circle(colour=(0, 0, 255), pos=self._norm_2_px(sample.right_eye.position_on_display_area), r=self.disp.dispsize[0] / 450, pw=self.disp.dispsize[0] / 450, fill=False)
                            self.screen.draw_line(colour=(0, 0, 255), spos=self._norm_2_px(point.position_on_display_area), epos=self._norm_2_px(sample.right_eye.position_on_display_area), pw=1)

                self.screen.draw_text("Press the \'R\' key to recalibrate or \'Space\' to continue....", pos=(0.5 * self.disp.dispsize[0], 0.95 * self.disp.dispsize[1]), colour=(255, 255, 255), fontsize=20)

                self.screen.draw_text("Left Eye", pos=(0.5 * self.disp.dispsize[0], 0.01 * self.disp.dispsize[1]), colour=(255, 0, 0), fontsize=20)
                self.screen.draw_text("Right Eye", pos=(0.5 * self.disp.dispsize[0], 0.03 * self.disp.dispsize[1]), colour=(0, 0, 255), fontsize=20)

                self.disp.fill(self.screen)
                self.disp.show()

                pressed_key = self.kb.get_key(keylist=['space', 'r'], flush=True, timeout=None)

                if pressed_key[0] == 'space':
                    calibrating = False

        if validate:
            # # # show menu
            self.screen.clear()
            self.screen.draw_text(text="Press space to start validation", colour=(255, 255, 255), fontsize=20)
            self.disp.fill(self.screen)
            self.disp.show()

            # # # wait for spacepress
            self.kb.get_key(keylist=['space'], flush=True, timeout=None)

            # # # # # #
            # # validation

            # # # arrays for data storage
            lxacc, lyacc, rxacc, ryacc = [], [], [], []

            # # loop through all calibration positions
            for pos in self.points_to_calibrate:
                # show validation point
                self.screen.clear()
                self.screen.draw_fixation(fixtype='dot', pos=pos, colour=(255, 255, 255))
                self.disp.fill(self.screen)
                self.disp.show()

                # allow user some time to gaze at dot
                clock.pause(1000)

                lxsamples, lysamples, rxsamples, rysamples = [], [], [], []
                for sample in self.gaze:
                    if sample["left_gaze_point_validity"]:
                        gaze_point = self._norm_2_px(sample["left_gaze_point_on_display_area"])
                        lxsamples.append(abs(gaze_point[0] - pos[0]))
                        lysamples.append(abs(gaze_point[1] - pos[1]))
                    if sample["right_gaze_point_validity"]:
                        gaze_point = self._norm_2_px(sample["right_gaze_point_on_display_area"])
                        rxsamples.append(abs(gaze_point[0] - pos[0]))
                        rysamples.append(abs(gaze_point[1] - pos[1]))

                # calculate mean deviation
                lxacc.append(self._mean(lxsamples))
                lyacc.append(self._mean(lysamples))
                rxacc.append(self._mean(rxsamples))
                ryacc.append(self._mean(rysamples))

                # wait for a bit to slow down validation process a bit
                clock.pause(1000)

            # calculate mean accuracy
            self.pxaccuracy = [(self._mean(lxacc), self._mean(lyacc)), (self._mean(rxacc), self._mean(ryacc))]

            # sample rate
            # calculate intersample times
            timestamps = []
            gaze_samples = copy.copy(self.gaze)
            for i in range(0, len(gaze_samples) - 1):
                timestamps.append((gaze_samples[i + 1]['system_time_stamp'] - gaze_samples[i]['system_time_stamp']) / 1000.0)

            # mean intersample time
            self.sampletime = self._mean(timestamps)
            self.samplerate = int(1000.0 / self.sampletime)
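            # Tobii system timestamps are in microseconds, so the division by
            # 1000.0 above yields intersample times in milliseconds; the
            # sample rate then follows as 1000 / mean intersample time (for
            # example, a mean of roughly 8.3 ms corresponds to about 120 Hz)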

            # # # # # #
            # # RMS noise

            # # present instructions
            self.screen.clear()
            self.screen.draw_text(text="Noise calibration: please look at the dot\n\n(press space to start)", pos=(self.disp.dispsize[0] / 2, int(self.disp.dispsize[1] * 0.2)), colour=(255, 255, 255), fontsize=20)
            self.screen.draw_fixation(fixtype='dot', colour=(255, 255, 255))
            self.disp.fill(self.screen)
            self.disp.show()

            # # wait for spacepress
            self.kb.get_key(keylist=['space'], flush=True, timeout=None)

            # # show fixation
            self.screen.draw_fixation(fixtype='dot', colour=(255, 255, 255))
            self.disp.fill(self.screen)
            self.disp.show()
            self.screen.clear()

            # # wait for a bit, to allow participant to fixate
            clock.pause(500)

            # # get samples
            sl = [self.sample()]  # samplelist, prefilled with 1 sample to prevent sl[-1] from producing an error; first sample will be ignored for RMS calculation
            t0 = clock.get_time()  # starting time
            while clock.get_time() - t0 < 1000:
                s = self.sample()  # sample
                if s != sl[-1] and self.is_valid_sample(s) and s != (0, 0):
                    sl.append(s)

            # # calculate RMS noise
            Xvar, Yvar = [], []
            for i in range(2, len(sl)):
                Xvar.append((sl[i][0] - sl[i - 1][0])**2)
                Yvar.append((sl[i][1] - sl[i - 1][1])**2)
            XRMS = (self._mean(Xvar))**0.5
            YRMS = (self._mean(Yvar))**0.5
            self.pxdsttresh = (XRMS, YRMS)
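            # XRMS and YRMS are the root-mean-square of the horizontal and
            # vertical sample-to-sample differences, i.e.
            #     RMS_x = sqrt(mean((x_i - x_(i-1))**2)),
            # and later serve to normalize displacements in the online
            # saccade detection (see wait_for_saccade_start)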

            # # # # # # #
            # # # calibration report

            # # # # recalculate thresholds (degrees to pixels)
            self.pxfixtresh = self._deg2pix(self.screendist, self.fixtresh, self.pixpercm)
            self.pxspdtresh = self._deg2pix(self.screendist, self.spdtresh / 1000.0, self.pixpercm)  # in pixels per millisecond
            self.pxacctresh = self._deg2pix(self.screendist, self.accthresh / 1000.0, self.pixpercm)  # in pixels per millisecond**2

            data_to_write = ''
            data_to_write += "pygaze calibration report start\n"
            data_to_write += "samplerate: %s Hz\n" % self.samplerate
            data_to_write += "sampletime: %s ms\n" % self.sampletime
            data_to_write += "accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s\n" % (self.pxaccuracy[0][0], self.pxaccuracy[0][1], self.pxaccuracy[1][0], self.pxaccuracy[1][1])
            data_to_write += "precision (RMS noise in pixels): X=%s, Y=%s\n" % (self.pxdsttresh[0], self.pxdsttresh[1])
            data_to_write += "distance between participant and display: %s cm\n" % self.screendist
            data_to_write += "fixation threshold: %s pixels\n" % self.pxfixtresh
            data_to_write += "speed threshold: %s pixels/ms\n" % self.pxspdtresh
            data_to_write += "accuracy threshold: %s pixels/ms**2\n" % self.pxacctresh
            data_to_write += "pygaze calibration report end\n"

            # # # # write report to log
            #self.datafile.write(data_to_write)

            self.screen.clear()
            self.screen.draw_text(text=data_to_write, pos=(self.disp.dispsize[0] / 2, int(self.disp.dispsize[1] / 2)), colour=(255, 255, 255), fontsize=20)
            self.disp.fill(self.screen)
            self.disp.show()

            self.kb.get_key(keylist=['space'], flush=True, timeout=None)

        self.stop_recording()
        self._write_enabled = True

        return True

    def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30):
        """Performs a fixation triggered drift correction by collecting
        a number of samples and calculating the average distance from the
        fixation position

        arguments
        None

        keyword arguments
        pos			-- (x, y) position of the fixation dot or None for
                       a central fixation (default = None)
        min_samples		-- minimal amount of samples after which an
                       average deviation is calculated (default = 10)
        max_dev		-- maximal deviation from fixation in pixels
                       (default = 60)
        reset_threshold	-- if the horizontal or vertical distance in
                       pixels between two consecutive samples is
                       larger than this threshold, the sample
                       collection is reset (default = 30)

        returns
        checked		-- Boolean indicating if drift check is ok (True)
                       or not (False); or calls self.calibrate if 'q'
                       or 'escape' is pressed
        """
        if pos is None:
            pos = self.disp.dispsize[0] / 2, self.disp.dispsize[1] / 2

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        # loop until we have sufficient samples
        lx = []
        ly = []
        while len(lx) < min_samples:

            # pressing escape enters the calibration screen
            if self.kb.get_key()[0] in ['escape', 'q']:
                print("libtobii.TobiiTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed")
                return self.calibrate(calibrate=True, validate=True)

            # collect a sample
            x, y = self.sample()

            if len(lx) == 0 or (x, y) != (lx[-1], ly[-1]):

                # if present sample deviates too much from previous sample, reset counting
                if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold):
                    lx = []
                    ly = []

                # collect samples
                else:
                    lx.append(x)
                    ly.append(y)

            if len(lx) == min_samples:

                avg_x = self._mean(lx)
                avg_y = self._mean(ly)
                d = ((avg_x - pos[0]) ** 2 + (avg_y - pos[1]) ** 2)**0.5

                if d < max_dev:
                    if stoprec:
                        self.stop_recording()
                    return True
                else:
                    lx = []
                    ly = []
        if stoprec:
            self.stop_recording()

    def drift_correction(self, pos=None, fix_triggered=False):
        """Performs a drift check

        arguments
        None

        keyword arguments
        pos			-- (x, y) position of the fixation dot or None for
                       a central fixation (default = None)
        fix_triggered	-- Boolean indicating if drift check should be
                       performed based on gaze position (fix_triggered
                       = True) or on spacepress (fix_triggered =
                       False) (default = False)

        returns
        checked		-- Boolean indicating if drift check is ok (True)
                       or not (False); or calls self.calibrate if 'q'
                       or 'escape' is pressed
        """
        if fix_triggered:
            return self.fix_triggered_drift_correction(pos)

        if pos is None:
            pos = self.disp.dispsize[0] / 2, self.disp.dispsize[1] / 2

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        result = False
        pressed = False
        while not pressed:
            pressed, presstime = self.kb.get_key()
            if pressed:
                if pressed == 'escape' or pressed == 'q':
                    print("libtobii.TobiiProTracker.drift_correction: 'q' or 'escape' pressed")
                    return self.calibrate(calibrate=True, validate=True)
                gazepos = self.sample()
                if ((gazepos[0] - pos[0])**2 + (gazepos[1] - pos[1])**2)**0.5 < self.pxerrdist:
                    result = True

        if stoprec:
            self.stop_recording()

        return result

    def wait_for_fixation_start(self):
        """Returns starting time and position when a fixation is started;
        function assumes a 'fixation' has started when gaze position
        remains reasonably stable (i.e. when most deviant samples are
        within self.pxfixtresh) for five samples in a row (self.pxfixtresh
        is created in self.calibration, based on self.fixtresh, a property
        defined in self.__init__)

        arguments
        None

        returns
        time, gazepos	-- time is the starting time in milliseconds (from
                       expstart), gazepos is a (x,y) gaze position
                       tuple of the position from which the fixation
                       was initiated
        """
        # # # # #
        # Tobii method

        if self.eventdetection == 'native':
            # print warning, since Tobii does not have a fixation start
            # detection built into their API (only ending)
            print("WARNING! 'native' event detection has been selected, \
                but Tobii does not offer fixation detection; PyGaze \
                algorithm will be used")

        # # # # #
        # PyGaze method

        # function assumes a 'fixation' has started when gaze position
        # remains reasonably stable for self.fixtimetresh

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        # get starting position
        spos = self.sample()
        while not self.is_valid_sample(spos):
            spos = self.sample()

        # get starting time
        t0 = clock.get_time()

        # wait for reasonably stable position
        moving = True
        while moving:
            # get new sample
            npos = self.sample()
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if new sample is too far from starting position
                if (npos[0] - spos[0])**2 + (npos[1] - spos[1])**2 > self.pxfixtresh**2:  # Pythagoras
                    # if not, reset starting position and time
                    spos = copy.copy(npos)
                    t0 = clock.get_time()
                # if new sample is close to starting sample
                else:
                    # get timestamp
                    t1 = clock.get_time()
                    # check if fixation time threshold has been surpassed
                    if t1 - t0 >= self.fixtimetresh:
                        if stoprec:
                            self.stop_recording()
                        # return time and starting position
                        return t0, spos

    def wait_for_fixation_end(self):
        """Returns time and gaze position when a fixation has ended;
        function assumes that a 'fixation' has ended when a deviation of
        more than self.pxfixtresh from the initial fixation position has
        been detected (self.pxfixtresh is created in self.calibration,
        based on self.fixtresh, a property defined in self.__init__)

        arguments
        None

        returns
        time, gazepos	-- time is the ending time in milliseconds (from
                       expstart), gazepos is a (x,y) gaze position
                       tuple of the position from which the fixation
                       was initiated
        """
        # # # # #
        # Tobii method

        if self.eventdetection == 'native':
            # print warning, since Tobii does not have a fixation detection
            # built into their API
            print("WARNING! 'native' event detection has been selected, \
                but Tobii does not offer fixation detection; PyGaze algorithm \
                will be used")

        # # # # #
        # PyGaze method

        # function assumes that a 'fixation' has ended when a deviation of more than fixtresh
        # from the initial 'fixation' position has been detected

        # get starting time and position
        stime, spos = self.wait_for_fixation_start()

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        # loop until fixation has ended
        while True:
            # get new sample
            npos = self.sample()  # get newest sample
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if sample deviates too much from starting position
                if (npos[0] - spos[0])**2 + (npos[1] - spos[1])**2 > self.pxfixtresh**2:
                    # break loop if deviation is too high
                    break

        if stoprec:
            self.stop_recording()

        return clock.get_time(), spos

    def wait_for_saccade_start(self):
        """Returns starting time and starting position when a saccade is
        started; based on Dalmaijer et al. (2013) online saccade detection
        algorithm

        arguments
        None

        returns
        starttime, startpos	-- starttime in milliseconds (from
                       expbegintime); startpos is an (x,y) gaze position
                       tuple
        """
        # # # # #
        # Tobii method

        if self.eventdetection == 'native':
            # print warning, since Tobii does not have saccade detection
            # built into their API
            print("WARNING! 'native' event detection has been selected, \
                but Tobii does not offer saccade detection; PyGaze \
                algorithm will be used")

        # # # # #
        # PyGaze method

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        # get starting position (no blinks)
        newpos = self.sample()
        while not self.is_valid_sample(newpos):
            newpos = self.sample()
        # get starting time, position, intersampledistance, and velocity
        t0 = clock.get_time()
        prevpos = newpos[:]
        s = 0
        v0 = 0

        # get samples
        saccadic = False
        while not saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # check if distance is larger than precision error
                sx = newpos[0] - prevpos[0]
                sy = newpos[1] - prevpos[1]
                if (sx / self.pxdsttresh[0])**2 + (sy / self.pxdsttresh[1])**2 > self.weightdist:  # weighted distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise
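                    # the squared displacement is normalized by the per-axis
                    # RMS noise estimated during calibration, so it has to
                    # exceed self.weightdist before being treated as a real
                    # gaze shift rather than measurement noise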
                    # calculate distance
                    s = ((sx)**2 + (sy)**2)**0.5  # intersampledistance = speed in pixels/ms
                    # calculate velocity
                    v1 = s / (t1 - t0)
                    # calculate acceleration
                    a = (v1 - v0) / (t1 - t0)  # acceleration in pixels/ms**2
                    # check if either velocity or acceleration are above threshold values
                    if v1 > self.pxspdtresh or a > self.pxacctresh:
                        saccadic = True
                        spos = prevpos[:]
                        stime = clock.get_time()
                    # update previous values
                    t0 = copy.copy(t1)
                    v0 = copy.copy(v1)

                # update previous sample
                prevpos = newpos[:]

        if stoprec:
            self.stop_recording()

        return stime, spos

    def wait_for_saccade_end(self):
        """Returns ending time, starting and end position when a saccade is
        ended; based on Dalmaijer et al. (2013) online saccade detection
        algorithm

        arguments
        None

        returns
        endtime, startpos, endpos	-- endtime in milliseconds (from
                               expbegintime); startpos and endpos
                               are (x,y) gaze position tuples
        """
        # # # # #
        # Tobii method

        if self.eventdetection == 'native':
            # print warning, since Tobii does not have saccade detection
            # built into their API
            print("WARNING! 'native' event detection has been selected, \
                but Tobii does not offer saccade detection; PyGaze \
                algorithm will be used")

        # # # # #
        # PyGaze method

        # get starting position (no blinks)
        t0, spos = self.wait_for_saccade_start()

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        # get valid sample
        prevpos = self.sample()
        while not self.is_valid_sample(prevpos):
            prevpos = self.sample()
        # get starting time, intersample distance, and velocity
        t1 = clock.get_time()
        s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])**2)**0.5  # = intersample distance = speed in px/sample
        v0 = s / (t1 - t0)

        # run until velocity and acceleration go below threshold
        saccadic = True
        while saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # calculate distance
                s = ((newpos[0] - prevpos[0])**2 + (newpos[1] - prevpos[1])**2)**0.5  # = speed in pixels/sample
                # calculate velocity
                v1 = s / (t1 - t0)
                # calculate acceleration
                a = (v1 - v0) / (t1 - t0)  # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample)
                # check if velocity and acceleration are below threshold
                if v1 < self.pxspdtresh and (a > -1 * self.pxacctresh and a < 0):
                    saccadic = False
                    epos = newpos[:]
                    etime = clock.get_time()
                # update previous values
                t0 = copy.copy(t1)
                v0 = copy.copy(v1)
            # update previous sample
            prevpos = newpos[:]

        if stoprec:
            self.stop_recording()

        return etime, spos, epos

    def wait_for_blink_start(self):
        """Waits for a blink start and returns the blink starting time

        arguments
        None

        returns
        timestamp		--	blink starting time in milliseconds, as
                        measured from experiment begin time
        """
        # # # # #
        # Tobii method

        if self.eventdetection == 'native':
            # print warning, since Tobii does not have a blink detection
            # built into their API
            print("WARNING! 'native' event detection has been selected, \
                but Tobii does not offer blink detection; PyGaze algorithm \
                will be used")

        # # # # #
        # PyGaze method

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        blinking = False

        # loop until there is a blink
        while not blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's a valid sample
            if self.is_valid_sample(gazepos):
                # get timestamp for possible blink start
                t0 = clock.get_time()
                # loop until a blink is determined, or a valid sample occurs
                while not self.is_valid_sample(self.sample()):
                    # check if time has surpassed BLINKTHRESH
                    if clock.get_time() - t0 >= self.blinkthresh:
                        if stoprec:
                            self.stop_recording()
                        # return timestamp of blink start
                        return t0

    def wait_for_blink_end(self):
        """Waits for a blink end and returns the blink ending time

        arguments
        None

        returns
        timestamp		--	blink ending time in milliseconds, as
                        measured from experiment begin time
        """
        # # # # #
        # Tobii method
        if self.eventdetection == 'native':
            # print warning, since Tobii does not have a blink detection
            # built into their API
            print("WARNING! 'native' event detection has been selected, \
                but Tobii does not offer blink detection; PyGaze algorithm \
                will be used")

        # # # # #
        # PyGaze method

        # start recording if recording has not yet started
        if not self.recording:
            self.start_recording()
            stoprec = True
        else:
            stoprec = False

        blinking = True
        # loop while there is a blink
        while blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's valid
            if self.is_valid_sample(gazepos):
                # if it is a valid sample, blinking has stopped
                blinking = False

        if stoprec:
            self.stop_recording()

        # return timestamp of blink end
        return clock.get_time()

    def log(self, msg):
        """Writes a message to the log file

        arguments
        msg		-- a string to include in the log file

        returns
        Nothing	-- uses native log function to include a line
                   in the log file
        """
        t = tr.get_system_time_stamp()
        if not self.t0:
            self.t0 = t
            self._write_header()

        self.datafile.write('%.4f\t%s\n' % ((t - self.t0) / 1000.0, msg))
        self._flush_to_file()

    def _flush_to_file(self):
        # write data to disk
        self.datafile.flush()  # internal buffer to RAM
        os.fsync(self.datafile.fileno())  # RAM file cache to disk

    def _write_header(self):
        # write header
        self.datafile.write('\t'.join(['TimeStamp',
                                       'Event',
                                       'GazePointXLeft',
                                       'GazePointYLeft',
                                       'ValidityLeft',
                                       'GazePointXRight',
                                       'GazePointYRight',
                                       'ValidityRight',
                                       'GazePointX',
                                       'GazePointY',
                                       'PupilSizeLeft',
                                       'PupilValidityLeft',
                                       'PupilSizeRight',
                                       'PupilValidityRight']) + '\n')
        self._flush_to_file()

    def _write_sample(self, sample):
        # write timestamp and gaze position for both eyes to the datafile
        left_gaze_point = self._norm_2_px(sample['left_gaze_point_on_display_area']) if sample['left_gaze_point_validity'] else (-1, -1)
        right_gaze_point = self._norm_2_px(sample['right_gaze_point_on_display_area']) if sample['right_gaze_point_validity'] else (-1, -1)
        self.datafile.write('%.4f\t\t%d\t%d\t%d\t%d\t%d\t%d' % (
                            (sample['system_time_stamp'] - self.t0) / 1000.0,
                            left_gaze_point[0],
                            left_gaze_point[1],
                            sample['left_gaze_point_validity'],
                            right_gaze_point[0],
                            right_gaze_point[1],
                            sample['right_gaze_point_validity']))
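        # the double tab after the timestamp above leaves the 'Event' column
        # empty for regular gaze samples; log() writes event messages into
        # that column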

        # if no correct sample is available, data is missing
        if not (sample['left_gaze_point_validity'] or sample['right_gaze_point_validity']):  # not detected
            ave = (-1.0, -1.0)
        # if the right sample is unavailable, use left sample
        elif not sample['right_gaze_point_validity']:
            ave = left_gaze_point
        # if the left sample is unavailable, use right sample
        elif not sample['left_gaze_point_validity']:
            ave = right_gaze_point
        # if we have both samples, use both samples
        else:
            ave = (int(round((left_gaze_point[0] + right_gaze_point[0]) / 2.0)),
                   int(round((left_gaze_point[1] + right_gaze_point[1]) / 2.0)))

        # write gaze position to the datafile, based on the selected sample(s)
        self.datafile.write('\t%d\t%d' % ave)

        left_pupil = sample['left_pupil_diameter'] if sample['left_pupil_validity'] else -1
        right_pupil = sample['right_pupil_diameter'] if sample['right_pupil_validity'] else -1

        self.datafile.write('\t%.4f\t%d\t%.4f\t%d' % (left_pupil,
                            sample['left_pupil_validity'],
                            right_pupil,
                            sample['right_pupil_validity']))

        self.datafile.write('\n')

        self._flush_to_file()

    def close(self):
        """Closes the currently used log file.

        arguments
        None

        returns
        None		--	closes the log file.
        """
        self.datafile.close()
Example #17
# PyGaze
from constants import *
from pygaze.display import Display
from pygaze.screen import Screen
from pygaze.eyetracker import EyeTracker
from pygaze.keyboard import Keyboard
from pygaze.libtime import clock


# # # # #
# SETUP

# visuals
disp = Display()
scr = Screen()

# input
tracker = EyeTracker(disp)
kb = Keyboard(keylist=None, timeout=None)

# calibrate
tracker.calibrate()

# starting screen
scr.clear()
scr.draw_text(text="Press Space to start")

disp.fill(scr)
disp.show()
kb.get_key(keylist=['space'], timeout=None, flush=True)
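
# A minimal continuation sketch (not part of the original example), assuming
# the standard PyGaze EyeTracker API used above: record one trial, sample
# gaze for a couple of seconds, then shut everything down.
tracker.start_recording()
tracker.log("trial_start")

scr.clear()
scr.draw_fixation(fixtype='dot')
disp.fill(scr)
disp.show()

# poll gaze samples for roughly two seconds
t0 = clock.get_time()
while clock.get_time() - t0 < 2000:
    gazepos = tracker.sample()

tracker.log("trial_end")
tracker.stop_recording()

# close the log file and the display
tracker.close()
disp.close()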
Example #18
	def __init__(self, display, tracker):

		"""
		Constructor.

		Arguments:
		display		--	A PyGaze Display object.
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

		pylink.EyeLinkCustomDisplay.__init__(self)

		# objects
		self.display = display
		self.screen = Screen(disptype=DISPTYPE, mousevisible=False)
		self.kb = Keyboard(keylist=None, timeout=1)
		if DISPTYPE == 'pygame':
			self.kb.set_timeout(timeout=0.001)
		# If we are using a DISPTYPE that cannot be used directly, we have to
		# save the camera image to a temporary file on each frame.
		#if DISPTYPE not in ('pygame', 'psychopy'):
		import tempfile
		import os
		self.tmp_file = os.path.join(tempfile.gettempdir(), \
			'__eyelink__.jpg')
		# drawing properties
		self.xc = self.display.dispsize[0]/2
		self.yc = self.display.dispsize[1]/2
		self.ld = 40 # line distance
		# menu
		self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False)
		self.menuscreen.draw_text(text="== Eyelink calibration menu ==", pos= \
			(self.xc,self.yc-5*self.ld), center=True, font='mono', fontsize= \
			12, antialias=True)
		self.menuscreen.draw_text(text="Press C to calibrate", pos=(self.xc, \
			self.yc-3*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press V to validate", pos=(self.xc, \
			self.yc-2*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press A to auto-threshold", pos=( \
			self.xc,self.yc-1*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press Enter to show camera image", \
			pos=(self.xc,self.yc+1*self.ld), center=True, font='mono', \
			fontsize=12, antialias=True)
		self.menuscreen.draw_text(text= \
			"(then change between images using the arrow keys)", pos=(self.xc, \
			self.yc+2*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		self.menuscreen.draw_text(text="Press Q to exit menu", pos=(self.xc, \
			self.yc+5*self.ld), center=True, font='mono', fontsize=12, \
			antialias=True)
		# beeps
		self.__target_beep__ = Sound(osc='sine', freq=440, length=50, attack= \
			0, decay=0, soundfile=None)
		self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200, \
			attack=0, decay=0, soundfile=None)
		self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200, \
			attack=0, decay=0, soundfile=None)
		# further properties
		self.state = None
		self.imagebuffer = array.array('l')
		self.pal = None
		self.size = (0,0)
		self.set_tracker(tracker)
		self.last_mouse_state = -1
Example #19
class AleaTracker(BaseEyeTracker):
    """A class for AleaTracker objects"""

    def __init__(self, display, logfile=settings.LOGFILE, \
        alea_key=settings.ALEAKEY, \
        animated_calibration=settings.ALEAANIMATEDCALIBRATION, \
        eventdetection=settings.EVENTDETECTION, \
        saccade_velocity_threshold=35, \
        saccade_acceleration_threshold=9500, \
        blink_threshold=settings.BLINKTHRESH, \
        **args):
        """Initializes the AleaTracker object
        
        arguments
        display    -- a pygaze.display.Display instance
        
        keyword arguments
        logfile    -- logfile name (string value); note that this is the
                   name for the eye data log file (default = LOGFILE)
        """

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, AleaTracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        # object properties
        self.disp = display
        self.screen = Screen()
        self.dispsize = self.disp.dispsize  # display size in pixels
        self.screensize = settings.SCREENSIZE  # display size in cm
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # show a message
        self.screen.clear()
        self.screen.draw_text(
            text="Initialising the eye tracker, please wait...", fontsize=20)
        self.disp.fill(self.screen)
        self.disp.show()

        # output file properties
        self.outputfile = logfile + '.tsv'

        # calibration properties
        self.animated_calibration = animated_calibration == True

        # eye tracker properties
        self.connected = False
        self.recording = False
        self.errdist = 2  # degrees; maximal error for drift correction
        self.pxerrdist = 30  # initial error in pixels
        self.maxtries = 100  # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold  # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection
        self.set_detection_type(self.eventdetection)
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

        # connect to the tracker
        self.alea = OGAleaTracker(alea_key, file_path=self.outputfile)

        # get info on the sample rate
        # TODO: Compute after streaming some samples?
        self.samplerate = 60.0
        self.sampletime = 1000.0 / self.samplerate

        # initiation report
        self.log("pygaze initiation report start")
        self.log("display resolution: {}x{}".format( \
            self.dispsize[0], self.dispsize[1]))
        self.log("display size in cm: {}x{}".format( \
            self.screensize[0], self.screensize[1]))
        self.log("samplerate: {} Hz".format(self.samplerate))
        self.log("sampletime: {} ms".format(self.sampletime))
        self.log("fixation threshold: {} degrees".format(self.fixtresh))
        self.log("speed threshold: {} degrees/second".format(self.spdtresh))
        self.log("acceleration threshold: {} degrees/second**2".format( \
            self.accthresh))
        self.log("pygaze initiation report end")

    def calibrate(self, animated=None, skip_bad_points=False):
        """Calibrates the eye tracking system
        
        arguments
        None
        
        keyword arguments
        animated    --  bool. Set to True to show a parrot animation instead
                        of calibration dots, or False to use standard points.
                        Set to None to use default option.

        skip_bad_points     --  bool. IntelliGaze will skip difficult points
                                when set to True. (Default = False)

        returns
        success     --  returns True if calibration succeeded, or False if
                        not; in addition a calibration log is added to the
                        log file and some properties are updated (i.e. the
                        thresholds for detection algorithms)
        """

        # Process animated keyword argument.
        if animated is None:
            animated = self.animated_calibration
        if animated:
            img = "ANIMATION:PARROT"
        else:
            img = ""

        # Show a message.
        self.screen.clear()
        self.screen.draw_text(text="Running calibration in the foreground...",
                              fontsize=20)
        self.disp.fill(self.screen)
        self.disp.show()

        # CALIBRATION
        # Re-run the calibration until it was approved by the user.
        quited = False
        calibration_approved = False
        while not calibration_approved:
            # Wait for the calibration to finish.
            status, improve = self.alea.calibrate(image=img, \
                skip_bad_points=skip_bad_points)
            # Construct a message string.
            if status == 0:
                calib_str = "Calibration completed!"
            else:
                calib_str = "Calibration failed!"
            if improve:
                calib_str += "\n\nWARNING: IntelliGaze recommends repeating the calibration to improve accuracy."
            calib_str += "\n\n\nPress R to retry, or Space to continue."
            # Show calibration results.
            self.screen.clear()
            self.screen.draw_text(text=calib_str, fontsize=20)
            self.disp.fill(self.screen)
            self.disp.show()
            # Wait for user input.
            key = None
            while key not in ["r", "Space", "space", "q"]:
                key, keytime = self.kb.get_key(keylist=['q', 'r', 'space'],
                                               timeout=None,
                                               flush=True)
            # Process key press.
            if key in ["q", "Space", "space"]:
                calibration_approved = True
                if key == "q":
                    quited = True

        # Calibration failed if the user quit.
        if quited:
            return False

        # NOISE CALIBRATION
        # Present noise calibration instructions.
        self.screen.clear()
        self.screen.draw_text(
            text="Noise calibration. Please look at the dot, and press any key to start.",
            fontsize=20, \
            pos=(int(self.dispsize[0]/2),int(self.dispsize[1]*0.3)))
        self.screen.draw_fixation(fixtype="dot")
        self.disp.fill(self.screen)
        self.disp.show()
        # Wait for a keypress.
        key, keytime = self.kb.get_key(keylist=None, timeout=None, \
            flush=True)
        # Start with empty lists.
        err = {'LX': [], 'LY': [], 'RX': [], 'RY': []}
        var = {'LX': [], 'LY': [], 'RX': [], 'RY': []}
        # Start streaming data so that samples can be obtained.
        self.start_recording()
        self.log("noise_calibration_start")
        # Present a central fixation.
        x = int(float(self.dispsize[0]) / 2.0)
        y = int(float(self.dispsize[1]) / 2.0)
        self.screen.clear()
        self.screen.draw_fixation(fixtype="dot", pos=(x, y))
        self.disp.fill(self.screen)
        t0 = self.disp.show()
        # Collect at least 10 samples, and wait for at least 1 second.
        i = 0
        while (i < 10) or (clock.get_time() - t0 < 1000):
            # Get new sample.
            gx, gy = self.sample()
            if (gx > 0) and (gy > 0):
                i += 1
                err["LX"].append(abs(float(x) - float(gx)))
                err["LY"].append(abs(float(y) - float(gy)))
                err["RX"].append(abs(float(x) - float(gx)))
                err["RY"].append(abs(float(y) - float(gy)))
                for k in var.keys():
                    var[k].append(err[k][-1]**2)
                clock.pause(int(self.sampletime))
        # Stop streaming.
        self.log("noise_calibration_stop")
        self.stop_recording()

        # Compute the RMS noise for the calibration points.
        xnoise = (math.sqrt(sum(var['LX']) / float(len(var['LX']))) + \
            math.sqrt(sum(var['RX']) / float(len(var['RX'])))) / 2.0
        ynoise = (math.sqrt(sum(var['LY']) / float(len(var['LY']))) + \
            math.sqrt(sum(var['RY']) / float(len(var['RY'])))) / 2.0
        self.pxdsttresh = (xnoise, ynoise)

        # AFTERMATH
        # store some variables
        pixpercm = (self.dispsize[0] / float(self.screensize[0]) + \
            self.dispsize[1]/float(self.screensize[1])) / 2
        screendist = settings.SCREENDIST
        # calculate thresholds based on tracker settings
        self.accuracy = ( \
            (pix2deg(screendist, sum(err['LX']) / float(len(err['LX'])), pixpercm), \
            pix2deg(screendist, sum(err['LY']) / float(len(err['LY'])), pixpercm)), \
            (pix2deg(screendist, sum(err['RX']) / float(len(err['RX'])), pixpercm), \
            pix2deg(screendist, sum(err['RY']) / float(len(err['RY'])), pixpercm)))
        self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm)
        self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
        self.pxaccuracy = ( \
            (sum(err['LX']) / float(len(err['LX'])), \
            sum(err['LY']) / float(len(err['LY']))), \
            (sum(err['RX']) / float(len(err['RX'])), \
            sum(err['RY']) / float(len(err['RY']))))
        self.pxspdtresh = deg2pix(screendist, self.spdtresh / 1000.0,
                                  pixpercm)  # in pixels per millisecond
        self.pxacctresh = deg2pix(screendist, self.accthresh / 1000.0,
                                  pixpercm)  # in pixels per millisecond**2

        # calibration report
        self.log("pygaze calibration report start")
        self.log("accuracy (degrees): LX={}, LY={}, RX={}, RY={}".format( \
            self.accuracy[0][0], self.accuracy[0][1], self.accuracy[1][0], \
            self.accuracy[1][1]))
        self.log("accuracy (in pixels): LX={}, LY={}, RX={}, RY={}".format( \
            self.pxaccuracy[0][0], self.pxaccuracy[0][1], \
            self.pxaccuracy[1][0], self.pxaccuracy[1][1]))
        self.log("precision (RMS noise in pixels): X={}, Y={}".format( \
            self.pxdsttresh[0],self.pxdsttresh[1]))
        self.log("distance between participant and display: {} cm".format( \
            screendist))
        self.log("fixation threshold: {} pixels".format(self.pxfixtresh))
        self.log("speed threshold: {} pixels/ms".format(self.pxspdtresh))
        self.log("acceleration threshold: {} pixels/ms**2".format( \
            self.pxacctresh))
        self.log("pygaze calibration report end")

        return True
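
    # Rough worked example for the conversions above (illustrative numbers,
    # not executed): with a 1680 px wide display that is 47.4 cm wide,
    # pixpercm is about 35 px/cm. At a 70 cm viewing distance the 1.5-degree
    # fixation threshold then maps to roughly 70 * tan(radians(1.5)) * 35
    # ~= 64 px (self.pxfixtresh), and the 35 deg/s speed threshold to roughly
    # 70 * tan(radians(0.035)) * 35 ~= 1.5 px/ms (self.pxspdtresh), assuming
    # deg2pix uses a tangent-based conversion as sketched here.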

    def close(self):
        """Neatly close connection to tracker
        
        arguments
        None
        
        returns
        Nothing    -- saves data and sets self.connected to False
        """

        # close connection
        self.alea.close()
        self.connected = False

    def connected(self):
        """Checks if the tracker is connected
        
        arguments
        None
        
        returns
        connected    -- True if connection is established, False if not;
                   sets self.connected to the same value
        """

        return self.connected

    def drift_correction(self, pos=None, fix_triggered=False):
        """Performs a drift check
        
        arguments
        None
        
        keyword arguments
        pos            -- (x, y) position of the fixation dot or None for
                       a central fixation (default = None)
        fix_triggered    -- Boolean indicating if drift check should be
                       performed based on gaze position (fix_triggered
                       = True) or on spacepress (fix_triggered = 
                       False) (default = False)
        
        returns
        checked        -- Boolean indicating if drift check is ok (True)
                       or not (False); or calls self.calibrate if 'q'
                       or 'escape' is pressed
        """

        if pos == None:
            pos = (int(self.dispsize[0] / 2), int(self.dispsize[1] / 2))
        if fix_triggered:
            return self.fix_triggered_drift_correction(pos)

        self.draw_drift_correction_target(pos[0], pos[1])

        pressed = False
        while not pressed:
            pressed, presstime = self.kb.get_key()
            if pressed:
                if pressed in ["Escape", "escape", "q"]:
                    print(
                        "libalea.AleaTracker.drift_correction: 'q' or 'escape' pressed"
                    )
                    return self.calibrate()
                gazepos = self.sample()
                if ((gazepos[0] - pos[0])**2 +
                    (gazepos[1] - pos[1])**2)**0.5 < self.pxerrdist:
                    return True
                else:
                    self.errorbeep.play()
        return False

    def draw_drift_correction_target(self, x, y):
        """
        Draws the drift-correction target.
        
        arguments
        
        x        --    The X coordinate
        y        --    The Y coordinate
        """

        self.screen.clear()
        self.screen.draw_fixation(fixtype='dot',
                                  colour=settings.FGC,
                                  pos=(x, y),
                                  pw=0,
                                  diameter=12)
        self.disp.fill(self.screen)
        self.disp.show()

    def draw_calibration_target(self, x, y):

        self.draw_drift_correction_target(x, y)

    def fix_triggered_drift_correction(self,
                                       pos=None,
                                       min_samples=4,
                                       max_dev=120,
                                       timeout=10000):
        """Performs a fixation triggered drift correction by collecting
        a number of samples and calculating the average distance from the
        fixation position
        
        arguments
        None
        
        keyword arguments
        pos            -- (x, y) position of the fixation dot or None for
                       a central fixation (default = None)
        min_samples    -- minimal amount of samples after which a
                       fixation is accepted (default = 4)
        max_dev        -- maximal deviation from fixation in pixels
                       (default = 120)
        timeout        -- Time in milliseconds until fixation-triggering is
                       given up on, and calibration is started 
                       (default = 10000)
        
        returns
        checked        -- Boolean indicating if drift check is ok (True)
                       or not (False); or calls self.calibrate if 'q'
                       or 'escape' is pressed
        """

        if pos == None:
            pos = (int(self.dispsize[0] / 2), int(self.dispsize[1] / 2))

        self.draw_drift_correction_target(pos[0], pos[1])

        t0 = clock.get_time()
        consecutive_count = 0
        while consecutive_count < min_samples:

            # Get new sample.
            x, y = self.sample()

            # Ignore empty samples.
            if (x is None) or (y is None):
                continue

            # Measure the distance to the target position.
            d = ((x - pos[0])**2 + (y - pos[1])**2)**0.5
            # Check whether the distance is below the allowed distance.
            if d <= max_dev:
                # Increment count.
                consecutive_count += 1
            else:
                # Reset count.
                consecutive_count = 0

            # Check for a timeout.
            if clock.get_time() - t0 > timeout:
                print(
                    "libalea.AleaTracker.fix_triggered_drift_correction: timeout during fixation-triggered drift check"
                )
                return self.calibrate()

            # Pressing escape enters the calibration screen.
            if self.kb.get_key()[0] in ["Escape", "escape", "q"]:
                print(
                    "libalea.AleaTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed"
                )
                return self.calibrate()

        return True

    def get_eyetracker_clock_async(self):
        """Not supported for AleaTracker (yet)"""

        print(
            "get_eyetracker_clock_async function not supported for AleaTracker"
        )

    def log(self, msg):
        """Writes a message to the log file
        
        arguments
        msg        -- a string to include in the log file
        
        returns
        Nothing    -- uses the native log function of the Alea API to
                   include a line in the log file
        """

        self.alea.log(msg)

    def prepare_drift_correction(self, pos):
        """Not supported for AleaTracker"""

        print(
            "prepare_drift_correction function not supported for AleaTracker")

    def pupil_size(self):
        """Return pupil size
        
        arguments
        None
        
        returns
        pupil size    -- returns pupil diameter for the eye that is currently
                   being tracked (as specified by self.eye_used) or -1
                   when no data is obtainable
        """

        # Get the latest sample.
        t, x, y, ps = self.alea.sample()

        # Invalid data.
        if ps == 0:
            return -1

        # Check if the new pupil size is the same as the previous.
        if ps != self.prevps:
            # Update the pupil size.
            self.prevps = copy.copy(ps)

        return self.prevps

    def sample(self):
        """Returns newest available gaze position
        
        arguments
        None
        
        returns
        sample    -- an (x,y) tuple, or (-1,-1) on an error
        """

        # Get the latest sample.
        t, x, y, ps = self.alea.sample()

        # Invalid data.
        if (x == 0) and (y == 0):
            return (-1, -1)

        # Combine the x and y coordinates.
        s = (int(x), int(y))
        # Check if the new sample is the same as the previous.
        if s != self.prevsample:
            # Update the current sample.
            self.prevsample = copy.copy(s)

        return self.prevsample

    def send_command(self, cmd):
        """Function not supported.
        """

        print("send_command function not supported for AleaTracker")

    def start_recording(self):
        """Starts recording eye position
        
        arguments
        None
        
        returns
        Nothing    -- sets self.recording to True when recording is
                   successfully started
        """

        self.alea.start_recording()
        self.recording = True

    def status_msg(self, msg):
        """Not supported for AleaTracker"""

        print("status_msg function not supported for AleaTracker")

    def stop_recording(self):
        """Stop recording eye position
        
        arguments
        None
        
        returns
        Nothing    -- sets self.recording to False when recording is
                   successfully stopped
        """

        self.alea.stop_recording()
        self.recording = False

    def set_detection_type(self, eventdetection):
        """Set the event detection type to either PyGaze algorithms, or
        native algorithms as provided by the manufacturer (only if
        available: detection type will default to PyGaze if no native
        functions are available)
        
        arguments
        eventdetection    --    a string indicating which detection type
                        should be employed: either 'pygaze' for
                        PyGaze event detection algorithms or
                        'native' for the manufacturer's algorithms (only
                        if available; will default to 'pygaze' if no
                        native event detection is available)
        returns        --    detection type for saccades, fixations and
                        blinks in a tuple, e.g. 
                        ('pygaze','native','native') when 'native'
                        was passed, but native detection was not
                        available for saccade detection
        """

        if eventdetection in ['pygaze', 'native']:
            self.eventdetection = eventdetection

        return ('pygaze', 'pygaze', 'pygaze')

    def wait_for_event(self, event):
        """Waits for event
        
        arguments
        event        -- an integer event code, one of the following:
                    3 = STARTBLINK
                    4 = ENDBLINK
                    5 = STARTSACC
                    6 = ENDSACC
                    7 = STARTFIX
                    8 = ENDFIX
        
        returns
        outcome    -- a self.wait_for_* method is called, depending on the
                   specified event; the return values of corresponding
                   method are returned
        """

        if event == 5:
            outcome = self.wait_for_saccade_start()
        elif event == 6:
            outcome = self.wait_for_saccade_end()
        elif event == 7:
            outcome = self.wait_for_fixation_start()
        elif event == 8:
            outcome = self.wait_for_fixation_end()
        elif event == 3:
            outcome = self.wait_for_blink_start()
        elif event == 4:
            outcome = self.wait_for_blink_end()
        else:
            raise Exception(
                "Error in libalea.AleaTracker.wait_for_event: eventcode {} is not supported"
                .format(event))

        return outcome
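
    # Illustrative usage: t, pos = tracker.wait_for_event(7) blocks until a
    # fixation starts and returns its timestamp and position, while
    # tracker.wait_for_event(4) returns the timestamp of the next blink end.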

    def wait_for_blink_end(self):
        """Waits for a blink end and returns the blink ending time
        
        arguments
        None
        
        returns
        timestamp        --    blink ending time in milliseconds, as
                        measured from experiment begin time
        """

        # # # # #
        # Native method

        if self.eventdetection == 'native':

            print("WARNING! 'native' event detection not implemented")

        # # # # #
        # PyGaze method

        blinking = True

        # loop while there is a blink
        while blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's valid
            if self.is_valid_sample(gazepos):
                # if it is a valid sample, blinking has stopped
                blinking = False

        # return timestamp of blink end
        return clock.get_time()

    def wait_for_blink_start(self):
        """Waits for a blink start and returns the blink starting time
        
        arguments
        None
        
        returns
        timestamp        --    blink starting time in milliseconds, as
                        measured from experiment begin time
        """

        # # # # #
        # Native method

        if self.eventdetection == 'native':

            print("WARNING! 'native' event detection not implemented")

        # # # # #
        # PyGaze method

        blinking = False

        # loop until there is a blink
        while not blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's a valid sample
            if not self.is_valid_sample(gazepos):
                # get timestamp for possible blink start
                t0 = clock.get_time()
                # loop until a blink is determined, or a valid sample occurs
                while not self.is_valid_sample(self.sample()):
                    # check if time has surpassed BLINKTHRESH
                    if clock.get_time() - t0 >= self.blinkthresh:
                        # return timestamp of blink start
                        return t0

    def wait_for_fixation_end(self):
        """Returns time and gaze position when a fixation has ended;
        function assumes that a 'fixation' has ended when a deviation of
        more than self.pxfixtresh from the initial fixation position has
        been detected (self.pxfixtresh is created in self.calibration,
        based on self.fixtresh, a property defined in self.__init__)
        
        arguments
        None
        
        returns
        time, gazepos    -- time is the ending time in milliseconds (from
                       expstart), gazepos is a (x,y) gaze position
                       tuple of the position from which the fixation
                       was initiated
        """

        # # # # #
        # Native method

        if self.eventdetection == 'native':

            print("WARNING! 'native' event detection not implemented")

        # # # # #
        # PyGaze method

        # function assumes that a 'fixation' has ended when a deviation of more than fixtresh
        # from the initial 'fixation' position has been detected

        # get starting time and position
        stime, spos = self.wait_for_fixation_start()

        # loop until fixation has ended
        while True:
            # get new sample
            npos = self.sample()  # get newest sample
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if sample deviates too much from starting position
                if (npos[0] - spos[0])**2 + (
                        npos[1] -
                        spos[1])**2 > self.pxfixtresh**2:  # Pythagoras
                    # break loop if deviation is too high
                    break

        return clock.get_time(), spos

    def wait_for_fixation_start(self):
        """Returns starting time and position when a fixation is started;
        function assumes a 'fixation' has started when gaze position
        remains reasonably stable (i.e. when most deviant samples are
        within self.pxfixtresh) for five samples in a row (self.pxfixtresh
        is created in self.calibration, based on self.fixtresh, a property
        defined in self.__init__)
        
        arguments
        None
        
        returns
        time, gazepos    -- time is the starting time in milliseconds (from
                       expstart), gazepos is a (x,y) gaze position
                       tuple of the position from which the fixation
                       was initiated
        """

        # # # # #
        # Native method

        if self.eventdetection == 'native':

            print("WARNING! 'native' event detection not implemented")

        # # # # #
        # PyGaze method

        # function assumes a 'fixation' has started when gaze position
        # remains reasonably stable for self.fixtimetresh

        # get starting position
        spos = self.sample()
        while not self.is_valid_sample(spos):
            spos = self.sample()

        # get starting time
        t0 = clock.get_time()

        # wait for reasonably stable position
        moving = True
        while moving:
            # get new sample
            npos = self.sample()
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if new sample is too far from starting position
                if (npos[0] - spos[0])**2 + (
                        npos[1] -
                        spos[1])**2 > self.pxfixtresh**2:  # Pythagoras
                    # if not, reset starting position and time
                    spos = copy.copy(npos)
                    t0 = clock.get_time()
                # if new sample is close to starting sample
                else:
                    # get timestamp
                    t1 = clock.get_time()
                    # check if fixation time threshold has been surpassed
                    if t1 - t0 >= self.fixtimetresh:
                        # return time and starting position
                        return t1, spos

    def wait_for_saccade_end(self):
        """Returns ending time, starting and end position when a saccade is
        ended; based on Dalmaijer et al. (2013) online saccade detection
        algorithm
        
        arguments
        None
        
        returns
        endtime, startpos, endpos    -- endtime in milliseconds (from 
                               expbegintime); startpos and endpos
                               are (x,y) gaze position tuples
        """

        # # # # #
        # Native method

        if self.eventdetection == 'native':

            print("WARNING! 'native' event detection not implemented")

        # # # # #
        # PyGaze method

        # get starting position (no blinks)
        t0, spos = self.wait_for_saccade_start()
        # get valid sample
        prevpos = self.sample()
        while not self.is_valid_sample(prevpos):
            prevpos = self.sample()
        # get starting time, intersample distance, and velocity
        t1 = clock.get_time()
        s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])**
             2)**0.5  # = intersample distance = speed in px/sample
        v0 = s / (t1 - t0)

        # run until velocity and acceleration go below threshold
        saccadic = True
        while saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # calculate distance
                s = ((newpos[0] - prevpos[0])**2 + (newpos[1] - prevpos[1])**
                     2)**0.5  # = speed in pixels/sample
                # calculate velocity
                v1 = s / (t1 - t0)
                # calculate acceleration
                a = (v1 - v0) / (
                    t1 - t0
                )  # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample)
                # check if velocity and acceleration are below threshold
                if v1 < self.pxspdtresh and (a > -1 * self.pxacctresh
                                             and a < 0):
                    saccadic = False
                    epos = newpos[:]
                    etime = clock.get_time()
                # update previous values
                t0 = copy.copy(t1)
                v0 = copy.copy(v1)
            # update previous sample
            prevpos = newpos[:]

        return etime, spos, epos

    def wait_for_saccade_start(self):
        """Returns starting time and starting position when a saccade is
        started; based on Dalmaijer et al. (2013) online saccade detection
        algorithm
        
        arguments
        None
        
        returns
        starttime, startpos    -- starttime in milliseconds (from expbegintime);
                       startpos is an (x,y) gaze position tuple
        """

        # # # # #
        # Native method

        if self.eventdetection == 'native':

            print("WARNING! 'native' event detection not implemented")

        # # # # #
        # PyGaze method

        # get starting position (no blinks)
        newpos = self.sample()
        while not self.is_valid_sample(newpos):
            newpos = self.sample()
        # get starting time, position, intersampledistance, and velocity
        t0 = clock.get_time()
        prevpos = newpos[:]
        s = 0
        v0 = 0

        # get samples
        saccadic = False
        while not saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # check if distance is larger than precision error
                sx = newpos[0] - prevpos[0]
                sy = newpos[1] - prevpos[1]
                if (sx / self.pxdsttresh[0])**2 + (
                        sy / self.pxdsttresh[1]
                )**2 > self.weightdist:  # weighted distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise
                    # calculate distance
                    s = ((sx)**2 + (sy)**
                         2)**0.5  # intersampledistance = speed in pixels/ms
                    # calculate velocity
                    v1 = s / (t1 - t0)
                    # calculate acceleration
                    a = (v1 - v0) / (t1 - t0)  # acceleration in pixels/ms**2
                    # check if either velocity or acceleration are above threshold values
                    if v1 > self.pxspdtresh or a > self.pxacctresh:
                        saccadic = True
                        spos = prevpos[:]
                        stime = clock.get_time()
                    # update previous values
                    t0 = copy.copy(t1)
                    v0 = copy.copy(v1)

                # update previous sample
                prevpos = newpos[:]

        return stime, spos
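
    # Note on the weighted-distance check above (illustrative numbers): with
    # an RMS noise estimate of self.pxdsttresh = (3, 3) px and
    # self.weightdist = 10, a purely horizontal step only counts as candidate
    # saccade motion once (sx / 3)**2 > 10, i.e. once it exceeds about
    # 3 * sqrt(10) ~= 9.5 px; this keeps measurement jitter from reaching the
    # velocity/acceleration test.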

    def is_valid_sample(self, gazepos):
        """Checks if the sample provided is valid (for internal use)
        
        arguments
        gazepos        --    a (x,y) gaze position tuple, as returned by
                        self.sample()
        
        returns
        valid        --    a Boolean: True on a valid sample, False on
                        an invalid sample
        """

        # return False if a sample is invalid
        if gazepos == (None, None) or gazepos == (-1, -1) or gazepos == (0, 0):
            return False

        # in any other case, the sample is valid
        return True
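
# The stand-alone sketch below (not part of PyGaze) re-implements the
# velocity/acceleration test from wait_for_saccade_start() for an offline list
# of samples, so the online logic above can be sanity-checked against recorded
# data. The function name, the sample format and the noise defaults are
# illustrative assumptions, not library API; timestamps are assumed to be
# strictly increasing.
def find_saccade_start(samples, pxspdtresh, pxacctresh,
                       pxdsttresh=(3.0, 3.0), weightdist=10):
    """Returns (time, (x, y)) of the first detected saccade start, or None.

    samples       -- list of (t_ms, x_px, y_px) tuples; invalid samples are
                     (t, -1, -1), mirroring AleaTracker.sample()
    pxspdtresh    -- velocity threshold in px/ms (cf. self.pxspdtresh)
    pxacctresh    -- acceleration threshold in px/ms**2 (cf. self.pxacctresh)
    """
    prev = None
    t0 = None
    v0 = 0.0
    for t1, x, y in samples:
        # skip invalid samples
        if (x, y) in ((-1, -1), (0, 0), (None, None)):
            continue
        # the first valid sample only provides a starting point
        if prev is None:
            prev, t0 = (x, y), t1
            continue
        if (x, y) == prev:
            continue
        sx, sy = x - prev[0], y - prev[1]
        # ignore displacements that fall within the measurement noise
        if (sx / pxdsttresh[0])**2 + (sy / pxdsttresh[1])**2 > weightdist:
            s = (sx**2 + sy**2)**0.5    # intersample distance in pixels
            v1 = s / (t1 - t0)          # velocity in px/ms
            a = (v1 - v0) / (t1 - t0)   # acceleration in px/ms**2
            if v1 > pxspdtresh or a > pxacctresh:
                return t1, prev
            t0, v0 = t1, v1
        prev = (x, y)
    return None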
Example #20
0
class EyelinkGraphics(custom_display):

	"""
	Implements the EyeLink graphics that are shown on the experimental PC, such
	as the camera image, and the calibration dots. This class only implements
	the drawing operations, and little to no of the logic behind the set-up,
	which is implemented in PyLink.
	"""

	def __init__(self, libeyelink, tracker):

		"""
		Constructor.

		Arguments:
		libeyelink	--	A libeyelink object.
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

		pylink.EyeLinkCustomDisplay.__init__(self)

		# objects
		self.libeyelink = libeyelink
		self.display = libeyelink.display
		self.screen = Screen(disptype=DISPTYPE, mousevisible=False)
		self.kb = Keyboard(keylist=None, timeout=0)
		self.mouse = Mouse(timeout=0)
		if DISPTYPE == 'pygame':
			self.kb.set_timeout(timeout=0.001)
		# If we are using a DISPTYPE that cannot be used directly, we have to
		# save the camera image to a temporary file on each frame.
		#if DISPTYPE not in ('pygame', 'psychopy'):
		import tempfile
		import os
		self.tmp_file = os.path.join(tempfile.gettempdir(), '__eyelink__.jpg')
		# drawing properties
		self.xc = self.display.dispsize[0]/2
		self.yc = self.display.dispsize[1]/2
		self.extra_info = True
		self.ld = 40 # line distance
		self.fontsize = libeyelink.fontsize
		self.title = ""
		self.display_open = True
		# menu
		self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False)		
		self.menuscreen.draw_text(text="Eyelink calibration menu",
			pos=(self.xc,self.yc-6*self.ld), center=True, font='mono',
			fontsize=int(2*self.fontsize), antialias=True)
		self.menuscreen.draw_text(text="%s (pygaze %s, pylink %s)" \
			% (libeyelink.eyelink_model, pygaze.version, pylink.__version__),
			pos=(self.xc,self.yc-5*self.ld), center=True,
			font='mono', fontsize=int(.8*self.fontsize), antialias=True)
		self.menuscreen.draw_text(text="Press C to calibrate", 
			pos=(self.xc, self.yc-3*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press V to validate",
			pos=(self.xc, self.yc-2*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press A to auto-threshold",
			pos=(self.xc,self.yc-1*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press I to toggle extra info in camera image",
			pos=(self.xc,self.yc-0*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press Enter to show camera image",
			pos=(self.xc,self.yc+1*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(
			text="(then change between images using the arrow keys)",
			pos=(self.xc, self.yc+2*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press Escape to abort experiment",
			pos=(self.xc, self.yc+4*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)			
		self.menuscreen.draw_text(text="Press Q to exit menu",
			pos=(self.xc, self.yc+5*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		# beeps
		self.__target_beep__ = Sound(osc='sine', freq=440, length=50, 
			attack=0, decay=0, soundfile=None)
		self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200,
			attack=0, decay=0, soundfile=None)
		self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200,
			attack=0, decay=0, soundfile=None)
		# Colors
		self.color = {
			pylink.CR_HAIR_COLOR:			pygame.Color('white'),
			pylink.PUPIL_HAIR_COLOR:		pygame.Color('white'),
			pylink.PUPIL_BOX_COLOR:			pygame.Color('green'),
			pylink.SEARCH_LIMIT_BOX_COLOR:	pygame.Color('red'),
			pylink.MOUSE_CURSOR_COLOR:		pygame.Color('red'),	
			'font':							pygame.Color('white'),		
			}
		# Font
		pygame.font.init()
		self.font = pygame.font.SysFont('Courier New', 11)
		# further properties
		self.state = None
		self.pal = None
		
		self.size = (0,0)
		self.set_tracker(tracker)
		self.last_mouse_state = -1
		self.bit64 = '64bit' in platform.architecture()
		self.imagebuffer = self.new_array()		
		
	def close(self):
	
		"""
		Is called when the connection and display are shutting down.		
		"""
		
		self.display_open = False
		
	def new_array(self):
	
		"""
		Creates a new array with a system-specific format.
		
		Returns:
		An array.
		"""
		
		# On 64 bit Linux, we need to use an unsigned int data format.
		# <https://www.sr-support.com/showthread.php?3215-Visual-glitch-when-/
		# sending-eye-image-to-display-PC&highlight=ubuntu+pylink>
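		# Concretely: on 64-bit Linux the 'L' type code (unsigned long) is
		# 8 bytes per item, so tostring() would yield 8 bytes per pixel and
		# break the 4-bytes-per-pixel 'RGBX' conversion in draw_image_line();
		# 'I' (unsigned int) stays at 4 bytes there.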
		if os.name == 'posix' and self.bit64:
			return array.array('I')
		return array.array('L')

	def set_tracker(self, tracker):

		"""
		Connects the tracker to the graphics environment.

		Arguments:
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

		self.tracker = tracker
		self.tracker_version = tracker.getTrackerVersion()
		if self.tracker_version >= 3:
			self.tracker.sendCommand("enable_search_limits=YES")
			self.tracker.sendCommand("track_search_limits=YES")
			self.tracker.sendCommand("autothreshold_click=YES")
			self.tracker.sendCommand("autothreshold_repeat=YES")
			self.tracker.sendCommand("enable_camera_position_detect=YES")

	def setup_cal_display(self):

		"""
		Sets up the initial calibration display, which contains a menu with
		instructions.
		"""
		
		# show instructions
		self.display.fill(self.menuscreen)
		self.display.show()

	def exit_cal_display(self):

		"""Exits calibration display."""

		self.clear_cal_display()

	def record_abort_hide(self):

		"""TODO: What does this do?"""

		pass

	def clear_cal_display(self):

		"""Clears the calibration display"""

		self.display.fill()
		self.display.show()

	def erase_cal_target(self):

		"""TODO: What does this do?"""

		self.clear_cal_display()

	def draw_cal_target(self, x, y):

		"""
		Draws calibration target.

		Arguments:
		x		--	The X coordinate of the target.
		y		--	The Y coordinate of the target.
		"""

		self.play_beep(pylink.CAL_TARG_BEEP)
		self.screen.clear()		
		self.screen.draw_fixation(fixtype='dot', pos=(x,y))
		self.display.fill(screen=self.screen)
		self.display.show()

	def play_beep(self, beepid):

		"""
		Plays a sound.

		Arguments:
		beepid		--	A number that identifies the sound.
		"""

		if beepid == pylink.CAL_TARG_BEEP:
			# For some reason, playing the beep here doesn't work, so we have
			# to play it when the calibration target is drawn.
			if EYELINKCALBEEP:
				self.__target_beep__.play()			
		elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
			# show a picture
			self.screen.clear()
			self.screen.draw_text(
				text="calibration lost, press 'Enter' to return to menu",
				pos=(self.xc,self.yc), center=True, font='mono',
				fontsize=self.fontsize, antialias=True)
			self.display.fill(self.screen)
			self.display.show()
			# play beep
			self.__target_beep__error__.play()
		elif beepid == pylink.CAL_GOOD_BEEP:
			self.screen.clear()
			if self.state == "calibration":
				self.screen.draw_text(
					text="Calibration succesfull, press 'v' to validate",
					pos=(self.xc,self.yc), center=True, font='mono',
					fontsize=self.fontsize, antialias=True)
			elif self.state == "validation":
				self.screen.draw_text(
					text="Validation succesfull, press 'Enter' to return to menu",
					pos=(self.xc,self.yc), center=True, font='mono',
					fontsize=self.fontsize, antialias=True)				
			else:
				self.screen.draw_text(text="Press 'Enter' to return to menu",
					pos=(self.xc,self.yc), center=True, font='mono',
					fontsize=self.fontsize, antialias=True)
			# show screen
			self.display.fill(self.screen)
			self.display.show()
			# play beep
			self.__target_beep__done__.play()
		else: #	DC_GOOD_BEEP	or DC_TARG_BEEP
			pass

	def draw_line(self, x1, y1, x2, y2, colorindex):

		"""
		Contrary to what the function name suggests, this draws a single pixel, i.e.
		the end coordinates are always exactly one pixel away from the start
		coordinates.
		
		Arguments:
		x1			--	The starting x.
		y1			--	The starting y.
		x2			--	The end x.
		y2			--	The end y.
		colorindex	--	A color index.
		"""

		x1 = int(self.scale*x1)
		y1 = int(self.scale*y1)
		x2 = int(self.scale*x2)
		y2 = int(self.scale*y2)			
		pygame.draw.line(self.cam_img, self.color[colorindex], (x1, y1),
			(x2, y2))
		
	def draw_lozenge(self, x, y, w, h, colorindex):

		"""
		desc:
			Draws a rectangle.
			
		arguments:
			x:
				desc:	X coordinate.
				type:	int
			y:
				desc:	Y coordinate.
				type:	int
			w:
				desc:	A width.
				type:	int
			h:
				desc:	A height.
				type:	int
			colorindex:
				desc:	A colorindex.
				type:	int
		"""

		x = int(self.scale*x)
		y = int(self.scale*y)
		w = int(self.scale*w)
		h = int(self.scale*h)		
		pygame.draw.rect(self.cam_img, self.color[colorindex], (x, y, w, h), 2)
		
	def draw_title(self):
	
		"""
		desc:
			Draws title info.
		"""
	
		y = 0
		for line in self.title:
			surf = self.font.render(line, 0, self.color['font'])
			self.cam_img.blit(surf, (1, y))
			y += 12

	def get_mouse_state(self):

		"""
		desc:
			Gets the mouse position and state.
			
		returns:
			desc:	A (pos, state) tuple.
			type:	tuple.		
		"""
		
		button, pos, time = self.mouse.get_clicked()
		if button == None:
			button = -1
		if pos == None:
			pos = self.mouse.get_pos()
		return pos, button

	def get_input_key(self):

		"""
		Gets an input key.

		Returns:
		A list containing a single pylink key identifier.
		"""

		# Don't try to collect key presses when the display is no longer
		# available. This is necessary, because pylink polls key presses during
		# file transfer, which generally occurs after the display has been
		# closed.
		if not self.display_open:
			return None
		try:
			key, time = self.kb.get_key(keylist=None, timeout='default')
		except:
			self.esc_pressed = True
			key = 'q'
		if key == None:
			return None
		# Escape functions as a 'q' with the additional esc_pressed flag
		if key == 'escape':
			key = 'q'
			self.esc_pressed = True
		# Process regular keys
		if key == "return":
			keycode = pylink.ENTER_KEY
			self.state = None
		elif key == "space":
			keycode = ord(" ")
		elif key == "q":
			keycode = pylink.ESC_KEY
			self.state = None
		elif key == "c":
			keycode = ord("c")
			self.state = "calibration"
		elif key == "v":
			keycode = ord("v")
			self.state = "validation"
		elif key == "a":
			keycode = ord("a")
		elif key == "i":
			self.extra_info = not self.extra_info
			keycode = 0
		elif key == "up":
			keycode = pylink.CURS_UP
		elif key == "down":
			keycode = pylink.CURS_DOWN
		elif key == "left":
			keycode = pylink.CURS_LEFT
		elif key == "right":
			keycode = pylink.CURS_RIGHT
		else:
			keycode = 0
		# Convert key to PyLink keycode and return
		return [pylink.KeyInput(keycode, 0)] # 0 = pygame.KMOD_NONE

	def exit_image_display(self):

		"""Exits the image display."""

		self.clear_cal_display()

	def alert_printf(self,msg):

		"""
		Prints alert message.

		Arguments:
		msg		--	The message to be displayed.
		"""

		print "eyelink_graphics.alert_printf(): %s" % msg

	def setup_image_display(self, width, height):

		"""
		Initializes the buffer that will contain the camera image.

		Arguments:
		width		--	The width of the image.
		height		--	The height of the image.
		"""

		self.size = width, height
		self.clear_cal_display()
		self.last_mouse_state = -1
		self.imagebuffer = self.new_array()

	def image_title(self, text):

		"""
		Sets the current image title.

		Arguments:
		text	--	An image title.
		"""

		while ': ' in text:
			text = text.replace(': ', ':')
		self.title = text.split()
		
	def draw_image_line(self, width, line, totlines, buff):

		"""
		Draws a single eye video frame, line by line.

		Arguments:

		width		--	Width of the video.
		line		--	Line nr of current line.
		totlines	--	Total lines in video.
		buff		--	Frame buffer; the image is (usually?) 192x160 px.
		"""

		# If the buffer hasn't been filled yet, add a line.
		for i in range(width):
			try:
				self.imagebuffer.append(self.pal[buff[i]])
			except:
				pass
		# If the buffer is full, push it to the display.
		if line == totlines:
			self.scale = totlines/320.
			self._size = int(self.scale*self.size[0]), int(
				self.scale*self.size[1])
			# Convert the image buffer to a pygame image, save it ...			
			self.cam_img = pygame.image.fromstring(self.imagebuffer.tostring(),
				self._size, 'RGBX')
			if self.extra_info:
				self.draw_cross_hair()				
				self.draw_title()
			pygame.image.save(self.cam_img, self.tmp_file)
			# ... and then show the image.
			self.screen.clear()
			self.screen.draw_image(self.tmp_file, scale=1.5/self.scale)
			self.display.fill(self.screen)
			self.display.show()			
			# Clear the buffer for the next round!
			self.imagebuffer = self.new_array()

	def set_image_palette(self, r, g, b):

		"""
		Sets the image palette.

		TODO: What this function actually does is highly mysterious. Figure it
		out!

		Arguments:
		r		--	The red channel.
		g		--	The green channel.
		b		--	The blue channel.
		"""

		self.imagebuffer = self.new_array()
		self.clear_cal_display()
		sz = len(r)
		i = 0
		self.pal = []
		while i < sz:
			rf = int(b[i])
			gf = int(g[i])
			bf = int(r[i])
			self.pal.append((rf<<16) | (gf<<8) | (bf))
			i += 1
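

# Quick stand-alone check (not part of the class above) of why
# set_image_palette() stores the *blue* channel in the high byte: on a
# little-endian machine, array('I').tostring()/tobytes() emits each packed
# value least-significant byte first, so (b << 16) | (g << 8) | r comes out as
# the byte sequence R, G, B, 0 -- exactly what the 'RGBX' format passed to
# pygame.image.fromstring() in draw_image_line() expects. The channel values
# below are made up for illustration.
import array

r, g, b = 200, 150, 100
packed = (b << 16) | (g << 8) | r
print(array.array('I', [packed]).tobytes())
# on little-endian hardware: b'\xc8\x96d\x00', i.e. R=200, G=150, B=100, X=0
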
# PyGaze
from constants import *
from pygaze.display import Display
from pygaze.screen import Screen
from pygaze.eyetracker import EyeTracker
from pygaze.keyboard import Keyboard
from pygaze.libtime import clock


# # # # #
# SETUP

# visuals
disp = Display()
scr = Screen()

# input
tracker = EyeTracker(disp)
kb = Keyboard(keylist=None, timeout=None)

# calibrate
tracker.calibrate()

# starting screen
scr.clear()
scr.draw_text(text="Press Space to start")
disp.fill(scr)
disp.show()
kb.get_key(keylist=['space'], timeout=None, flush=True)
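
# A possible continuation of the snippet above (illustrative only): record a
# single dummy "trial", log start/end markers, and shut everything down. The
# 2000 ms duration and the fixation-cross stimulus are arbitrary choices, not
# part of the original snippet.

# run one dummy trial
tracker.start_recording()
tracker.log("trial_start")
scr.clear()
scr.draw_fixation(fixtype='cross')
disp.fill(scr)
disp.show()
clock.pause(2000)
tracker.log("trial_end")
tracker.stop_recording()

# close down
tracker.close()
disp.close()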
Example #22
0
class EyeTribeTracker(BaseEyeTracker):
    """A class for EyeTribeTracker objects"""
    def __init__(self,
                 display,
                 logfile=settings.LOGFILE,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 blink_threshold=settings.BLINKTHRESH,
                 **args):
        """Initializes the EyeTribeTracker object
        
        arguments
        display    -- a pygaze.display.Display instance
        
        keyword arguments
        logfile    -- logfile name (string value); note that this is the
                   name for the eye data log file (default = LOGFILE)
        """

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, EyeTribeTracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        # object properties
        self.disp = display
        self.screen = Screen()
        self.dispsize = settings.DISPSIZE  # display size in pixels
        self.screensize = settings.SCREENSIZE  # display size in cm
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # output file properties
        self.outputfile = logfile

        # eye tracker properties
        self.connected = False
        self.recording = False
        self.errdist = 2  # degrees; maximal error for drift correction
        self.pxerrdist = 30  # initial error in pixels
        self.maxtries = 100  # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold  # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection
        self.set_detection_type(self.eventdetection)
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades being detected)

        # connect to the tracker
        self.eyetribe = EyeTribe(logfilename=logfile)

        # get info on the sample rate
        self.samplerate = self.eyetribe._samplefreq
        self.sampletime = 1000.0 * self.eyetribe._intsampletime

        # initiation report
        self.log("pygaze initiation report start")
        self.log("display resolution: {}x{}".format(self.dispsize[0],
                                                    self.dispsize[1]))
        self.log("display size in cm: {}x{}".format(self.screensize[0],
                                                    self.screensize[1]))
        self.log("samplerate: {} Hz".format(self.samplerate))
        self.log("sampletime: {} ms".format(self.sampletime))
        self.log("fixation threshold: {} degrees".format(self.fixtresh))
        self.log("speed threshold: {} degrees/second".format(self.spdtresh))
        self.log("acceleration threshold: {} degrees/second**2".format(
            self.accthresh))
        self.log("pygaze initiation report end")

    def calibrate(self):
        """Calibrates the eye tracking system
        
        arguments
        None
        
        keyword arguments
        None

        returns
        success    -- returns True if calibration succeeded, or False if
                   not; in addition a calibration log is added to the
                   log file and some properties are updated (i.e. the
                   thresholds for detection algorithms)
        """

        # CALIBRATION
        # determine the calibration points
        calibpoints = []
        for x in [0.1, 0.5, 0.9]:
            for y in [0.1, 0.5, 0.9]:
                calibpoints.append(
                    (int(x * self.dispsize[0]), int(y * self.dispsize[1])))
        random.shuffle(calibpoints)

        # show a message
        self.screen.clear()
        self.screen.draw_text(
            text="Press Space to calibrate, S to skip, and Q to quit",
            fontsize=20)
        self.disp.fill(self.screen)
        self.disp.show()

        # wait for keyboard input
        key, keytime = self.kb.get_key(keylist=['q', 's', 'space'],
                                       timeout=None,
                                       flush=True)
        if key == 's':
            return True
        if key == 'q':
            quited = True
        else:
            quited = False

        # Pause the processing of samples during the calibration.
        # self.eyetribe._pause_sample_processing()
        # run until the user is satisfied, or quits
        calibrated = False
        calibresult = None
        while not quited and not calibrated:

            # Clear the existing calibration.
            if self.eyetribe._tracker.get_iscalibrated():
                self.eyetribe._lock.acquire(True)
                self.eyetribe.calibration.clear()
                self.eyetribe._lock.release()

            # Wait for a bit.
            clock.pause(1500)

            # start a new calibration
            if not self.eyetribe._tracker.get_iscalibrating():
                self.eyetribe._lock.acquire(True)
                self.eyetribe.calibration.start(pointcount=len(calibpoints))
                self.eyetribe._lock.release()

            # loop through calibration points
            for cpos in calibpoints:
                # Check whether the calibration is already done.
                # (Not sure how or why, but for some reason some data
                # can persist between calibrations, and the tracker will
                # simply stop allowing further pointstart requests.)
                if self.eyetribe._tracker.get_iscalibrated():
                    break

                # Draw a calibration target.
                self.draw_calibration_target(cpos[0], cpos[1])
                # wait for a bit to allow participant to start looking at
                # the calibration point (#TODO: space press?)
                clock.pause(settings.EYETRIBEPRECALIBDUR)
                # start calibration of point
                self.eyetribe._lock.acquire(True)
                self.eyetribe.calibration.pointstart(cpos[0], cpos[1])
                self.eyetribe._lock.release()
                # wait for a second
                clock.pause(settings.EYETRIBECALIBDUR)
                # stop calibration of this point
                self.eyetribe._lock.acquire(True)
                self.eyetribe.calibration.pointend()
                self.eyetribe._lock.release()
                # check if the Q key has been pressed
                if self.kb.get_key(keylist=['q'], timeout=10,
                                   flush=False)[0] == 'q':
                    # abort calibration
                    self.eyetribe._lock.acquire(True)
                    self.eyetribe.calibration.abort()
                    self.eyetribe._lock.release()
                    # set quited variable and break this for loop
                    quited = True
                    break

            # retry option if the calibration was aborted
            if quited:
                # show retry message
                self.screen.clear()
                self.screen.draw_text(
                    "Calibration aborted. Press Space to restart or 'Q' to quit",
                    fontsize=20)
                self.disp.fill(self.screen)
                self.disp.show()
                # get input
                key, keytime = self.kb.get_key(keylist=['q', 'space'],
                                               timeout=None,
                                               flush=True)
                if key == 'space':
                    # unset quited Boolean
                    quited = False
                # skip further processing
                continue

            # empty display
            self.disp.fill()
            self.disp.show()
            # allow for a bit of calculation time
            # (this is waaaaaay too much)
            clock.pause(1000)
            # get the calibration result
            self.eyetribe._lock.acquire(True)
            calibresult = self.eyetribe._tracker.get_calibresult()
            self.eyetribe._lock.release()

            # results
            # clear the screen
            self.screen.clear()
            # draw results for each point
            if type(calibresult) == dict:
                for p in calibresult['calibpoints']:
                    # only draw the point if data was obtained
                    if p['state'] > 0:
                        # draw the mean error
                        # self.screen.draw_circle(colour=(252,233,79),
                        #     pos=(p['cpx'],p['cpy']), r=p['mepix'], pw=0,
                        #     fill=True)
                        self.screen.draw_line(spos=(p['cpx'], p['cpy']),
                                              epos=(p['mecpx'], p['mecpy']),
                                              pw=2)
                        # draw the point
                        self.screen.draw_fixation(fixtype='dot',
                                                  colour=(115, 210, 22),
                                                  pos=(p['cpx'], p['cpy']))
                        # draw the estimated point
                        self.screen.draw_fixation(fixtype='dot',
                                                  colour=(32, 74, 135),
                                                  pos=(p['mecpx'], p['mecpy']))
                        # annotate accuracy
                        self.screen.draw_text(text="{}".format(\
                            round(p['acd'], ndigits=2)),
                            pos=(p['cpx']+10,p['cpy']+10), fontsize=20)
                    # if no data was obtained, draw the point in red
                    else:
                        self.screen.draw_fixation(fixtype='dot',
                                                  colour=(204, 0, 0),
                                                  pos=(p['cpx'], p['cpy']))
                # draw box for averages
                # self.screen.draw_rect(colour=(238,238,236), x=int(self.dispsize[0]*0.15), y=int(self.dispsize[1]*0.2), w=400, h=200, pw=0, fill=True)
                # draw result
                if calibresult['result']:
                    self.screen.draw_text(text="Calibration successful",
                                          colour=(0, 255, 0),
                                          pos=(int(self.dispsize[0] * 0.5),
                                               int(self.dispsize[1] * 0.25)),
                                          fontsize=20)
                else:
                    self.screen.draw_text(text="Calibration failed",
                                          colour=(255, 0, 0),
                                          pos=(int(self.dispsize[0] * 0.5),
                                               int(self.dispsize[1] * 0.25)),
                                          fontsize=20)
                # draw average accuracy
                self.screen.draw_text(
                    text="Average error = {} degrees".format(round(\
                    calibresult['deg'], ndigits=2)), \
                    pos=(int(self.dispsize[0]*0.5),int(self.dispsize[1]*0.25+30)),
                    fontsize=20)
                # draw input options
                self.screen.draw_text(
                    text="Press Space to continue or 'R' to restart",
                    pos=(int(self.dispsize[0] * 0.5),
                         int(self.dispsize[1] * 0.25 + 60)),
                    fontsize=20)
            else:
                self.screen.draw_text(
                    text="Calibration failed. Press 'R' to try again.",
                    fontsize=20)
            # show the results
            self.disp.fill(self.screen)
            self.disp.show()
            # wait for input
            key, keytime = self.kb.get_key(keylist=['space', 'r'],
                                           timeout=None,
                                           flush=True)
            # process input
            if key == 'space':
                calibrated = True

        # Continue the processing of samples after the calibration.
        # self.eyetribe._unpause_sample_processing()

        # calibration failed if the user quit
        if quited:
            return False

        # NOISE CALIBRATION
        # get all error estimates (pixels)
        var = []
        for p in calibresult['calibpoints']:
            # only draw the point if data was obtained
            if p['state'] > 0:
                var.append(p['mepix'])
        noise = sum(var) / float(len(var))
        self.pxdsttresh = (noise, noise)

        # AFTERMATH
        # store some variables
        pixpercm = (self.dispsize[0] / float(self.screensize[0]) +
                    self.dispsize[1] / float(self.screensize[1])) / 2
        screendist = settings.SCREENDIST
        # calculate thresholds based on tracker settings
        self.accuracy = ((calibresult['Ldeg'], calibresult['Ldeg']),
                         (calibresult['Rdeg'], calibresult['Rdeg']))
        self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm)
        self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
        self.pxaccuracy = ((deg2pix(screendist, self.accuracy[0][0], pixpercm),
                            deg2pix(screendist, self.accuracy[0][1],
                                    pixpercm)),
                           (deg2pix(screendist, self.accuracy[1][0], pixpercm),
                            deg2pix(screendist, self.accuracy[1][1],
                                    pixpercm)))
        self.pxspdtresh = deg2pix(screendist, self.spdtresh / 1000.0,
                                  pixpercm)  # in pixels per millisecond
        self.pxacctresh = deg2pix(screendist, self.accthresh / 1000.0,
                                  pixpercm)  # in pixels per millisecond**2
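
        # Worked example (added for illustration; the numbers are hypothetical
        # and assume the usual visual-angle conversion, i.e.
        # pixels = tan(radians(deg)) * screendist * pixpercm):
        # for a 1920x1080 px display measuring 52.2x29.4 cm,
        # pixpercm = (1920/52.2 + 1080/29.4) / 2 ~= 36.8 px/cm. At a viewing
        # distance of 60 cm, 1 degree then spans tan(1 deg)*60*36.8 ~= 38.6 px,
        # so a fixation threshold of self.fixtresh = 1.5 degrees becomes a
        # pxfixtresh of roughly 58 px.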

        # calibration report
        self.log("pygaze calibration report start")
        self.log("accuracy (degrees): LX={}, LY={}, RX={}, RY={}".format(
            self.accuracy[0][0], self.accuracy[0][1], self.accuracy[1][0], \
            self.accuracy[1][1]))
        self.log("accuracy (in pixels): LX={}, LY={}, RX={}, RY={}".format( \
            self.pxaccuracy[0][0], self.pxaccuracy[0][1], \
            self.pxaccuracy[1][0], self.pxaccuracy[1][1]))
        self.log("precision (RMS noise in pixels): X={}, Y={}".format( \
            self.pxdsttresh[0], self.pxdsttresh[1]))
        self.log("distance between participant and display: {} cm".format( \
            screendist))
        self.log("fixation threshold: {} pixels".format(self.pxfixtresh))
        self.log("speed threshold: {} pixels/ms".format(self.pxspdtresh))
        self.log("acceleration threshold: {} pixels/ms**2".format( \
            self.pxacctresh))
        self.log("pygaze calibration report end")

        return True

    def close(self):
        """Neatly close connection to tracker
        
        arguments
        None
        
        returns
        Nothing    -- saves data and sets self.connected to False
        """

        # close connection
        self.eyetribe.close()
        self.connected = False

    def connected(self):
        """Checks if the tracker is connected
        
        arguments
        None
        
        returns
        connected    -- True if connection is established, False if not;
                   sets self.connected to the same value
        """

        res = self.eyetribe._tracker.get_trackerstate()

        if res == 0:
            self.connected = True
        else:
            self.connected = False

        return self.connected

    def drift_correction(self, pos=None, fix_triggered=False):
        """Performs a drift check
        
        arguments
        None
        
        keyword arguments
        pos            -- (x, y) position of the fixation dot or None for
                       a central fixation (default = None)
        fix_triggered    -- Boolean indicating if drift check should be
                       performed based on gaze position (fix_triggered
                       = True) or on spacepress (fix_triggered = 
                       False) (default = False)
        
        returns
        checked        -- Boolean indicating if drift check is ok (True)
                       or not (False); or calls self.calibrate if 'q'
                       or 'escape' is pressed
        """

        if pos == None:
            pos = self.dispsize[0] / 2, self.dispsize[1] / 2
        if fix_triggered:
            return self.fix_triggered_drift_correction(pos)
        self.draw_drift_correction_target(pos[0], pos[1])
        pressed = False
        while not pressed:
            pressed, presstime = self.kb.get_key()
            if pressed:
                if pressed == 'escape' or pressed == 'q':
                    print(
                        "libeyetribe.EyeTribeTracker.drift_correction: 'q' or 'escape' pressed"
                    )
                    return self.calibrate()
                gazepos = self.sample()
                if ((gazepos[0] - pos[0])**2 +
                    (gazepos[1] - pos[1])**2)**0.5 < self.pxerrdist:
                    return True
                else:
                    self.errorbeep.play()
        return False

    def draw_drift_correction_target(self, x, y):
        """
        Draws the drift-correction target.
        
        arguments
        
        x        --    The X coordinate
        y        --    The Y coordinate
        """

        self.screen.clear()
        self.screen.draw_fixation(fixtype='dot',
                                  colour=settings.FGC,
                                  pos=(x, y),
                                  pw=0,
                                  diameter=12)
        self.disp.fill(self.screen)
        self.disp.show()

    def draw_calibration_target(self, x, y):

        self.draw_drift_correction_target(x, y)

    def fix_triggered_drift_correction(self,
                                       pos=None,
                                       min_samples=10,
                                       max_dev=60,
                                       reset_threshold=30):
        """Performs a fixation triggered drift correction by collecting
        a number of samples and calculating the average distance from the
        fixation position
        
        arguments
        None
        
        keyword arguments
        pos            -- (x, y) position of the fixation dot or None for
                       a central fixation (default = None)
        min_samples        -- minimal amount of samples after which an
                       average deviation is calculated (default = 10)
        max_dev        -- maximal deviation from fixation in pixels
                       (default = 60)
        reset_threshold    -- if the horizontal or vertical distance in
                       pixels between two consecutive samples is
                       larger than this threshold, the sample
                       collection is reset (default = 30)
        
        returns
        checked        -- Boolean indicating if drift check is ok (True)
                       or not (False); or calls self.calibrate if 'q'
                       or 'escape' is pressed
        """

        if pos == None:
            pos = self.dispsize[0] / 2, self.dispsize[1] / 2
        self.draw_drift_correction_target(pos[0], pos[1])

        # loop until we have sufficient samples
        lx = []
        ly = []
        while len(lx) < min_samples:

            # pressing escape enters the calibration screen
            if self.kb.get_key()[0] in ['escape', 'q']:
                print(
                    "libeyetribe.EyeTribeTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed"
                )
                return self.calibrate()

            # collect a sample
            x, y = self.sample()

            if len(lx) == 0 or x != lx[-1] or y != ly[-1]:

                # if present sample deviates too much from previous sample, reset counting
                if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold
                                    or abs(y - ly[-1]) > reset_threshold):
                    lx = []
                    ly = []

                # collect samples
                else:
                    lx.append(x)
                    ly.append(y)

            if len(lx) == min_samples:

                avg_x = sum(lx) / len(lx)
                avg_y = sum(ly) / len(ly)
                d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5

                if d < max_dev:
                    return True
                else:
                    lx = []
                    ly = []

    def get_eyetracker_clock_async(self):
        """Not supported for EyeTribeTracker (yet)"""

        print("function not supported yet")

    def log(self, msg):
        """Writes a message to the log file
        
        arguments
        msg        -- a string to include in the log file
        
        returns
        Nothing    -- uses the EyeTribe's native log function to include a
                   line in the log file
        """

        self.eyetribe.log_message(msg)

    def prepare_drift_correction(self, pos):
        """Not supported for EyeTribeTracker (yet)"""

        print("function not supported yet")

    def pupil_size(self):
        """Return pupil size
        
        arguments
        None
        
        returns
        pupil size    -- returns pupil diameter for the eye that is currently
                   being tracked (as specified by self.eye_used) or -1
                   when no data is obtainable
        """

        # get newest pupil size
        ps = self.eyetribe.pupil_size()

        # invalid data
        if ps == None:
            return -1

        # check if the new pupil size is the same as the previous
        if ps != self.prevps:
            # update the pupil size
            self.prevps = copy.copy(ps)

        return self.prevps

    def sample(self):
        """Returns newest available gaze position
        
        arguments
        None
        
        returns
        sample    -- an (x,y) tuple or a (-1,-1) on an error
        """

        # get newest sample
        s = self.eyetribe.sample()

        # invalid data
        if s == (None, None):
            return (-1, -1)

        # check if the new sample is the same as the previous
        if s != self.prevsample:
            # update the current sample
            self.prevsample = copy.copy(s)

        return self.prevsample

    def send_command(self, cmd):
        """Sends a command to the eye tracker
        
        arguments
        cmd        --    the command to be sent to the EyeTribe, which should
                    be a list with the following information:
                        [category, request, values]
        
        returns
        Nothing
        """

        self.eyetribe._connection.request(cmd)

    def start_recording(self):
        """Starts recording eye position
        
        arguments
        None
        
        returns
        Nothing    -- sets self.recording to True when recording is
                   successfully started
        """

        self.eyetribe.start_recording()
        self.recording = True

    def status_msg(self, msg):
        """Not supported for EyeTribeTracker (yet)"""

        print("function not supported yet")

    def stop_recording(self):
        """Stop recording eye position
        
        arguments
        None
        
        returns
        Nothing    -- sets self.recording to False when recording is
                   successfully stopped
        """

        self.eyetribe.stop_recording()
        self.recording = False

    def set_detection_type(self, eventdetection):
        """Set the event detection type to either PyGaze algorithms, or
        native algorithms as provided by the manufacturer (only if
        available: detection type will default to PyGaze if no native
        functions are available)
        
        arguments
        eventdetection    --    a string indicating which detection type
                        should be employed: either 'pygaze' for
                        PyGaze event detection algorithms or
                        'native' for the manufacturer's algorithms (only
                        if available; will default to 'pygaze' if no
                        native event detection is available)
        returns        --    detection type for saccades, fixations and
                        blinks in a tuple, e.g. 
                        ('pygaze','native','native') when 'native'
                        was passed, but native detection was not
                        available for saccade detection
        """

        if eventdetection in ['pygaze', 'native']:
            self.eventdetection = eventdetection

        return ('pygaze', 'pygaze', 'pygaze')

    def wait_for_event(self, event):
        """Waits for event
        
        arguments
        event        -- an integer event code, one of the following:
                    3 = STARTBLINK
                    4 = ENDBLINK
                    5 = STARTSACC
                    6 = ENDSACC
                    7 = STARTFIX
                    8 = ENDFIX
        
        returns
        outcome    -- a self.wait_for_* method is called, depending on the
                   specified event; the return values of corresponding
                   method are returned
        """

        if event == 5:
            outcome = self.wait_for_saccade_start()
        elif event == 6:
            outcome = self.wait_for_saccade_end()
        elif event == 7:
            outcome = self.wait_for_fixation_start()
        elif event == 8:
            outcome = self.wait_for_fixation_end()
        elif event == 3:
            outcome = self.wait_for_blink_start()
        elif event == 4:
            outcome = self.wait_for_blink_end()
        else:
            raise Exception(
                "Error in libeyetribe.EyeTribeTracker.wait_for_event: eventcode {} is not supported"
                .format(event))

        return outcome

    def wait_for_blink_end(self):
        """Waits for a blink end and returns the blink ending time
        
        arguments
        None
        
        returns
        timestamp        --    blink ending time in milliseconds, as
                        measured from experiment begin time
        """

        # # # # #
        # EyeTribe method

        if self.eventdetection == 'native':

            # print warning, since EyeTribe does not have a blink detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
                but EyeTribe does not offer blink detection; PyGaze algorithm \
                will be used")

        # # # # #
        # PyGaze method

        blinking = True

        # loop while there is a blink
        while blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's valid
            if self.is_valid_sample(gazepos):
                # if it is a valid sample, blinking has stopped
                blinking = False

        # return timestamp of blink end
        return clock.get_time()

    def wait_for_blink_start(self):
        """Waits for a blink start and returns the blink starting time
        
        arguments
        None
        
        returns
        timestamp        --    blink starting time in milliseconds, as
                        measured from experiment begin time
        """

        # # # # #
        # EyeTribe method

        if self.eventdetection == 'native':

            # print warning, since EyeTribe does not have a blink detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
                but EyeTribe does not offer blink detection; PyGaze algorithm \
                will be used")

        # # # # #
        # PyGaze method

        blinking = False

        # loop until there is a blink
        while not blinking:
            # get newest sample
            gazepos = self.sample()
            # check if it's a valid sample
            if not self.is_valid_sample(gazepos):
                # get timestamp for possible blink start
                t0 = clock.get_time()
                # loop until a blink is determined, or a valid sample occurs
                while not self.is_valid_sample(self.sample()):
                    # check if time has surpassed BLINKTHRESH
                    if clock.get_time() - t0 >= self.blinkthresh:
                        # return timestamp of blink start
                        return t0

    def wait_for_fixation_end(self):
        """Returns time and gaze position when a fixation has ended;
        function assumes that a 'fixation' has ended when a deviation of
        more than self.pxfixtresh from the initial fixation position has
        been detected (self.pxfixtresh is created in self.calibration,
        based on self.fixtresh, a property defined in self.__init__)
        
        arguments
        None
        
        returns
        time, gazepos    -- time is the ending time in milliseconds (from
                       expstart), gazepos is a (x,y) gaze position
                       tuple of the position from which the fixation
                       was initiated
        """

        # # # # #
        # EyeTribe method

        if self.eventdetection == 'native':

            # print warning, since EyeTribe does not have a fixation detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
                but EyeTribe does not offer fixation detection; \
                PyGaze algorithm will be used")

        # # # # #
        # PyGaze method

        # function assumes that a 'fixation' has ended when a deviation of more than fixtresh
        # from the initial 'fixation' position has been detected

        # get starting time and position
        stime, spos = self.wait_for_fixation_start()

        # loop until fixation has ended
        while True:
            # get new sample
            npos = self.sample()  # get newest sample
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if sample deviates too much from starting position
                if (npos[0] - spos[0])**2 + (
                        npos[1] -
                        spos[1])**2 > self.pxfixtresh**2:  # Pythagoras
                    # break loop if deviation is too high
                    break

        return clock.get_time(), spos

    def wait_for_fixation_start(self):
        """Returns starting time and position when a fixation is started;
        function assumes a 'fixation' has started when gaze position
        remains reasonably stable (i.e. when the most deviant samples are
        within self.pxfixtresh) for at least self.fixtimetresh ms (self.pxfixtresh
        is created in self.calibration, based on self.fixtresh, a property
        defined in self.__init__)
        
        arguments
        None
        
        returns
        time, gazepos    -- time is the starting time in milliseconds (from
                       expstart), gazepos is a (x,y) gaze position
                       tuple of the position from which the fixation
                       was initiated
        """

        # # # # #
        # EyeTribe method

        if self.eventdetection == 'native':

            # print warning, since EyeTribe does not have a fixation start
            # detection built into their API (only ending)

            print("WARNING! 'native' event detection has been selected, \
                but EyeTribe does not offer fixation detection; \
                PyGaze algorithm will be used")

        # # # # #
        # PyGaze method

        # function assumes a 'fixation' has started when gaze position
        # remains reasonably stable for self.fixtimetresh

        # get starting position
        spos = self.sample()
        while not self.is_valid_sample(spos):
            spos = self.sample()

        # get starting time
        t0 = clock.get_time()

        # wait for reasonably stable position
        moving = True
        while moving:
            # get new sample
            npos = self.sample()
            # check if sample is valid
            if self.is_valid_sample(npos):
                # check if new sample is too far from starting position
                if (npos[0] - spos[0])**2 + (
                        npos[1] -
                        spos[1])**2 > self.pxfixtresh**2:  # Pythagoras
                    # if not, reset starting position and time
                    spos = copy.copy(npos)
                    t0 = clock.get_time()
                # if new sample is close to starting sample
                else:
                    # get timestamp
                    t1 = clock.get_time()
                    # check if fixation time threshold has been surpassed
                    if t1 - t0 >= self.fixtimetresh:
                        # return time and starting position
                        return t1, spos

    def wait_for_saccade_end(self):
        """Returns ending time, starting and end position when a saccade is
        ended; based on Dalmaijer et al. (2013) online saccade detection
        algorithm
        
        arguments
        None
        
        returns
        endtime, startpos, endpos    -- endtime in milliseconds (from 
                               expbegintime); startpos and endpos
                               are (x,y) gaze position tuples
        """

        # # # # #
        # EyeTribe method

        if self.eventdetection == 'native':

            # print warning, since EyeTribe does not have a saccade detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
                but EyeTribe does not offer saccade detection; PyGaze \
                algorithm will be used")

        # # # # #
        # PyGaze method

        # get starting position (no blinks)
        t0, spos = self.wait_for_saccade_start()
        # get valid sample
        prevpos = self.sample()
        while not self.is_valid_sample(prevpos):
            prevpos = self.sample()
        # get starting time, intersample distance, and velocity
        t1 = clock.get_time()
        s = ((prevpos[0] - spos[0])**2 + (prevpos[1] - spos[1])**
             2)**0.5  # = intersample distance = speed in px/sample
        v0 = s / (t1 - t0)

        # run until velocity and acceleration go below threshold
        saccadic = True
        while saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # calculate distance
                s = ((newpos[0] - prevpos[0])**2 + (newpos[1] - prevpos[1])**
                     2)**0.5  # = speed in pixels/sample
                # calculate velocity
                v1 = s / (t1 - t0)
                # calculate acceleration
                a = (v1 - v0) / (
                    t1 - t0
                )  # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample)
                # check if velocity and acceleration are below threshold
                if v1 < self.pxspdtresh and (a > -1 * self.pxacctresh
                                             and a < 0):
                    saccadic = False
                    epos = newpos[:]
                    etime = clock.get_time()
                # update previous values
                t0 = copy.copy(t1)
                v0 = copy.copy(v1)
            # update previous sample
            prevpos = newpos[:]

        return etime, spos, epos

    def wait_for_saccade_start(self):
        """Returns starting time and starting position when a saccade is
        started; based on Dalmaijer et al. (2013) online saccade detection
        algorithm
        
        arguments
        None
        
        returns
        starttime, startpos    -- starttime in milliseconds (from expbegintime);
                       startpos is an (x,y) gaze position tuple
        """

        # # # # #
        # EyeTribe method

        if self.eventdetection == 'native':

            # print warning, since EyeTribe does not have a saccade detection
            # built into their API

            print("WARNING! 'native' event detection has been selected, \
                but EyeTribe does not offer saccade detection; PyGaze \
                algorithm will be used")

        # # # # #
        # PyGaze method

        # get starting position (no blinks)
        newpos = self.sample()
        while not self.is_valid_sample(newpos):
            newpos = self.sample()
        # get starting time, position, intersampledistance, and velocity
        t0 = clock.get_time()
        prevpos = newpos[:]
        s = 0
        v0 = 0

        # get samples
        saccadic = False
        while not saccadic:
            # get new sample
            newpos = self.sample()
            t1 = clock.get_time()
            if self.is_valid_sample(newpos) and newpos != prevpos:
                # check if distance is larger than precision error
                sx = newpos[0] - prevpos[0]
                sy = newpos[1] - prevpos[1]
                if (sx / self.pxdsttresh[0])**2 + (
                        sy / self.pxdsttresh[1]
                )**2 > self.weightdist:  # weigthed distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise
                    # calculate distance
                    s = ((sx)**2 + (sy)**
                         2)**0.5  # intersampledistance = speed in pixels/ms
                    # calculate velocity
                    v1 = s / (t1 - t0)
                    # calculate acceleration
                    a = (v1 - v0) / (t1 - t0)  # acceleration in pixels/ms**2
                    # check if either velocity or acceleration are above threshold values
                    if v1 > self.pxspdtresh or a > self.pxacctresh:
                        saccadic = True
                        spos = prevpos[:]
                        stime = clock.get_time()
                    # update previous values
                    t0 = copy.copy(t1)
                    v0 = copy.copy(v1)

                # update previous sample
                prevpos = newpos[:]

        return stime, spos

    def is_valid_sample(self, gazepos):
        """Checks if the sample provided is valid, based on EyeTribe specific
        criteria (for internal use)
        
        arguments
        gazepos        --    a (x,y) gaze position tuple, as returned by
                        self.sample()
        
        returns
        valid        --    a Boolean: True on a valid sample, False on
                        an invalid sample
        """

        # return False if a sample is invalid
        if gazepos == (None, None) or gazepos == (-1, -1):
            return False

        # in any other case, the sample is valid
        return True
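
# ---------------------------------------------------------------------------
# Usage sketch (added for illustration; not part of the original example).
# It shows how the EyeTribeTracker methods defined above are typically chained
# in a single trial. The import paths and the generic EyeTracker wrapper are
# assumptions about the surrounding project; only methods defined above are
# called on the tracker object.
# ---------------------------------------------------------------------------
from pygaze.display import Display
from pygaze.eyetracker import EyeTracker

disp = Display()
tracker = EyeTracker(disp)    # assumed to resolve to the EyeTribeTracker above

tracker.calibrate()           # run the calibration routine defined above
tracker.start_recording()     # sets tracker.recording to True
tracker.log("trial 1 start")
tracker.drift_correction()    # key-press drift check at the screen centre
t0, spos = tracker.wait_for_fixation_start()
x, y = tracker.sample()       # newest gaze position, (-1, -1) on invalid data
tracker.log("trial 1 end")
tracker.stop_recording()
tracker.close()               # saves data and sets tracker.connected to False
disp.close()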
Example #23
0
# # # # #
# directory stuff

DIR = os.path.split(os.path.abspath(__file__))[0]
soundfile = os.path.join(DIR, 'bark.ogg')
imagefile = os.path.join(DIR, 'kitten.png')


# # # # #
# create instances

# initialize the display
disp = Display()

# initialize a screen
scr = Screen()

# initialize a keyboard
kb = Keyboard(keylist=['space'],timeout=None)

# initialize a mouse
mouse = Mouse(mousebuttonlist=None, timeout=None)

# initialize a sound
snd = Sound(osc='sine', freq=4400, length=3000)
sounds = {
	'a sine wave (slightly oscillating)':Sound(osc='sine', freq=440, length=5000, attack=1000, decay=1000),
	'a saw wave':Sound(osc='saw', freq=880, length=5000, attack=0, decay=0),
	'a square wave':Sound(osc='square', freq=1760, length=5000, attack=0, decay=0),
	'white noise':Sound(osc='whitenoise'),
	'soundfile':Sound(soundfile=soundfile)
Example #24
0
# # # # #
# directory stuff

DIR = os.path.split(os.path.abspath(__file__))[0]
soundfile = os.path.join(DIR, 'bark.ogg')
imagefile = os.path.join(DIR, 'kitten.png')


# # # # #
# create instances

# initialize the display
disp = Display()

# initialize a screen
scr = Screen()

# initialize an EyeTracker
tracker = EyeTracker(disp)

# initialize a keyboard
kb = Keyboard(keylist=['space'],timeout=None)

# initialize a sound
snd = Sound(soundfile=soundfile)

# initialize a Timer
timer = Time()

# create a new logfile
log = Logfile(filename="test")
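
# ---------------------------------------------------------------------------
# Continuation sketch (added for illustration; not part of the original
# example). It shows one plausible way the instances created above could be
# used for a short recording block. The Logfile column names and the exact
# trial structure are assumptions; the tracker, display, screen, keyboard and
# sound methods used here all appear elsewhere in these examples.
# ---------------------------------------------------------------------------

# write a header to the logfile
log.write(["trialnr", "fixation_onset"])

# calibrate the tracker and start recording
tracker.calibrate()
tracker.start_recording()

# present an instruction screen and play the sound
scr.draw_text(text="Look at the screen; press Space when you are done.")
disp.fill(scr)
disp.show()
snd.play()

# wait for a fixation, log it, then wait for a key press to end the block
t0, spos = tracker.wait_for_fixation_start()
log.write([1, t0])
key, presstime = kb.get_key()

# shut everything down
tracker.stop_recording()
tracker.close()
log.close()
disp.close()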
Example #25
0
    def __init__(self, display,
        logfile=settings.LOGFILE, \
        eventdetection=settings.EVENTDETECTION, \
        saccade_velocity_threshold=35, \
        saccade_acceleration_threshold=9500, \
        blink_threshold=settings.BLINKTHRESH, \
        **args):

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, EyeLogicTracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        self.disp = display
        self.screen = Screen()
        self.dispsize = self.disp.dispsize # display size in pixels
        self.screensize = settings.SCREENSIZE # display size in cm
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # show a message
        self.screen.clear()
        self.screen.draw_text(
            text="Initialising the eye tracker, please wait...",
            fontsize=20)
        self.disp.fill(self.screen)
        self.disp.show()

        # output file properties
        self.logfile = logfile

        # eye tracker properties
        self._recording = Event()
        self._recording.clear()
        self._calibrated = Event()
        self._calibrated.clear()
        self.eye_used = 2 # 0=left, 1=right, 2=binocular
        self.sampleLock = Lock()
        self.lastSample = None
        self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)

        # event detection properties
        self.pxfixtresh = 50
        self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation

        self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection

        self._log_vars = [ \
            "timestampMicroSec", \
            "index", \
            "porFilteredX", \
            "porFilteredY", \
            "porLeftX", \
            "porLeftY", \
            "pupilRadiusLeft", \
            "porRightX", \
            "porRightY", \
            "pupilRadiusRight", \
            ]
        # Open a new log file.
        dir_name = os.path.dirname(logfile)
        file_name = os.path.basename(logfile)
        name, ext = os.path.splitext(file_name)
        self._data_file_path = os.path.join(dir_name, name+".eyelogic.csv")
        self._log_file = open(self._data_file_path, "w")
        # Write a header to the log.
        header = ["TYPE"]
        header.extend(self._log_vars)
        self._sep = ";"
        self._log_file.write("Sep="+self._sep+"\n")
        self._log_file.write(self._sep.join(map(str, header)))
        # Create a queue and events to coordinate logging with the callback thread.
        self._logging_queue = Queue()
        self._logging_queue_empty = Event()
        self._logging_queue_empty.set()
        self._connected = Event()
        self._connected.set()
        self._log_counter = 0
        self._log_consolidation_freq = 60
        
        self._logging_thread = Thread( target=self.loggingThread, \
                name='PyGaze_EyeLogic_Logging', args=[])

        global g_api
        g_api = self

        # log
        self.log("pygaze initiation")
        #self.log("experiment = {}".format(self.description))
        #self.log("participant = {}".format(self.participant))
        self.log("display resolution = {}x{}".format(self.dispsize[0], \
            self.dispsize[1]))
        self.log("display size in cm = {}x{}".format(self.screensize[0], \
            self.screensize[1]))
        self.log("fixation threshold = {} degrees".format(self.fixtresh))
        self.log("speed threshold = {} degrees/second".format(self.spdtresh))
        self.log("acceleration threshold = {} degrees/second**2".format( \
            self.accthresh))

        # connect
        self.api = ELApi( "PyGaze" )
        self.api.registerGazeSampleCallback( gazeSampleCallback )
        self.api.registerEventCallback( eventCallback )

        resultConnect = self.api.connect()
        if (resultConnect != ELApi.ReturnConnect.SUCCESS):
            self._connected.clear()
            raise Exception("Cannot connect to EyeLogic server = {}".format(errorstringConnect(resultConnect)))
        self._connected.set()

        screenConfig = self.api.getScreenConfig()
        self.log("eye tracker is mounted on screen {}".format(screenConfig.id))
        self.rawResolution = (screenConfig.resolutionX, screenConfig.resolutionY)
        self.log("raw screen resolution = {}x{}".format(
            self.rawResolution[0], self.rawResolution[1]))
        self.log("end pygaze initiation")

        deviceConfig = self.api.getDeviceConfig()
        if (deviceConfig.deviceSerial == 0):
            raise Exception("no eye tracking device connected")
        if (len(deviceConfig.frameRates) == 0):
            raise Exception("failed to read out device configuration")
        g_api.sampleRate = deviceConfig.frameRates[0]
        g_api.sampleTime = 1000.0 / g_api.sampleRate
        g_api.log("samplerate = {} Hz".format(g_api.sampleRate))
        g_api.log("sampletime = {} ms".format(g_api.sampleTime))
        self._logging_thread.start()

        self.screen.clear()
        self.disp.fill(self.screen)
        self.disp.show()
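
# ---------------------------------------------------------------------------
# Sketch of a logging-thread body (added for illustration). The real
# loggingThread method targeted by the Thread created in __init__ above is not
# shown in this excerpt; the function below only illustrates the
# producer/consumer pattern implied by the Queue and Event objects, under the
# assumption that gaze callbacks put lists of values onto self._logging_queue.
# ---------------------------------------------------------------------------
import os
from queue import Empty  # assumption: the Queue used above is queue.Queue


def loggingThread(self):

    # keep writing for as long as the connection to the tracker is alive
    while self._connected.is_set():
        try:
            # wait briefly for the next queued row (a list of sample values)
            row = self._logging_queue.get(timeout=0.1)
        except Empty:
            # nothing queued at the moment: signal that the queue is drained
            self._logging_queue_empty.set()
            continue
        self._logging_queue_empty.clear()
        # write one line in the same separated format as the header
        self._log_file.write("\n" + self._sep.join(map(str, row)))
        # flush to disk every _log_consolidation_freq rows
        self._log_counter += 1
        if self._log_counter % self._log_consolidation_freq == 0:
            self._log_file.flush()
            os.fsync(self._log_file.fileno())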
Example #26
0
    def __init__(self, libeyelink, tracker):

        """
		Constructor.

		Arguments:
		libeyelink	--	A libeyelink object.
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

        pylink.EyeLinkCustomDisplay.__init__(self)

        # objects
        self.libeyelink = libeyelink
        self.display = libeyelink.display
        self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False)
        self.kb = Keyboard(keylist=None, timeout=0)
        self.mouse = Mouse(timeout=0)
        if settings.DISPTYPE == "pygame":
            self.kb.set_timeout(timeout=0.001)
        # If we are using a DISPTYPE that cannot be used directly, we have to
        # save the camera image to a temporary file on each frame.
        # if DISPTYPE not in ('pygame', 'psychopy'):
        import tempfile
        import os

        self.tmp_file = os.path.join(tempfile.gettempdir(), "__eyelink__.jpg")
        # drawing properties
        self.xc = self.display.dispsize[0] / 2
        self.yc = self.display.dispsize[1] / 2
        self.extra_info = True
        self.ld = 40  # line distance
        self.fontsize = libeyelink.fontsize
        self.title = ""
        self.display_open = True
        self.draw_menu_screen()
        # beeps
        self.__target_beep__ = Sound(osc="sine", freq=440, length=50, attack=0, decay=0, soundfile=None)
        self.__target_beep__done__ = Sound(osc="sine", freq=880, length=200, attack=0, decay=0, soundfile=None)
        self.__target_beep__error__ = Sound(osc="sine", freq=220, length=200, attack=0, decay=0, soundfile=None)
        # Colors
        self.color = {
            pylink.CR_HAIR_COLOR: pygame.Color("white"),
            pylink.PUPIL_HAIR_COLOR: pygame.Color("white"),
            pylink.PUPIL_BOX_COLOR: pygame.Color("green"),
            pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color("red"),
            pylink.MOUSE_CURSOR_COLOR: pygame.Color("red"),
            "font": pygame.Color("white"),
        }
        # Font
        pygame.font.init()
        self.font = pygame.font.SysFont("Courier New", 11)
        # further properties
        self.state = None
        self.pal = None

        self.size = (0, 0)
        self.set_tracker(tracker)
        self.last_mouse_state = -1
        self.bit64 = "64bit" in platform.architecture()
        self.imagebuffer = self.new_array()
Example #27
0
	def __init__(self, libeyelink, tracker):

		"""
		Constructor.

		Arguments:
		libeyelink	--	A libeyelink object.
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

		pylink.EyeLinkCustomDisplay.__init__(self)

		# objects
		self.libeyelink = libeyelink
		self.display = libeyelink.display
		self.screen = Screen(disptype=DISPTYPE, mousevisible=False)
		self.kb = Keyboard(keylist=None, timeout=0)
		self.mouse = Mouse(timeout=0)
		if DISPTYPE == 'pygame':
			self.kb.set_timeout(timeout=0.001)
		# If we are using a DISPTYPE that cannot be used directly, we have to
		# save the camera image to a temporary file on each frame.
		#if DISPTYPE not in ('pygame', 'psychopy'):
		import tempfile
		import os
		self.tmp_file = os.path.join(tempfile.gettempdir(), '__eyelink__.jpg')
		# drawing properties
		self.xc = self.display.dispsize[0]/2
		self.yc = self.display.dispsize[1]/2
		self.extra_info = True
		self.ld = 40 # line distance
		self.fontsize = libeyelink.fontsize
		self.title = ""
		self.display_open = True
		# menu
		self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False)		
		self.menuscreen.draw_text(text="Eyelink calibration menu",
			pos=(self.xc,self.yc-6*self.ld), center=True, font='mono',
			fontsize=int(2*self.fontsize), antialias=True)
		self.menuscreen.draw_text(text="%s (pygaze %s, pylink %s)" \
			% (libeyelink.eyelink_model, pygaze.version, pylink.__version__),
			pos=(self.xc,self.yc-5*self.ld), center=True,
			font='mono', fontsize=int(.8*self.fontsize), antialias=True)
		self.menuscreen.draw_text(text="Press C to calibrate", 
			pos=(self.xc, self.yc-3*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press V to validate",
			pos=(self.xc, self.yc-2*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press A to auto-threshold",
			pos=(self.xc,self.yc-1*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press I to toggle extra info in camera image",
			pos=(self.xc,self.yc-0*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press Enter to show camera image",
			pos=(self.xc,self.yc+1*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(
			text="(then change between images using the arrow keys)",
			pos=(self.xc, self.yc+2*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		self.menuscreen.draw_text(text="Press Escape to abort experiment",
			pos=(self.xc, self.yc+4*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)			
		self.menuscreen.draw_text(text="Press Q to exit menu",
			pos=(self.xc, self.yc+5*self.ld), center=True, font='mono',
			fontsize=self.fontsize, antialias=True)
		# beeps
		self.__target_beep__ = Sound(osc='sine', freq=440, length=50, 
			attack=0, decay=0, soundfile=None)
		self.__target_beep__done__ = Sound(osc='sine', freq=880, length=200,
			attack=0, decay=0, soundfile=None)
		self.__target_beep__error__ = Sound(osc='sine', freq=220, length=200,
			attack=0, decay=0, soundfile=None)
		# Colors
		self.color = {
			pylink.CR_HAIR_COLOR:			pygame.Color('white'),
			pylink.PUPIL_HAIR_COLOR:		pygame.Color('white'),
			pylink.PUPIL_BOX_COLOR:			pygame.Color('green'),
			pylink.SEARCH_LIMIT_BOX_COLOR:	pygame.Color('red'),
			pylink.MOUSE_CURSOR_COLOR:		pygame.Color('red'),	
			'font':							pygame.Color('white'),		
			}
		# Font
		pygame.font.init()
		self.font = pygame.font.SysFont('Courier New', 11)
		# further properties
		self.state = None
		self.pal = None
		
		self.size = (0,0)
		self.set_tracker(tracker)
		self.last_mouse_state = -1
		self.bit64 = '64bit' in platform.architecture()
		self.imagebuffer = self.new_array()		
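
# ---------------------------------------------------------------------------
# Hook-up sketch (added for illustration; not part of the original example).
# A pylink custom-display object like the one constructed above is normally
# registered with pylink so that camera setup and calibration are drawn via
# PyGaze. The class name 'EyelinkGraphics' and the 'libeyelink' wrapper object
# are assumptions about the surrounding code base; pylink.openGraphicsEx() and
# EyeLink.doTrackerSetup() are standard pylink calls.
# ---------------------------------------------------------------------------
import pylink

# connect to the EyeLink host PC and build the custom display
tracker = pylink.EyeLink()
custom_display = EyelinkGraphics(libeyelink, tracker)  # 'libeyelink' assumed
                                                       # to exist in the
                                                       # surrounding code

# route calibration and camera-setup graphics through the custom display ...
pylink.openGraphicsEx(custom_display)

# ... and enter the tracker's setup (calibration) menu
tracker.doTrackerSetup()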
Example #28
0
class EyeTribeTracker(BaseEyeTracker):

	"""A class for EyeTribeTracker objects"""

	def __init__(self, display, logfile=LOGFILE, eventdetection=EVENTDETECTION, \
		saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, \
		**args):

		"""Initializes the EyeTribeTracker object
		
		arguments
		display	-- a pygaze.display.Display instance
		
		keyword arguments
		logfile	-- logfile name (string value); note that this is the
				   name for the eye data log file (default = LOGFILE)
		"""

		# try to copy docstrings (but ignore it if it fails, as we do
		# not need it for actual functioning of the code)
		try:
			copy_docstr(BaseEyeTracker, EyeTribeTracker)
		except:
			# we're not even going to show a warning, since the copied
			# docstring is useful for code editors; these load the docs
			# in a non-verbose manner, so warning messages would be lost
			pass

		# object properties
		self.disp = display
		self.screen = Screen()
		self.dispsize = DISPSIZE # display size in pixels
		self.screensize = SCREENSIZE # display size in cm
		self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
		self.errorbeep = Sound(osc='saw',freq=100, length=100)
		
		# output file properties
		self.outputfile = logfile
		
		# eye tracker properties
		self.connected = False
		self.recording = False
		self.errdist = 2 # degrees; maximal error for drift correction
		self.pxerrdist = 30 # initial error in pixels
		self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
		self.prevsample = (-1,-1)
		self.prevps = -1
		
		# event detection properties
		self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
		self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
		self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold
		self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold
		self.eventdetection = eventdetection
		self.set_detection_type(self.eventdetection)
		self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

		# connect to the tracker
		self.eyetribe = EyeTribe(logfilename=logfile)

		# get info on the sample rate
		self.samplerate = self.eyetribe._samplefreq
		self.sampletime = 1000.0 * self.eyetribe._intsampletime

		# initiation report
		self.log("pygaze initiation report start")
		self.log("display resolution: %sx%s" % (self.dispsize[0],self.dispsize[1]))
		self.log("display size in cm: %sx%s" % (self.screensize[0],self.screensize[1]))
		self.log("samplerate: %.2f Hz" % self.samplerate)
		self.log("sampletime: %.2f ms" % self.sampletime)
		self.log("fixation threshold: %s degrees" % self.fixtresh)
		self.log("speed threshold: %s degrees/second" % self.spdtresh)
		self.log("acceleration threshold: %s degrees/second**2" % self.accthresh)
		self.log("pygaze initiation report end")


	def calibrate(self):

		"""Calibrates the eye tracking system
		
		arguments
		None
		
		keyword arguments
		None

		returns
		success	-- returns True if calibration succeeded, or False if
				   not; in addition a calibration log is added to the
				   log file and some properties are updated (i.e. the
				   thresholds for detection algorithms)
		"""
		
		# CALIBRATION
		# determine the calibration points
		calibpoints = []
		for x in [0.1,0.5,0.9]:
			for y in [0.1,0.5,0.9]:
				calibpoints.append((int(x*self.dispsize[0]),int(y*self.dispsize[1])))
		random.shuffle(calibpoints)
		
		# show a message
		self.screen.clear()
		self.screen.draw_text(text="Press Space to start the calibration or Q to quit.")
		self.disp.fill(self.screen)
		self.disp.show()
		
		# wait for keyboard input
		key, keytime = self.kb.get_key(keylist=['q','space'], timeout=None, flush=True)
		if key == 'q':
			quited = True
		else:
			quited = False
		
		# run until the user is satisfied, or quits
		calibrated = False
		calibresult = None
		while not quited and not calibrated:
			# start a new calibration
			self.eyetribe.calibration.start(pointcount=len(calibpoints))
			
			# loop through calibration points
			for cpos in calibpoints:
				self.draw_calibration_target(cpos[0], cpos[1])
				# wait for a bit to allow participant to start looking at
				# the calibration point (#TODO: space press?)
				clock.pause(1000)
				# start calibration of point
				self.eyetribe.calibration.pointstart(cpos[0],cpos[1])
				# wait for a second
				clock.pause(1000)
				# stop calibration of this point
				result = self.eyetribe.calibration.pointend()
				# the final calibration point returns a dict (does it?)
				if type(result) == dict:
					calibresult = copy.deepcopy(result)
				# check if the Q key has been pressed
				if self.kb.get_key(keylist=['q'],timeout=10,flush=False)[0] == 'q':
					# abort calibration
					self.eyetribe.calibration.abort()
					# set quited variable and break this for loop
					quited = True
					break
			
			# retry option if the calibration was aborted			
			if quited:
				# show retry message
				self.screen.clear()
				self.screen.draw_text("Calibration aborted. Press Space to restart, or 'Q' to quit.")
				self.disp.fill(self.screen)
				self.disp.show()
				# get input
				key, keytime = self.kb.get_key(keylist=['q','space'], timeout=None, flush=True)
				if key == 'space':
					# unset quited Boolean
					quited = False
				# skip further processing
				continue

			# get the calibration result if it was not obtained yet
			if type(calibresult) != dict:
				# empty display
				self.disp.fill()
				self.disp.show()
				# allow for a bit of calculation time
				clock.pause(2000)
				# get the result
				calibresult = self.eyetribe._tracker.get_calibresult()

			# results
			# clear the screen
			self.screen.clear()
			# draw results for each point
			if type(calibresult) == dict:
				for p in calibresult['calibpoints']:
					# only draw the point if data was obtained
					if p['state'] > 0:
						# draw the mean error
						self.screen.draw_circle(colour=(252,233,79), pos=(p['cpx'],p['cpy']), r=p['mepix'], pw=0, fill=True)
						# draw the point
						self.screen.draw_fixation(fixtype='dot', colour=(115,210,22), pos=(p['cpx'],p['cpy']))
						# draw the estimated point
						self.screen.draw_fixation(fixtype='dot', colour=(32,74,135), pos=(p['mecpx'],p['mecpy']))
						# annotate accuracy
						self.screen.draw_text(text=str(p['acd']), pos=(p['cpx']+10,p['cpy']+10), fontsize=12)
					# if no data was obtained, draw the point in red
					else:
						self.screen.draw_fixation(fixtype='dot', colour=(204,0,0), pos=(p['cpx'],p['cpy']))
				# draw box for averages
				self.screen.draw_rect(colour=(238,238,236), x=int(self.dispsize[0]*0.15), y=int(self.dispsize[1]*0.2), w=400, h=200, pw=0, fill=True)
				# draw result
				if calibresult['result']:
					self.screen.draw_text(text="calibration is successful", colour=(115,210,22), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25)), fontsize=12)
				else:
					self.screen.draw_text(text="calibration failed", colour=(204,0,0), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25)), fontsize=12)
				# draw average accuracy
				self.screen.draw_text(text="average error = %.2f degrees" % (calibresult['deg']), colour=(211,215,207), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25+20)), fontsize=12)
				# draw input options
				self.screen.draw_text(text="Press Space to continue, or 'R' to restart.", colour=(211,215,207), pos=(int(self.dispsize[0]*0.25),int(self.dispsize[1]*0.25+40)), fontsize=12)
			else:
				self.screen.draw_text(text="Calibration failed, press 'R' to try again.")
			# show the results
			self.disp.fill(self.screen)
			self.disp.show()
			# wait for input
			key, keytime = self.kb.get_key(keylist=['space','r'], timeout=None, flush=True)
			# process input
			if key == 'space':
				calibrated = True

		# the calibration failed if the user quit
		if quited:
			return False

		# NOISE CALIBRATION
		# get all error estimates (pixels)
		var = []
		for p in calibresult['calibpoints']:
			# only include the point's error estimate if data was obtained
			if p['state'] > 0:
				var.append(p['mepix'])
		noise = sum(var) / float(len(var))
		self.pxdsttresh = (noise, noise)
				
		# AFTERMATH
		# store some variables
		pixpercm = (self.dispsize[0]/float(self.screensize[0]) + self.dispsize[1]/float(self.screensize[1])) / 2
		screendist = SCREENDIST
		# calculate thresholds based on tracker settings
		self.accuracy = ((calibresult['Ldeg'],calibresult['Ldeg']), (calibresult['Rdeg'],calibresult['Rdeg'])) 
		self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm)
		self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
		self.pxaccuracy = ((deg2pix(screendist, self.accuracy[0][0], pixpercm),deg2pix(screendist, self.accuracy[0][1], pixpercm)), (deg2pix(screendist, self.accuracy[1][0], pixpercm),deg2pix(screendist, self.accuracy[1][1], pixpercm)))
		self.pxspdtresh = deg2pix(screendist, self.spdtresh/1000.0, pixpercm) # in pixels per millisecond
		self.pxacctresh = deg2pix(screendist, self.accthresh/1000.0, pixpercm) # in pixels per millisecond**2

		# calibration report
		self.log("pygaze calibration report start")
		self.log("accuracy (degrees): LX=%s, LY=%s, RX=%s, RY=%s" % (self.accuracy[0][0],self.accuracy[0][1],self.accuracy[1][0],self.accuracy[1][1]))
		self.log("accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s" % (self.pxaccuracy[0][0],self.pxaccuracy[0][1],self.pxaccuracy[1][0],self.pxaccuracy[1][1]))
		self.log("precision (RMS noise in pixels): X=%s, Y=%s" % (self.pxdsttresh[0],self.pxdsttresh[1]))
		self.log("distance between participant and display: %s cm" % screendist)
		self.log("fixation threshold: %s pixels" % self.pxfixtresh)
		self.log("speed threshold: %s pixels/ms" % self.pxspdtresh)
		self.log("acceleration threshold: %s pixels/ms**2" % self.pxacctresh)
		self.log("pygaze calibration report end")

		return True


	def close(self):

		"""Neatly close connection to tracker
		
		arguments
		None
		
		returns
		Nothing	-- saves data and sets self.connected to False
		"""

		# close connection
		self.eyetribe.close()
		self.connected = False		


	def connected(self):

		"""Checks if the tracker is connected
		
		arguments
		None
		
		returns
		connected	-- True if connection is established, False if not;
				   sets self.connected to the same value
		"""

		res = self.eyetribe._tracker.get_trackerstate()

		if res == 0:
			self.connected = True
		else:
			self.connected = False

		return self.connected


	def drift_correction(self, pos=None, fix_triggered=False):

		"""Performs a drift check
		
		arguments
		None
		
		keyword arguments
		pos			-- (x, y) position of the fixation dot or None for
					   a central fixation (default = None)
		fix_triggered	-- Boolean indicating if drift check should be
					   performed based on gaze position (fix_triggered
					   = True) or on spacepress (fix_triggered = 
					   False) (default = False)
		
		returns
		checked		-- Boolean indicating if drift check is ok (True)
					   or not (False); or calls self.calibrate if 'q'
					   or 'escape' is pressed
		"""
		
		if pos == None:
			pos = self.dispsize[0] / 2, self.dispsize[1] / 2
		if fix_triggered:
			return self.fix_triggered_drift_correction(pos)		
		self.draw_drift_correction_target(pos[0], pos[1])
		pressed = False
		while not pressed:
			pressed, presstime = self.kb.get_key()
			if pressed:
				if pressed == 'escape' or pressed == 'q':
					print("libeyetribe.EyeTribeTracker.drift_correction: 'q' or 'escape' pressed")
					return self.calibrate()
				gazepos = self.sample()
				if ((gazepos[0]-pos[0])**2  + (gazepos[1]-pos[1])**2)**0.5 < self.pxerrdist:
					return True
				else:
					self.errorbeep.play()
		return False
		
	def draw_drift_correction_target(self, x, y):
		
		"""
		Draws the drift-correction target.
		
		arguments
		
		x		--	The X coordinate
		y		--	The Y coordinate
		"""
		
		self.screen.clear()
		self.screen.draw_fixation(fixtype='dot', colour=FGC, pos=(x,y), pw=0,
			diameter=12)
		self.disp.fill(self.screen)
		self.disp.show()			
		
	def draw_calibration_target(self, x, y):
		
		self.draw_drift_correction_target(x, y)

	def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30):

		"""Performs a fixation triggered drift correction by collecting
		a number of samples and calculating the average distance from the
		fixation position
		
		arguments
		None
		
		keyword arguments
		pos			-- (x, y) position of the fixation dot or None for
					   a central fixation (default = None)
		min_samples		-- minimal amount of samples after which an
					   average deviation is calculated (default = 10)
		max_dev		-- maximal deviation from fixation in pixels
					   (default = 60)
		reset_threshold	-- if the horizontal or vertical distance in
					   pixels between two consecutive samples is
					   larger than this threshold, the sample
					   collection is reset (default = 30)
		
		returns
		checked		-- Boolean indicating if drift check is ok (True)
					   or not (False); or calls self.calibrate if 'q'
					   or 'escape' is pressed
		"""

		if pos == None:
			pos = self.dispsize[0] / 2, self.dispsize[1] / 2
		self.draw_drift_correction_target(pos[0], pos[1])

		# loop until we have sufficient samples
		lx = []
		ly = []
		while len(lx) < min_samples:

			# pressing escape enters the calibration screen
			if self.kb.get_key()[0] in ['escape','q']:
				print("libeyetribe.EyeTribeTracker.fix_triggered_drift_correction: 'q' or 'escape' pressed")
				return self.calibrate()

			# collect a sample
			x, y = self.sample()

			if len(lx) == 0 or x != lx[-1] or y != ly[-1]:

				# if present sample deviates too much from previous sample, reset counting
				if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold):
					lx = []
					ly = []

				# collect samples
				else:
					lx.append(x)
					ly.append(y)

			if len(lx) == min_samples:

				avg_x = sum(lx) / len(lx)
				avg_y = sum(ly) / len(ly)
				d = ((avg_x - pos[0]) ** 2 + (avg_y - pos[1]) ** 2)**0.5

				if d < max_dev:
					return True
				else:
					lx = []
					ly = []			

	def get_eyetracker_clock_async(self):

		"""Not supported for EyeTribeTracker (yet)"""

		print("function not supported yet")


	def log(self, msg):

		"""Writes a message to the log file
		
		arguments
		msg		-- a string to include in the log file
		
		returns
		Nothing	-- uses the EyeTribe's native log function to include a
				   line in the log file
		"""

		self.eyetribe.log_message(msg)


	def log_var(self, var, val):

		"""Writes a variable to the log file
		
		arguments
		var		-- variable name
		val		-- variable value
		
		returns
		Nothing	-- uses the EyeTribe's native log function to include a
				   line in the log file in a "var NAME VALUE" layout
		"""

		msg = "var %s %s" % (var, val)

		self.log(msg)


	def prepare_drift_correction(self, pos):

		"""Not supported for EyeTribeTracker (yet)"""

		print("function not supported yet")


	def pupil_size(self):

		"""Return pupil size
		
		arguments
		None
		
		returns
		pupil size	-- returns pupil diameter for the eye that is currently
				   being tracked (as specified by self.eye_used) or -1
				   when no data is obtainable
		"""
		
		# get newest pupil size
		ps = self.eyetribe.pupil_size()
		
		# invalid data
		if ps == None:
			return -1
		
		# check if the new pupil size is the same as the previous
		if ps != self.prevps:
			# update the pupil size
			self.prevps = copy.copy(ps)
		
		return self.prevps


	def sample(self):

		"""Returns newest available gaze position
		
		arguments
		None
		
		returns
		sample	-- an (x,y) tuple or a (-1,-1) on an error
		"""

		# get newest sample
		s = self.eyetribe.sample()
		
		# invalid data
		if s == (None,None):
			return (-1,-1)
		
		# check if the new sample is the same as the previous
		if s != self.prevsample:
			# update the current sample
			self.prevsample = copy.copy(s)
		
		return self.prevsample


	def send_command(self, cmd):

		"""Sends a command to the eye tracker
		
		arguments
		cmd		--	the command to be sent to the EyeTribe, which should
					be a list with the following information:
						[category, request, values]
		
		returns
		Nothing
		"""

		self.eyetribe._connection.request(cmd)


	def start_recording(self):

		"""Starts recording eye position
		
		arguments
		None
		
		returns
		Nothing	-- sets self.recording to True when recording is
				   successfully started
		"""

		self.eyetribe.start_recording()
		self.recording = True


	def status_msg(self, msg):

		"""Not supported for EyeTribeTracker (yet)"""

		print("function not supported yet")


	def stop_recording(self):

		"""Stop recording eye position
		
		arguments
		None
		
		returns
		Nothing	-- sets self.recording to False when recording is
				   successfully stopped
		"""

		self.eyetribe.stop_recording()
		self.recording = False
	
	
	def set_detection_type(self, eventdetection):
		
		"""Set the event detection type to either PyGaze algorithms, or
		native algorithms as provided by the manufacturer (only if
		available: detection type will default to PyGaze if no native
		functions are available)
		
		arguments
		eventdetection	--	a string indicating which detection type
						should be employed: either 'pygaze' for
						PyGaze event detection algorithms or
						'native' for the manufacturer's algorithms (only
						if available; will default to 'pygaze' if no
						native event detection is available)
		returns		--	detection type for saccades, fixations and
						blinks in a tuple, e.g. 
						('pygaze','native','native') when 'native'
						was passed, but native detection was not
						available for saccade detection
		"""
		
		if eventdetection in ['pygaze','native']:
			self.eventdetection = eventdetection
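		# note (added): the EyeTribe API offers no native online event
		# detection (see the warnings in the wait_for_* methods below), so
		# the PyGaze algorithms are always used for saccades, fixations and
		# blinks, which is why the tuple below is fixed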
		
		return ('pygaze','pygaze','pygaze')


	def wait_for_event(self, event):

		"""Waits for event
		
		arguments
		event		-- an integer event code, one of the following:
					3 = STARTBLINK
					4 = ENDBLINK
					5 = STARTSACC
					6 = ENDSACC
					7 = STARTFIX
					8 = ENDFIX
		
		returns
		outcome	-- a self.wait_for_* method is called, depending on the
				   specified event; the return values of corresponding
				   method are returned
		"""

		if event == 5:
			outcome = self.wait_for_saccade_start()
		elif event == 6:
			outcome = self.wait_for_saccade_end()
		elif event == 7:
			outcome = self.wait_for_fixation_start()
		elif event == 8:
			outcome = self.wait_for_fixation_end()
		elif event == 3:
			outcome = self.wait_for_blink_start()
		elif event == 4:
			outcome = self.wait_for_blink_end()
		else:
			raise Exception("Error in libeyetribe.EyeTribeTracker.wait_for_event: eventcode %s is not supported" % event)

		return outcome
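
	# Illustrative note (not in the original source): an experiment script can
	# call the specific wait_for_* methods directly, or go through
	# wait_for_event() with the numeric codes listed above, e.g. (sketch,
	# assuming 'tracker' is an instance of this class):
	#
	#     ENDSACC = 6
	#     endtime, startpos, endpos = tracker.wait_for_event(ENDSACC)
	#
	# which is equivalent to calling tracker.wait_for_saccade_end().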


	def wait_for_blink_end(self):

		"""Waits for a blink end and returns the blink ending time
		
		arguments
		None
		
		returns
		timestamp		--	blink ending time in milliseconds, as
						measured from experiment begin time
		"""

		
		# # # # #
		# EyeTribe method

		if self.eventdetection == 'native':
			
			# print warning, since EyeTribe does not have blink detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, "
				"but EyeTribe does not offer blink detection; PyGaze algorithm "
				"will be used")

		# # # # #
		# PyGaze method
		
		blinking = True
		
		# loop while there is a blink
		while blinking:
			# get newest sample
			gazepos = self.sample()
			# check if it's valid
			if self.is_valid_sample(gazepos):
				# if it is a valid sample, blinking has stopped
				blinking = False
		
		# return timestamp of blink end
		return clock.get_time()		
		

	def wait_for_blink_start(self):

		"""Waits for a blink start and returns the blink starting time
		
		arguments
		None
		
		returns
		timestamp		--	blink starting time in milliseconds, as
						measured from experiment begin time
		"""
		
		# # # # #
		# EyeTribe method

		if self.eventdetection == 'native':
			
			# print warning, since EyeTribe does not have blink detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, "
				"but EyeTribe does not offer blink detection; PyGaze algorithm "
				"will be used")

		# # # # #
		# PyGaze method
		
		blinking = False
		
		# loop until there is a blink
		while not blinking:
			# get newest sample
			gazepos = self.sample()
			# check if it's a valid sample
			if not self.is_valid_sample(gazepos):
				# get timestamp for possible blink start
				t0 = clock.get_time()
				# loop until a blink is determined, or a valid sample occurs
				while not self.is_valid_sample(self.sample()):
					# check if time has surpassed 150 ms
					if clock.get_time()-t0 >= 150:
						# return timestamp of blink start
						return t0
		

	def wait_for_fixation_end(self):

		"""Returns time and gaze position when a fixation has ended;
		function assumes that a 'fixation' has ended when a deviation of
		more than self.pxfixtresh from the initial fixation position has
		been detected (self.pxfixtresh is created in self.calibration,
		based on self.fixtresh, a property defined in self.__init__)
		
		arguments
		None
		
		returns
		time, gazepos	-- time is the ending time in milliseconds (from
					   expstart), gazepos is an (x,y) gaze position
					   tuple of the position from which the fixation
					   was initiated
		"""

		# # # # #
		# EyeTribe method

		if self.eventdetection == 'native':
			
			# print warning, since EyeTribe does not have fixation detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, "
				"but EyeTribe does not offer fixation detection; "
				"PyGaze algorithm will be used")

		# # # # #
		# PyGaze method
			
		# function assumes that a 'fixation' has ended when a deviation of more than fixtresh
		# from the initial 'fixation' position has been detected
		
		# get starting time and position
		stime, spos = self.wait_for_fixation_start()
		
		# loop until fixation has ended
		while True:
			# get new sample
			npos = self.sample() # get newest sample
			# check if sample is valid
			if self.is_valid_sample(npos):
				# check if sample deviates too much from starting position
				if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras
					# break loop if deviation is too high
					break

		return clock.get_time(), spos


	def wait_for_fixation_start(self):

		"""Returns starting time and position when a fixation is started;
		function assumes a 'fixation' has started when gaze position
		remains reasonably stable (i.e. within self.pxfixtresh of the
		starting position) for self.fixtimetresh milliseconds
		(self.pxfixtresh is created in self.calibration, based on
		self.fixtresh, a property defined in self.__init__)
		
		arguments
		None
		
		returns
		time, gazepos	-- time is the starting time in milliseconds (from
					   expstart), gazepos is a (x,y) gaze position
					   tuple of the position from which the fixation
					   was initiated
		"""
		
		# # # # #
		# EyeTribe method

		if self.eventdetection == 'native':
			
			# print warning, since EyeTribe does not have fixation detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, "
				"but EyeTribe does not offer fixation detection; "
				"PyGaze algorithm will be used")
			
			
		# # # # #
		# PyGaze method
		
		# function assumes a 'fixation' has started when gaze position
		# remains reasonably stable for self.fixtimetresh
		
		# get starting position
		spos = self.sample()
		while not self.is_valid_sample(spos):
			spos = self.sample()
		
		# get starting time
		t0 = clock.get_time()

		# wait for reasonably stable position
		moving = True
		while moving:
			# get new sample
			npos = self.sample()
			# check if sample is valid
			if self.is_valid_sample(npos):
				# check if new sample is too far from starting position
				if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras
					# if not, reset starting position and time
					spos = copy.copy(npos)
					t0 = clock.get_time()
				# if new sample is close to starting sample
				else:
					# get timestamp
					t1 = clock.get_time()
					# check if fixation time threshold has been surpassed
					if t1 - t0 >= self.fixtimetresh:
						# return time and starting position
						return t1, spos


	def wait_for_saccade_end(self):

		"""Returns ending time, starting and end position when a saccade is
		ended; based on Dalmaijer et al. (2013) online saccade detection
		algorithm
		
		arguments
		None
		
		returns
		endtime, startpos, endpos	-- endtime in milliseconds (from 
							   expbegintime); startpos and endpos
							   are (x,y) gaze position tuples
		"""

		# # # # #
		# EyeTribe method

		if self.eventdetection == 'native':
			
			# print warning, since EyeTribe does not have saccade detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, "
				"but EyeTribe does not offer saccade detection; PyGaze "
				"algorithm will be used")

		# # # # #
		# PyGaze method
		
		# get starting position (no blinks)
		t0, spos = self.wait_for_saccade_start()
		# get valid sample
		prevpos = self.sample()
		while not self.is_valid_sample(prevpos):
			prevpos = self.sample()
		# get starting time, intersample distance, and velocity
		t1 = clock.get_time()
		s = ((prevpos[0]-spos[0])**2 + (prevpos[1]-spos[1])**2)**0.5 # = intersample distance = speed in px/sample
		v0 = s / (t1-t0)

		# run until velocity and acceleration go below threshold
		saccadic = True
		while saccadic:
			# get new sample
			newpos = self.sample()
			t1 = clock.get_time()
			if self.is_valid_sample(newpos) and newpos != prevpos:
				# calculate distance
				s = ((newpos[0]-prevpos[0])**2 + (newpos[1]-prevpos[1])**2)**0.5 # = speed in pixels/sample
				# calculate velocity
				v1 = s / (t1-t0)
				# calculate acceleration
				a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2
				# check if velocity and acceleration are below threshold
				if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and a < 0):
					saccadic = False
					epos = newpos[:]
					etime = clock.get_time()
				# update previous values
				t0 = copy.copy(t1)
				v0 = copy.copy(v1)
			# update previous sample
			prevpos = newpos[:]

		return etime, spos, epos


	def wait_for_saccade_start(self):

		"""Returns starting time and starting position when a saccade is
		started; based on Dalmaijer et al. (2013) online saccade detection
		algorithm
		
		arguments
		None
		
		returns
		starttime, startpos	-- starttime in milliseconds (from expbegintime);
					   startpos is an (x,y) gaze position tuple
		"""

		# # # # #
		# EyeTribe method

		if self.eventdetection == 'native':
			
			# print warning, since EyeTribe does not have saccade detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, "
				"but EyeTribe does not offer saccade detection; PyGaze "
				"algorithm will be used")

		# # # # #
		# PyGaze method
		
		# get starting position (no blinks)
		newpos = self.sample()
		while not self.is_valid_sample(newpos):
			newpos = self.sample()
		# get starting time, position, intersampledistance, and velocity
		t0 = clock.get_time()
		prevpos = newpos[:]
		s = 0
		v0 = 0

		# get samples
		saccadic = False
		while not saccadic:
			# get new sample
			newpos = self.sample()
			t1 = clock.get_time()
			if self.is_valid_sample(newpos) and newpos != prevpos:
				# check if distance is larger than precision error
				sx = newpos[0]-prevpos[0]; sy = newpos[1]-prevpos[1]
				if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 > self.weightdist: # weighted distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise
					# calculate distance
					s = ((sx)**2 + (sy)**2)**0.5 # intersampledistance = speed in pixels/ms
					# calculate velocity
					v1 = s / (t1-t0)
					# calculate acceleration
					a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2
					# check if either velocity or acceleration are above threshold values
					if v1 > self.pxspdtresh or a > self.pxacctresh:
						saccadic = True
						spos = prevpos[:]
						stime = clock.get_time()
					# update previous values
					t0 = copy.copy(t1)
					v0 = copy.copy(v1)

				# update previous sample
				prevpos = newpos[:]

		return stime, spos
	
	
	def is_valid_sample(self, gazepos):
		
		"""Checks if the sample provided is valid, based on EyeTribe specific
		criteria (for internal use)
		
		arguments
		gazepos		--	a (x,y) gaze position tuple, as returned by
						self.sample()
		
		returns
		valid		--	a Boolean: True on a valid sample, False on
						an invalid sample
		"""
		
		# return False if a sample is invalid
		if gazepos == (None,None) or gazepos == (-1,-1):
			return False
		
		# in any other case, the sample is valid
		return True
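
# --- Illustrative usage sketch (not part of the original module) ---
# A minimal, hedged example of how the event-detection methods above are
# typically driven from an experiment script, via the generic
# pygaze.eyetracker.EyeTracker factory (see the script examples further on).
# The imports are done inside a __main__ guard so that this sketch only runs
# when the module is executed directly (and to avoid circular imports with
# the factory); it obviously requires a connected tracker.
if __name__ == "__main__":

	from pygaze.display import Display
	from pygaze.eyetracker import EyeTracker

	demo_disp = Display()
	demo_tracker = EyeTracker(demo_disp)
	demo_tracker.calibrate()
	demo_tracker.start_recording()
	# block until a fixation starts, then until that same fixation ends
	fix_start, fix_pos = demo_tracker.wait_for_fixation_start()
	fix_end, _ = demo_tracker.wait_for_fixation_end()
	demo_tracker.log_var("fixation_duration_ms", fix_end - fix_start)
	demo_tracker.stop_recording()
	demo_tracker.close()
	demo_disp.close()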
Example #29
0
class Dummy(DumbDummy):
    """A dummy class to run experiments in dummy mode, where eye movements are simulated by the mouse"""
    def __init__(self, display):
        """Initiates an eyetracker dummy object, that simulates gaze position using the mouse
        
        arguments
        display        --    a pygaze display.Display instance
        
        keyword arguments
        None
        """

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, Dummy)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        self.recording = False
        self.blinking = False
        self.bbpos = (settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2)
        self.resolution = settings.DISPSIZE[:]
        self.simulator = Mouse(disptype=settings.DISPTYPE,
                               mousebuttonlist=None,
                               timeout=2,
                               visible=False)
        self.kb = Keyboard(disptype=settings.DISPTYPE,
                           keylist=None,
                           timeout=None)
        self.angrybeep = Sound(osc='saw',
                               freq=100,
                               length=100,
                               attack=0,
                               decay=0,
                               soundfile=None)
        self.display = display
        self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False)

    def calibrate(self):
        """Dummy calibration"""

        print("Calibration would now take place")
        clock.pause(1000)

    def drift_correction(self, pos=None, fix_triggered=False):
        """Dummy drift correction"""

        print("Drift correction would now take place")

        if fix_triggered:
            return self.fix_triggered_drift_correction(pos)

        if pos == None:
            pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2

        # show mouse
        self.simulator.set_visible(visible=True)

        # show fixation dot
        self.draw_drift_correction_target(pos[0], pos[1])

        # perform drift check
        errdist = 60  # pixels (on a 1024x768px and 39.9x29.9cm monitor at 67 cm, this is about 2 degrees of visual angle)
        pressed = None
        while True:
            # check for keyboard input
            pressed, presstime = self.kb.get_key(
                keylist=['q', 'escape', 'space'], timeout=1)

            # quit key
            if pressed in ['q', 'escape']:
                # hide mouse
                self.simulator.set_visible(visible=False)
                return False

            # space bar
            elif pressed == 'space':
                # get sample
                gazepos = self.sample()
                # sample is close enough to fixation dot
                if ((gazepos[0] - pos[0])**2 +
                    (gazepos[1] - pos[1])**2)**0.5 < errdist:
                    # hide mouse
                    self.simulator.set_visible(visible=False)
                    return True
                # sample is NOT close enough to fixation dot
                else:
                    # show discontent
                    self.angrybeep.play()

    def fix_triggered_drift_correction(self,
                                       pos=None,
                                       min_samples=30,
                                       max_dev=60,
                                       reset_threshold=10):
        """Dummy drift correction (fixation triggered)"""

        print("Drift correction (fixation triggered) would now take place")

        if pos == None:
            pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2

        # show mouse
        self.simulator.set_visible(visible=True)

        # show fixation dot
        self.draw_drift_correction_target(pos[0], pos[1])

        while True:
            # loop until we have sufficient samples
            lx = []
            ly = []
            while len(lx) < min_samples:

                # pressing escape enters the calibration screen
                if self.kb.get_key(keylist=["escape", "q"],
                                   timeout=0)[0] != None:
                    self.recording = False
                    print(
                        "libeyetracker.libeyetracker.fix_triggered_drift_correction(): 'q' or 'escape' pressed"
                    )
                    self.simulator.set_visible(visible=False)
                    return False

                # collect a sample
                x, y = self.sample()

                if len(lx) == 0 or x != lx[-1] or y != ly[-1]:

                    # if present sample deviates too much from previous sample, reset counting
                    if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold
                                        or abs(y - ly[-1]) > reset_threshold):
                        lx = []
                        ly = []

                    # collect samples
                    else:
                        lx.append(x)
                        ly.append(y)

                # check if samples are within max. deviation
                if len(lx) == min_samples:

                    avg_x = sum(lx) / len(lx)
                    avg_y = sum(ly) / len(ly)
                    d = ((avg_x - pos[0])**2 + (avg_y - pos[1])**2)**0.5

                    if d < max_dev:
                        self.simulator.set_visible(visible=False)
                        return True
                    else:
                        lx = []
                        ly = []

    def start_recording(self):
        """Dummy for starting recording, prints what would have been the recording start"""

        self.simulator.set_visible(visible=True)
        dumrectime = clock.get_time()

        self.recording = True

        print("Recording would have started at: " + str(dumrectime))

    def stop_recording(self):
        """Dummy for stopping recording, prints what would have been the recording end"""

        self.simulator.set_visible(visible=False)
        dumrectime = clock.get_time()

        self.recording = False

        print("Recording would have stopped at: " + str(dumrectime))

    def close(self):
        """Dummy for closing connection with eyetracker, prints what would have been connection closing time"""

        if self.recording:
            self.stop_recording()

        closetime = clock.get_time()

        print("eyetracker connection would have closed at: " + str(closetime))

    def pupil_size(self):
        """Returns dummy pupil size"""

        return 19

    def sample(self):
        """Returns simulated gaze position (=mouse position)"""

        if self.blinking:
            if self.simulator.get_pressed()[2]:  # buttondown
                self.simulator.set_pos(pos=(
                    self.bbpos[0],
                    self.resolution[1]))  # set position to blinking position
            elif not self.simulator.get_pressed()[2]:  # buttonup
                self.simulator.set_pos(
                    pos=self.bbpos)  # set position to position before blinking
                self.blinking = False  # 'blink' stopped

        elif not self.blinking:
            if self.simulator.get_pressed()[2]:  # buttondown
                self.blinking = True  # 'blink' started
                self.bbpos = self.simulator.get_pos(
                )  # position before blinking
                self.simulator.set_pos(pos=(
                    self.bbpos[0],
                    self.resolution[1]))  # set position to blinking position

        return self.simulator.get_pos()

    def wait_for_saccade_start(self):
        """Returns starting time and starting position when a simulated saccade is started"""

        # function assumes that a 'saccade' has been started when a deviation of more than
        # maxerr from the initial 'gaze' position has been detected (using Pythagoras, of course)

        spos = self.sample()  # starting position
        maxerr = 3  # pixels
        while True:
            npos = self.sample()  # get newest sample
            if ((spos[0] - npos[0])**2 +
                (spos[1] - npos[1])**2)**0.5 > maxerr:  # Pythagoras
                break

        return clock.get_time(), spos

    def wait_for_saccade_end(self):
        """Returns ending time, starting and end position when a simulated saccade is ended"""

        # function assumes that a 'saccade' has ended when 'gaze' position remains reasonably
        # (i.e.: within maxerr) stable for five samples
        # the stability check is the same as in wait_for_fixation_start

        stime, spos = self.wait_for_saccade_start()
        maxerr = 3  # pixels

        # wait for reasonably stable position
        xl = []  # list for last five samples (x coordinate)
        yl = []  # list for last five samples (y coordinate)
        moving = True
        while moving:
            # check positions
            npos = self.sample()
            xl.append(npos[0])  # add newest sample
            yl.append(npos[1])  # add newest sample
            if len(xl) == 5:
                # check if deviation is small enough
                if max(xl) - min(xl) < maxerr and max(yl) - min(yl) < maxerr:
                    moving = False
                # remove oldest sample
                xl.pop(0)
                yl.pop(0)
            # wait for a bit, to avoid immediately returning (runs go faster than mouse moves)
            clock.pause(10)

        return clock.get_time(), spos, (xl[len(xl) - 1], yl[len(yl) - 1])

    def wait_for_fixation_start(self):
        """Returns starting time and position when a simulated fixation is started"""

        # function assumes a 'fixation' has started when 'gaze' position remains reasonably
        # stable for five samples in a row (same as saccade end)

        maxerr = 3  # pixels

        # wait for reasonably stable position
        xl = []  # list for last five samples (x coordinate)
        yl = []  # list for last five samples (y coordinate)
        moving = True
        while moving:
            npos = self.sample()
            xl.append(npos[0])  # add newest sample
            yl.append(npos[1])  # add newest sample
            if len(xl) == 5:
                # check if deviation is small enough
                if max(xl) - min(xl) < maxerr and max(yl) - min(yl) < maxerr:
                    moving = False
                # remove oldest sample
                xl.pop(0)
                yl.pop(0)
            # wait for a bit, to avoid immediately returning (runs go faster than mouse moves)
            clock.pause(10)

        return clock.get_time(), (xl[len(xl) - 1], yl[len(yl) - 1])

    def wait_for_fixation_end(self):
        """Returns time and gaze position when a simulated fixation is ended"""

        # function assumes that a 'fixation' has ended when a deviation of more than maxerr
        # from the initial 'fixation' position has been detected (using Pythagoras, of course)

        stime, spos = self.wait_for_fixation_start()
        maxerr = 3  # pixels

        while True:
            npos = self.sample()  # get newest sample
            if ((spos[0] - npos[0])**2 +
                (spos[1] - npos[1])**2)**0.5 > maxerr:  # Pythagoras
                break

        return clock.get_time(), spos

    def wait_for_blink_start(self):
        """Returns starting time and position of a simulated blink (mousebuttondown)"""

        # blinks are simulated with mouseclicks: a right mouseclick simulates the closing
        # of the eyes, a mousebuttonup the opening.

        while not self.blinking:
            pos = self.sample()

        return clock.get_time(), pos

    def wait_for_blink_end(self):
        """Returns ending time and position of a simulated blink (mousebuttonup)"""

        # blinks are simulated with mouseclicks: a right mouseclick simulates the closing
        # of the eyes, a mousebuttonup the opening.

        # wait for blink start
        while not self.blinking:
            spos = self.sample()
        # wait for blink end
        while self.blinking:
            epos = self.sample()

        return clock.get_time(), epos

    def set_draw_drift_correction_target_func(self, func):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        self.draw_drift_correction_target = func

    # ***
    #
    # Internal functions below
    #
    # ***

    def draw_drift_correction_target(self, x, y):
        """
        Draws the drift-correction target.
        
        arguments
        
        x        --    The X coordinate
        y        --    The Y coordinate
        """

        self.screen.clear()
        self.screen.draw_fixation(fixtype='dot', colour=settings.FGC, \
            pos=(x,y), pw=0, diameter=12)
        self.display.fill(self.screen)
        self.display.show()
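
# --- Illustrative usage sketch (not part of the original module) ---
# A minimal demo of the mouse-simulated tracker above. It assumes the usual
# imports of this module (pygaze settings and the clock used by the class) are
# available; a right mouse click simulates a blink. The import sits inside a
# __main__ guard so the sketch only runs when the file is executed directly.
if __name__ == "__main__":

    from pygaze.display import Display

    demo_disp = Display()
    demo_tracker = Dummy(demo_disp)
    demo_tracker.calibrate()
    demo_tracker.start_recording()
    # print the simulated gaze position (the mouse) for roughly two seconds
    t0 = clock.get_time()
    while clock.get_time() - t0 < 2000:
        print(demo_tracker.sample())
        clock.pause(100)
    demo_tracker.stop_recording()
    demo_tracker.close()
    demo_disp.close()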
Example #30
0
	def __init__(self, display, logfile=LOGFILE, eventdetection=EVENTDETECTION, \
		saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, \
		**args):

		"""Initializes the EyeTribeTracker object
		
		arguments
		display	-- a pygaze.display.Display instance
		
		keyword arguments
		logfile	-- logfile name (string value); note that this is the
				   name for the eye data log file (default = LOGFILE)
		"""

		# try to copy docstrings (but ignore it if it fails, as we do
		# not need it for actual functioning of the code)
		try:
			copy_docstr(BaseEyeTracker, EyeTribeTracker)
		except:
			# we're not even going to show a warning, since the copied
			# docstring is useful for code editors; these load the docs
			# in a non-verbose manner, so warning messages would be lost
			pass

		# object properties
		self.disp = display
		self.screen = Screen()
		self.dispsize = DISPSIZE # display size in pixels
		self.screensize = SCREENSIZE # display size in cm
		self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
		self.errorbeep = Sound(osc='saw',freq=100, length=100)
		
		# output file properties
		self.outputfile = logfile
		
		# eye tracker properties
		self.connected = False
		self.recording = False
		self.errdist = 2 # degrees; maximal error for drift correction
		self.pxerrdist = 30 # initial error in pixels
		self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
		self.prevsample = (-1,-1)
		self.prevps = -1
		
		# event detection properties
		self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
		self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
		self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold
		self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold
		self.eventdetection = eventdetection
		self.set_detection_type(self.eventdetection)
		self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

		# connect to the tracker
		self.eyetribe = EyeTribe(logfilename=logfile)

		# get info on the sample rate
		self.samplerate = self.eyetribe._samplefreq
		self.sampletime = 1000.0 * self.eyetribe._intsampletime

		# initiation report
		self.log("pygaze initiation report start")
		self.log("display resolution: %sx%s" % (self.dispsize[0],self.dispsize[1]))
		self.log("display size in cm: %sx%s" % (self.screensize[0],self.screensize[1]))
		self.log("samplerate: %.2f Hz" % self.samplerate)
		self.log("sampletime: %.2f ms" % self.sampletime)
		self.log("fixation threshold: %s degrees" % self.fixtresh)
		self.log("speed threshold: %s degrees/second" % self.spdtresh)
		self.log("acceleration threshold: %s degrees/second**2" % self.accthresh)
		self.log("pygaze initiation report end")
Example #31
0
kb = Keyboard()

# Initialise the EyeTracker and let it know which Display instance to use by
# passing it to the EyeTracker.
tracker = EyeTracker(disp)

# Create a Logfile instance that keeps track of when videos start.
log = Logfile()
# Write a header to the log file.
log.write(['date', 'time', 'trialnr', 'video', 'timestamp'])

# # # # #
# SCREENS

# Create a screen to show instructions on.
textscr = Screen()
textscr.draw_text("Press any key to start the next video.", fontsize=24)

# Create a screen to show images on. This will be the screen that we will use
# to display each video frame.
stimscr = Screen()

# # # # #
# PLAY VIDEOS

# Calibrate the eye tracker.
tracker.calibrate()

# Randomise the list of videos. Remove this line if you want the videos to be
# displayed in alphabetical order.
random.shuffle(VIDEOS)
Example #32
0
from pygaze.display import Display
from pygaze.screen import Screen
from pygaze.eyetracker import EyeTracker
import pygaze.libtime as timer

disp = Display()
scr = Screen()

scr.draw_text("Preparing experiment...", fontsize=20)
disp.fill(scr)
disp.show()

tracker = EyeTracker(disp)
tracker.calibrate()

tracker.start_recording()
t0 = timer.get_time()
while timer.get_time() - t0 < 5000:
    gazepos = tracker.sample()
    scr.clear()
    scr.draw_fixation(fixtype='dot', pos=gazepos)
    disp.fill(scr)
    disp.show()

tracker.stop_recording()
tracker.close()

disp.close()
Example #33
0
	# set up function argument types and return type
	XInputSetState = xinput.XInputSetState
	XInputSetState.argtypes = [ctypes.c_uint, ctypes.POINTER(XINPUT_VIBRATION)]
	XInputSetState.restype = ctypes.c_uint
	# define helper function
	def set_vibration(controller, left_motor, right_motor):
	    vibration = XINPUT_VIBRATION(int(left_motor * 65535), int(right_motor * 65535))
	    XInputSetState(controller, ctypes.byref(vibration))


# # # # #
# PYGAZE INSTANCES

# visual
disp = Display()
scr = Screen()
# input
js = Joystick()


# # # # #
# RUN

# run until a minute has passed
t0 = timer.get_time()
t1 = timer.get_time()
text = "Test the joystick!"
while t1 - t0 < 60000:
	# get joystick input
	event, value, t1 = js.get_joyinput(timeout=10)
	# update text
Example #34
0

# # # # #
# directory stuff

DIR = os.path.split(os.path.abspath(__file__))[0]
image_file = os.path.join(DIR, 'www.google.co.uk_(Pixel 2).png')

# # # # #
# create instances

# initialize the display
disp = Display()

# initialize a screen
scr = Screen()


# initialize an EyeTracker
tracker = EyeTracker(disp)

# initialize a keyboard
kb = Keyboard(keylist=['space'],timeout=None)

# initialize a Timer
timer = Time()

# create a new logfile
log = Logfile(filename="test")
log.write(["x_pos","y_pos", "time"])
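
# --- Illustrative continuation sketch (not part of the original script) ---
# The header written above suggests a loop that samples gaze and writes one
# row per sample; a hedged sketch of what such a loop could look like,
# assuming the Time instance exposes get_time() and Logfile exposes close():
#
#     kb.get_key()                      # wait for a space press to start
#     tracker.start_recording()
#     t0 = timer.get_time()
#     while timer.get_time() - t0 < 10000:
#         x, y = tracker.sample()
#         log.write([x, y, timer.get_time()])
#     tracker.stop_recording()
#     log.close()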
Example #35
0
from pygaze.display import Display
import pygaze.libtime as timer
from pygaze.screen import Screen
from constants import *

fixscreen = Screen()
fixscreen.draw_fixation(fixtype='dot')

disp = Display()
# show the fixation screen for two seconds
disp.fill(fixscreen)
disp.show()
timer.pause(2000)
disp.close()
Example #36
0
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 10 13:29:48 2016

@author: adam
"""

from pygaze.display import Display
from pygaze.screen import Screen
import pygaze.libtime as timer

# disp = Window(size=DISPSIZE, units='pix', fullscr=True)
disp = Display()
fixscreen = Screen()
fixscreen.draw_fixation(fixtype='dot')

imgscreen = Screen()
imgscreen.draw_image('/home/adam/Desktop/experiment0/Example.png')

disp.fill(fixscreen)
disp.show()
timer.pause(1000)

disp.fill(imgscreen)
disp.show()
timer.pause(2000)

disp.close()
Example #37
0
    def __init__(self,
                 display,
                 resolution=DISPSIZE,
                 data_file=LOGFILENAME + ".edf",
                 fg_color=FGC,
                 bg_color=BGC,
                 eventdetection=EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 force_drift_correct=True,
                 pupil_size_mode=EYELINKPUPILSIZEMODE,
                 **args):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # try to import copy docstring (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, libeyelink)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        global _eyelink

        # Make sure that we have a valid data file. The local_data_file may
        # contain a folder. The eyelink_data_file is only a basename, i.e.
        # without folder. The eyelink_data_file must be at most eight characters
        # and end with a `.edf` extension.
        self.local_data_file = data_file
        self.eyelink_data_file = os.path.basename(data_file)
        stem, ext = os.path.splitext(self.eyelink_data_file)
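        # (Added note: e.g. "sub1.edf" is accepted below, whereas
        # "participant01.edf" would raise; the eight-character limit presumably
        # stems from the DOS-style filesystem on the EyeLink host PC.)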
        if len(stem) > 8 or ext.lower() != '.edf':
            raise Exception(
                "The EyeLink cannot handle filenames longer than eight "
                "characters (excluding '.edf' extension).")

        # properties
        self.display = display
        self.fontsize = 18
        self.scr = Screen(disptype=DISPTYPE, mousevisible=False)
        self.kb = Keyboard(keylist=["escape", "q"], timeout=1)
        self.resolution = resolution
        self.recording = False
        self.saccade_velocity_treshold = saccade_velocity_threshold
        self.saccade_acceleration_treshold = saccade_acceleration_threshold
        self.eye_used = None
        self.left_eye = 0
        self.right_eye = 1
        self.binocular = 2
        self.pupil_size_mode = pupil_size_mode
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        # degrees; maximal distance from fixation start (if gaze wanders beyond
        # this, fixation has stopped)
        self.fixtresh = 1.5
        # milliseconds; amount of time gaze has to linger within self.fixtresh
        # to be marked as a fixation
        self.fixtimetresh = 100
        # degrees per second; saccade velocity threshold
        self.spdtresh = self.saccade_velocity_treshold
        # degrees per second**2; saccade acceleration threshold
        self.accthresh = self.saccade_acceleration_treshold
        self.set_detection_type(eventdetection)
        # weighted distance, used for determining whether a movement is due to
        # measurement error (1 is ok, higher is more conservative and will
        # result in only larger saccades to be detected)
        self.weightdist = 10
        # distance between participant and screen in cm
        self.screendist = SCREENDIST
        # physical size of the screen in cm
        self.screensize = SCREENSIZE
        self.pixpercm = (self.resolution[0]/float(self.screensize[0]) + \
         self.resolution[1]/float(self.screensize[1])) / 2.0
        # only initialize eyelink once
        if _eyelink == None:
            try:
                _eyelink = pylink.EyeLink()
            except:
                raise Exception(
                    "Error in libeyelink.libeyelink.__init__(): Failed to "
                    "connect to the tracker!")
        # determine software version of tracker
        self.tracker_software_ver = 0
        self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
        if self.eyelink_ver == 3:
            tvstr = pylink.getEYELINK().getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            self.tracker_software_ver = int(float(tvstr[(vindex + \
             len("EYELINK CL")):].strip()))
        if self.eyelink_ver == 1:
            self.eyelink_model = 'EyeLink I'
        elif self.eyelink_ver == 2:
            self.eyelink_model = 'EyeLink II'
        elif self.eyelink_ver == 3:
            self.eyelink_model = 'EyeLink 1000'
        else:
            self.eyelink_model = 'EyeLink (model unknown)'
        # Open graphics
        self.eyelink_graphics = EyelinkGraphics(self, _eyelink)
        pylink.openGraphicsEx(self.eyelink_graphics)
        # Optionally force drift correction. For some reason this must be done
        # as (one of) the first things, otherwise a segmentation fault occurs.
        if force_drift_correct:
            self.send_command('driftcorrect_cr_disable = OFF')
        # Set pupil-size mode
        if self.pupil_size_mode == 'area':
            pylink.getEYELINK().setPupilSizeDiameter(False)
        elif self.pupil_size_mode == 'diameter':
            pylink.getEYELINK().setPupilSizeDiameter(True)
        else:
            raise Exception(
             "pupil_size_mode should be 'area' or 'diameter', not %s" \
             % self.pupil_size_mode)
        pylink.getEYELINK().openDataFile(self.eyelink_data_file)
        pylink.flushGetkeyQueue()
        pylink.getEYELINK().setOfflineMode()
        # notify eyelink of display resolution
        self.send_command("screen_pixel_coords = 0 0 %d %d" % \
         (self.resolution[0], self.resolution[1]))
        # get some configuration stuff
        if self.eyelink_ver >= 2:
            self.send_command("select_parser_configuration 0")
            if self.eyelink_ver == 2:  # turn off scenelink camera stuff
                self.send_command("scene_camera_gazemap = NO")
        # set EDF file contents (this specifies which data is written to the EDF
        # file)
        self.send_command(
            "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
        )
        if self.tracker_software_ver >= 4:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        # set link data (this specifies which data is sent through the link and
        # thus can be used in gaze contingent displays)
        self.send_command(
            "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if self.tracker_software_ver >= 4:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
        # not quite sure what this means (according to Sebastiaan Mathot, it
        # might be the button that is used to end drift correction?)
        self.send_command("button_function 5 'accept_target_fixation'")

        if not self.connected():
            raise Exception(
                "Error in libeyelink.libeyelink.__init__(): Failed to connect "
                "to the eyetracker!")
Example #38
0
class SMItracker(BaseEyeTracker):

	"""A class for SMI eye tracker objects"""

	def __init__(self, display, ip='127.0.0.1', sendport=4444, receiveport= \
		5555, logfile=LOGFILE, eventdetection=EVENTDETECTION, \
		saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, \
		**args):

		"""Initializes the SMItracker object
		
		arguments
		display	-- a pygaze.display.Display instance
		
		keyword arguments
		ip		-- internal ip address for iViewX (default = 
				   '127.0.0.1')
		sendport	-- port number for iViewX sending (default = 4444)
		receiveport	-- port number for iViewX receiving (default = 5555)
		logfile	-- logfile name (string value); note that this is the
				   name for the SMI logfile, NOT the .idf file
				   (default = LOGFILE)
		"""

		# try to copy docstrings (but ignore it if it fails, as we do
		# not need it for actual functioning of the code)
		try:
			copy_docstr(BaseEyeTracker, SMItracker)
		except:
			# we're not even going to show a warning, since the copied
			# docstring is useful for code editors; these load the docs
			# in a non-verbose manner, so warning messages would be lost
			pass

		# object properties
		self.disp = display
		self.screen = Screen()
		self.dispsize = DISPSIZE # display size in pixels
		self.screensize = SCREENSIZE # display size in cm
		self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
		self.errorbeep = Sound(osc='saw',freq=100, length=100)
		
		# output file properties
		self.outputfile = logfile
		self.description = "experiment" # TODO: EXPERIMENT NAME
		self.participant = "participant" # TODO: PP NAME
		
		# eye tracker properties
		self.connected = False
		self.recording = False
		self.eye_used = 0 # 0=left, 1=right, 2=binocular
		self.left_eye = 0
		self.right_eye = 1
		self.binocular = 2
		self.errdist = 2 # degrees; maximal error for drift correction
		self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
		self.prevsample = (-1,-1)
		self.prevps = -1
		
		# event detection properties
		self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
		self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
		self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold
		self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold
		self.eventdetection = eventdetection
		self.set_detection_type(self.eventdetection)
		self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

		# set logger
		res = iViewXAPI.iV_SetLogger(c_int(1), c_char_p(logfile + '_SMILOG.txt'))
		if res != 1:
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.__init__: failed to set logger; %s" % err)
		# first logger argument is for logging type (I'm guessing these are decimal bit codes)
		# LOG status					bitcode
		# 1 = LOG_LEVEL_BUG			 00001
		# 2 = LOG_LEVEL_iV_FCT		  00010
		# 4 = LOG_LEVEL_ETCOM		   00100
		# 8 = LOG_LEVEL_ALL			 01000
		# 16 = LOG_LEVEL_IV_COMMAND	 10000
		# these can be used together, using a bitwise or, e.g.: 1|2|4 (bitcode 00111)

		# connect to iViewX
		res = iViewXAPI.iV_Connect(c_char_p(ip), c_int(sendport), c_char_p(ip), c_int(receiveport))
		if res == 1:
			res = iViewXAPI.iV_GetSystemInfo(byref(systemData))
			self.samplerate = systemData.samplerate
			self.sampletime = 1000.0 / self.samplerate
			if res != 1:
				err = errorstring(res)
				raise Exception("Error in libsmi.SMItracker.__init__: failed to get system information; %s" % err)
		# handle connection errors
		else:
			self.connected = False
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.__init__: establishing connection failed; %s" % err)

		# initiation report
		self.log("pygaze initiation report start")
		self.log("experiment: %s" % self.description)
		self.log("participant: %s" % self.participant)
		self.log("display resolution: %sx%s" % (self.dispsize[0],self.dispsize[1]))
		self.log("display size in cm: %sx%s" % (self.screensize[0],self.screensize[1]))
		self.log("samplerate: %s Hz" % self.samplerate)
		self.log("sampletime: %s ms" % self.sampletime)
		self.log("fixation threshold: %s degrees" % self.fixtresh)
		self.log("speed threshold: %s degrees/second" % self.spdtresh)
		self.log("acceleration threshold: %s degrees/second**2" % self.accthresh)
		self.log("pygaze initiation report end")


	def calibrate(self, calibrate=True, validate=True):

		"""Calibrates the eye tracking system
		
		arguments
		None
		
		keyword arguments
		calibrate	-- Boolean indicating if calibration should be
				   performed (default = True)
		validate	-- Boolean indicating if validation should be performed
				   (default = True)
		
		returns
		success	-- returns True if calibration succeeded, or False if
				   not; in addition a calibration log is added to the
				   log file and some properties are updated (i.e. the
				   thresholds for detection algorithms)
		"""

		# TODO:
		# add feedback for calibration (e.g. with iV_GetAccuracyImage (struct ImageStruct * imageData) for accuracy and iV_GetEyeImage for cool eye pictures)
		# example: res = iViewXAPI.iV_GetEyeImage(byref(imageData))
		# ImageStruct has four data fields:
		# imageHeight	-- int vertical size (px)
		# imageWidth	-- int horizontal size (px)
		# imageSize		-- int image data size (byte)
		# imageBuffer	-- pointer to image data (I have NO idea what format this is in)

		# configure calibration (NOT starting it)
		calibrationData = CCalibration(9, 1, 0, 1, 1, 0, 127, 1, 15, b"") # (method (i.e.: number of points), visualization, display, speed, auto, fg, bg, shape, size, filename)

		# setup calibration
		res = iViewXAPI.iV_SetupCalibration(byref(calibrationData))
		if res != 1:
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.calibrate: failed to setup calibration; %s" % err)

		# calibrate
		cres = iViewXAPI.iV_Calibrate()
			
		# validate if calibration returns success
		if cres == 1:
			cerr = None
			vres = iViewXAPI.iV_Validate()
			# handle validation errors
			if vres != 1:
				verr = errorstring(vres)
			else:
				verr = None
##				# TEST #
##				res = iViewXAPI.iV_GetAccuracyImage(byref(imageData))
##				self.log("IMAGEBUFFERSTART")
##				self.log(imageData.imageBuffer)
##				self.log("IMAGEBUFFERSTOP")
##				print("Image height: %s, image width: %s, image size: %s" % (imageData.imageHeight,imageData.imageWidth, imageData.imageSize))
##				print imageData.imageBuffer
##				########
		# handle calibration errors
		else:
			cerr = errorstring(cres)

		# return success
		if cerr == None:
			print("libsmi.SMItracker.calibrate: calibration was successful")
			if verr == None:
				print("libsmi.SMItracker.calibrate: validation was successful")

				# present instructions
				self.disp.fill() # clear display
				self.screen.draw_text(text="Noise calibration: please look at the dot\n\n(press space to start)", pos=(self.dispsize[0]/2, int(self.dispsize[1]*0.2)), center=True)
				self.screen.draw_fixation(fixtype='dot')
				self.disp.fill(self.screen)
				self.disp.show()
				self.screen.clear() # clear screen again

				# wait for spacepress
				self.kb.get_key(keylist=['space'], timeout=None)

				# show fixation
				self.disp.fill()
				self.screen.draw_fixation(fixtype='dot')
				self.disp.fill(self.screen)
				self.disp.show()
				self.screen.clear()

				# wait for a bit, to allow participant to fixate
				clock.pause(500)

				# get samples
				sl = [self.sample()] # samplelist, prefilled with 1 sample to prevent sl[-1] from producing an error; first sample will be ignored for RMS calculation
				t0 = clock.get_time() # starting time
				while clock.get_time() - t0 < 1000:
					s = self.sample() # sample
					if s != sl[-1] and s != (-1,-1) and s != (0,0):
						sl.append(s)
				# calculate RMS noise
				Xvar = []
				Yvar = []
				for i in range(2,len(sl)):
					Xvar.append((sl[i][0]-sl[i-1][0])**2)
					Yvar.append((sl[i][1]-sl[i-1][1])**2)
				XRMS = (sum(Xvar) / len(Xvar))**0.5
				YRMS = (sum(Yvar) / len(Yvar))**0.5
				self.pxdsttresh = (XRMS, YRMS)

				# calculate pixels per cm
				pixpercm = (self.dispsize[0]/float(self.screensize[0]) + self.dispsize[1]/float(self.screensize[1])) / 2
				# get accuracy
				res = 0; i = 0
				while res != 1 and i < self.maxtries: # multiple tries, in case no (valid) sample is available
					res = iViewXAPI.iV_GetAccuracy(byref(accuracyData),0) # 0 is for 'no visualization'
					i += 1
					clock.pause(int(self.sampletime)) # wait for sampletime
				if res == 1:
					self.accuracy = ((accuracyData.deviationLX,accuracyData.deviationLY), (accuracyData.deviationRX,accuracyData.deviationRY)) # dsttresh = (left tuple, right tuple); tuple = (horizontal deviation, vertical deviation) in degrees of visual angle
				else:
					err = errorstring(res)
					print("WARNING libsmi.SMItracker.calibrate: failed to obtain accuracy data; %s" % err)
					self.accuracy = ((2,2),(2,2))
					print("libsmi.SMItracker.calibrate: As an estimate, the intersample distance threshold was set to its default value of 2 degrees")
				# get distance from screen to eyes (information from tracker)
				res = 0; i = 0
				while res != 1 and i < self.maxtries: # multiple tries, in case no (valid) sample is available
					res = iViewXAPI.iV_GetSample(byref(sampleData))
					i += 1
					clock.pause(int(self.sampletime)) # wait for sampletime
				if res == 1:
					screendist = sampleData.leftEye.eyePositionZ / 10.0 # eyePositionZ is in mm; screendist is in cm
				else:
					err = errorstring(res)
					print("WARNING libsmi.SMItracker.calibrate: failed to obtain screen distance; %s" % err)
					screendist = SCREENDIST
					print("libsmi.SMItracker.calibrate: As an estimate, the screen distance was set to its default value of 57 cm")
				# calculate thresholds based on tracker settings
				self.pxerrdist = deg2pix(screendist, self.errdist, pixpercm)
				self.pxfixtresh = deg2pix(screendist, self.fixtresh, pixpercm)
				self.pxaccuracy = ((deg2pix(screendist, self.accuracy[0][0], pixpercm),deg2pix(screendist, self.accuracy[0][1], pixpercm)), (deg2pix(screendist, self.accuracy[1][0], pixpercm),deg2pix(screendist, self.accuracy[1][1], pixpercm)))
				self.pxspdtresh = deg2pix(screendist, self.spdtresh/1000.0, pixpercm) # in pixels per millisecond
				self.pxacctresh = deg2pix(screendist, self.accthresh/1000.0, pixpercm) # in pixels per millisecond**2
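				# (Added note: deg2pix presumably implements the usual
				# pixels = tan(radians(angle)) * screendist_cm * pixpercm
				# conversion; e.g. at 57 cm and ~36 px/cm, the 1.5 degree
				# fixation threshold works out to roughly
				# tan(1.5 deg) * 57 * 36 ~= 54 pixels.)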

				# calibration report
				self.log("pygaze calibration report start")
				self.log("accuracy (degrees): LX=%s, LY=%s, RX=%s, RY=%s" % (self.accuracy[0][0],self.accuracy[0][1],self.accuracy[1][0],self.accuracy[1][1]))
				self.log("accuracy (in pixels): LX=%s, LY=%s, RX=%s, RY=%s" % (self.pxaccuracy[0][0],self.pxaccuracy[0][1],self.pxaccuracy[1][0],self.pxaccuracy[1][1]))
				self.log("precision (RMS noise in pixels): X=%s, Y=%s" % (self.pxdsttresh[0],self.pxdsttresh[1]))
				self.log("distance between participant and display: %s cm" % screendist)
				self.log("fixation threshold: %s pixels" % self.pxfixtresh)
				self.log("speed threshold: %s pixels/ms" % self.pxspdtresh)
				self.log("acceleration threshold: %s pixels/ms**2" % self.pxacctresh)
				self.log("pygaze calibration report end")

				return True

			# validation error
			else:
				print("WARNING libsmi.SMItracker.calibrate: validation was unsuccessful; %s" % verr)
				return False

		# calibration error
		else:
			print("WARNING libsmi.SMItracker.calibrate: calibration was unsuccessful; %s" % cerr)
			return False


	def close(self):

		"""Neatly close connection to tracker
		
		arguments
		None
		
		returns
		Nothing	-- saves data and sets self.connected to False
		"""

		# save data
		res = iViewXAPI.iV_SaveData(str(self.outputfile), str(self.description), str(self.participant), 1)
		if res != 1:
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.close: failed to save data; %s" % err)

		# close connection
		iViewXAPI.iV_Disconnect()
		self.connected = False
		

	def connected(self):

		"""Checks if the tracker is connected
		
		arguments
		None
		
		returns
		connected	-- True if connection is established, False if not;
				   sets self.connected to the same value
		"""

		res = iViewXAPI.iV_IsConnected()

		if res == 1:
			self.connected = True
		else:
			self.connected = False

		return self.connected

	def drift_correction(self, pos=None, fix_triggered=False):

		"""Performs a drift check
		
		arguments
		None
		
		keyword arguments
		pos			-- (x, y) position of the fixation dot or None for
					   a central fixation (default = None)
		fix_triggered	-- Boolean indicating if drift check should be
					   performed based on gaze position (fix_triggered
					   = True) or on spacepress (fix_triggered = 
					   False) (default = False)
		
		returns
		checked		-- Boolean indicating if drift check is ok (True)
					   or not (False); or calls self.calibrate if 'q'
					   or 'escape' is pressed
		"""

		if fix_triggered:
			return self.fix_triggered_drift_correction(pos)

		if pos == None:
			pos = self.dispsize[0] / 2, self.dispsize[1] / 2

		pressed = False
		while not pressed:
			pressed, presstime = self.kb.get_key()
			if pressed:
				if pressed == 'escape' or pressed == 'q':
					print("libsmi.SMItracker.drift_correction: 'q' or 'escape' pressed")
					return self.calibrate(calibrate=True, validate=True)
				gazepos = self.sample()
				if ((gazepos[0]-pos[0])**2  + (gazepos[1]-pos[1])**2)**0.5 < self.pxerrdist:
					return True
				else:
					self.errorbeep.play()
		return False
		

	def fix_triggered_drift_correction(self, pos=None, min_samples=10, max_dev=60, reset_threshold=30):

		"""Performs a fixation triggered drift correction by collecting
		a number of samples and calculating the average distance from the
		fixation position
		
		arguments
		None
		
		keyword arguments
		pos			-- (x, y) position of the fixation dot or None for
					   a central fixation (default = None)
		min_samples		-- minimal amount of samples after which an
					   average deviation is calculated (default = 10)
		max_dev		-- maximal deviation from fixation in pixels
					   (default = 60)
		reset_threshold	-- if the horizontal or vertical distance in
					   pixels between two consecutive samples is
					   larger than this threshold, the sample
					   collection is reset (default = 30)
		
		returns
		checked		-- Boolean indicating if drift check is ok (True)
					   or not (False); or calls self.calibrate if 'q'
					   or 'escape' is pressed
		"""

		if pos == None:
			pos = self.dispsize[0] / 2, self.dispsize[1] / 2

		# loop until we have sufficient samples
		lx = []
		ly = []
		while len(lx) < min_samples:

			# pressing escape enters the calibration screen
			if self.kb.get_key()[0] in ['escape','q']:
				print("libsmi.SMItracker.fix_triggered_drift_correction: 'q' or 'escape' pressed")
				return self.calibrate(calibrate=True, validate=True)

			# collect a sample
			x, y = self.sample()

			if len(lx) == 0 or x != lx[-1] or y != ly[-1]:

				# if present sample deviates too much from previous sample, reset counting
				if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold):
					lx = []
					ly = []

				# collect samples
				else:
					lx.append(x)
					ly.append(y)

			if len(lx) == min_samples:

				avg_x = sum(lx) / len(lx)
				avg_y = sum(ly) / len(ly)
				d = ((avg_x - pos[0]) ** 2 + (avg_y - pos[1]) ** 2)**0.5

				if d < max_dev:
					return True
				else:
					lx = []
					ly = []

	def get_eyetracker_clock_async(self):

		"""Not supported for SMItracker (yet)"""

		print("function not supported yet")

	def log(self, msg):

		"""Writes a message to the log file
		
		arguments
		msg		-- a string to include in the log file
		
		returns
		Nothing	-- uses native log function of iViewX to include a line
				   in the log file
		"""

		res = iViewXAPI.iV_Log(c_char_p(msg))
		if res != 1:
			err = errorstring(res)
			print("WARNING libsmi.SMItracker.log: failed to log message '%s'; %s" % (msg,err))

	def log_var(self, var, val):

		"""Writes a variable to the log file
		
		arguments
		var		-- variable name
		val		-- variable value
		
		returns
		Nothing	-- uses native log function of iViewX to include a line
				   in the log file in a "var NAME VALUE" layout
		"""

		msg = "var %s %s" % (var, val)

		res = iViewXAPI.iV_Log(c_char_p(msg))
		if res != 1:
			err = errorstring(res)
			print("WARNING libsmi.SMItracker.log_var: failed to log variable '%s' with value '%s'; %s" % (var,val,err))

	def prepare_backdrop(self):

		"""Not supported for SMItracker (yet)"""

		print("function not supported yet")

	def prepare_drift_correction(self, pos):

		"""Not supported for SMItracker (yet)"""

		print("function not supported yet")

	def pupil_size(self):

		"""Return pupil size
		
		arguments
		None
		
		returns
		pupil size	-- returns pupil diameter for the eye that is currently
				   being tracked (as specified by self.eye_used) or -1
				   when no data is obtainable
		"""

		res = iViewXAPI.iV_GetSample(byref(sampleData))

		# if a new sample exists
		if res == 1:
			# left eye
			if self.eye_used == self.left_eye:
				ps = sampleData.leftEye.diam
			# right eye
			else:
				ps = sampleData.rightEye.diam
			# set previous pupil size to newest pupil size
			self.prevps = ps
			
			return ps
		
		# no new sample available
		elif res == 2:
			
			return self.prevps
		
		# invalid data
		else:
			# print warning to interpreter
			err = errorstring(res)
			print("WARNING libsmi.SMItracker.pupil_size: failed to obtain sample; %s" % err)
			
			return -1


	def sample(self):

		"""Returns newest available gaze position
		
		arguments
		None
		
		returns
		sample	-- an (x,y) tuple or a (-1,-1) on an error
		"""

		res = iViewXAPI.iV_GetSample(byref(sampleData))

		if self.eye_used == self.right_eye:
			newsample = sampleData.rightEye.gazeX, sampleData.rightEye.gazeY
		else:
			newsample = sampleData.leftEye.gazeX, sampleData.leftEye.gazeY

		if res == 1:
			self.prevsample = newsample[:]
			return newsample
		elif res == 2:
			return self.prevsample
		else:
			err = errorstring(res)
			print("WARNING libsmi.SMItracker.sample: failed to obtain sample; %s" % err)
			return (-1,-1)


	def send_command(self, cmd):

		"""Sends a command to the eye tracker
		
		arguments
		cmd		-- the command (a string value) to be sent to iViewX
		
		returns
		Nothing
		"""

		try:
			iViewXAPI.iV_SendCommand(c_char_p(cmd))
		except:
			raise Exception("Error in libsmi.SMItracker.send_command: failed to send remote command to iViewX (iV_SendCommand might be deprecated)")

	def set_backdrop(self):

		"""Not supported for SMItracker (yet)"""

		print("function not supported yet")

	def set_eye_used(self):

		"""Logs the eye_used variable, based on which eye was specified
		(if both eyes are being tracked, the left eye is used)
		
		arguments
		None
		
		returns
		Nothing	-- logs which eye is used by calling self.log_var, e.g.
				   self.log_var("eye_used", "right")
		"""

		if self.eye_used == self.right_eye:
			self.log_var("eye_used", "right")
		else:
			self.log_var("eye_used", "left")


	def start_recording(self):

		"""Starts recording eye position
		
		arguments
		None
		
		returns
		Nothing	-- sets self.recording to True when recording is
				   successfully started
		"""

		res = 0; i = 0
		while res != 1 and i < self.maxtries:
			res = iViewXAPI.iV_StartRecording()
			i += 1
		
		if res == 1:
			self.recording = True
		else:
			self.recording = False
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.start_recording: %s" % err)


	def status_msg(self, msg):

		"""Not supported for SMItracker (yet)"""

		print("function not supported yet")


	def stop_recording(self):

		"""Stop recording eye position
		
		arguments
		None
		
		returns
		Nothing	-- sets self.recording to False when recording is
				   successfully stopped
		"""

		res = 0; i = 0
		while res != 1 and i < self.maxtries:
			res = iViewXAPI.iV_StopRecording()
			i += 1
		
		if res == 1:
			self.recording = False
		else:
			self.recording = False
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.stop_recording: %s" % err)
	
	
	def set_detection_type(self, eventdetection):
		
		"""Set the event detection type to either PyGaze algorithms, or
		native algorithms as provided by the manufacturer (only if
		available: detection type will default to PyGaze if no native
		functions are available)
		
		arguments
		eventdetection	--	a string indicating which detection type
						should be employed: either 'pygaze' for
						PyGaze event detection algorithms or
						'native' for the manufacturer's algorithms (only
						if available; will default to 'pygaze' if no
						native event detection is available)
		returns		--	detection type for saccades, fixations and
						blinks in a tuple, e.g. 
						('pygaze','native','native') when 'native'
						was passed, but native detection was not
						available for saccade detection
		"""
		
		if eventdetection in ['pygaze','native']:
			self.eventdetection = eventdetection
		
		return ('pygaze','native','pygaze')


	def wait_for_event(self, event):

		"""Waits for event
		
		arguments
		event		-- an integer event code, one of the following:
					3 = STARTBLINK
					4 = ENDBLINK
					5 = STARTSACC
					6 = ENDSACC
					7 = STARTFIX
					8 = ENDFIX
		
		returns
		outcome	-- a self.wait_for_* method is called, depending on the
				   specified event; the return values of corresponding
				   method are returned
		"""

		if event == 5:
			outcome = self.wait_for_saccade_start()
		elif event == 6:
			outcome = self.wait_for_saccade_end()
		elif event == 7:
			outcome = self.wait_for_fixation_start()
		elif event == 8:
			outcome = self.wait_for_fixation_end()
		elif event == 3:
			outcome = self.wait_for_blink_start()
		elif event == 4:
			outcome = self.wait_for_blink_end()
		else:
			raise Exception("Error in libsmi.SMItracker.wait_for_event: eventcode %s is not supported" % event)

		return outcome
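	# Illustrative dispatch (not part of the original source; "tracker" is
	# assumed to be an SMItracker instance): the generic interface
	#     etime, fixpos = tracker.wait_for_event(8)
	# is equivalent to calling tracker.wait_for_fixation_end() directly,
	# since event code 8 (ENDFIX) maps onto that method.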


	def wait_for_blink_end(self):

		"""Waits for a blink end and returns the blink ending time
		
		arguments
		None
		
		returns
		timestamp		--	blink ending time in milliseconds, as
						measured from experiment begin time
		"""

		
		# # # # #
		# SMI method

		if self.eventdetection == 'native':
			
			# print warning, since SMI does not have blink detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer blink detection; PyGaze algorithm \
				will be used")

		# # # # #
		# PyGaze method
		
		blinking = True
		
		# loop while there is a blink
		while blinking:
			# get newest sample
			gazepos = self.sample()
			# check if it's valid
			if self.is_valid_sample(gazepos):
				# if it is a valid sample, blinking has stopped
				blinking = False
		
		# return timestamp of blink end
		return clock.get_time()		
		

	def wait_for_blink_start(self):

		"""Waits for a blink start and returns the blink starting time
		
		arguments
		None
		
		returns
		timestamp		--	blink starting time in milliseconds, as
						measured from experiment begin time
		"""
		
		# # # # #
		# SMI method

		if self.eventdetection == 'native':
			
			# print warning, since SMI does not have blink detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer blink detection; PyGaze algorithm \
				will be used")

		# # # # #
		# PyGaze method
		
		blinking = False
		
		# loop until there is a blink
		while not blinking:
			# get newest sample
			gazepos = self.sample()
			# check if it's a valid sample
			if not self.is_valid_sample(gazepos):
				# get timestamp for possible blink start
				t0 = clock.get_time()
				# loop until a blink is determined, or a valid sample occurs
				while not self.is_valid_sample(self.sample()):
					# check if time has surpassed 150 ms
					if clock.get_time()-t0 >= 150:
						# return timestamp of blink start
						return t0
		

	def wait_for_fixation_end(self):

		"""Returns time and gaze position when a fixation has ended;
		function assumes that a 'fixation' has ended when a deviation of
		more than self.pxfixtresh from the initial fixation position has
		been detected (self.pxfixtresh is created in self.calibration,
		based on self.fixtresh, a property defined in self.__init__)
		
		arguments
		None
		
		returns
		time, gazepos	-- time is the ending time in milliseconds (from
					   expstart), gazepos is a (x,y) gaze position
					   tuple of the position from which the fixation
					   was initiated
		"""

		# # # # #
		# SMI method

		if self.eventdetection == 'native':
			
			moving = True			
			while moving:
				# get newest event
				res = 0
				while res != 1:
					res = iViewXAPI.iV_GetEvent(byref(eventData))
					stime = clock.get_time()
				# check if event is a fixation (SMI only supports
				# fixations at the moment)
				if eventData.eventType == 'F':
					# get fixation ending time and position
					timediff = stime - (int(eventData.startTime) / 1000.0)
					etime = timediff + (int(eventData.endTime) / 1000.0) # event times are in microseconds
					fixpos = (eventData.positionX, eventData.positionY)
					# return ending time and position
					return etime, fixpos

		# # # # #
		# PyGaze method
		
		else:
			
			# function assumes that a 'fixation' has ended when a deviation of more than fixtresh
			# from the initial 'fixation' position has been detected
			
			# get starting time and position
			stime, spos = self.wait_for_fixation_start()
			
			# loop until fixation has ended
			while True:
				# get new sample
				npos = self.sample() # get newest sample
				# check if sample is valid
				if self.is_valid_sample(npos):
				# check if sample deviates too much from starting position
					if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras
						# break loop if deviation is too high
						break
	
			return clock.get_time(), spos


	def wait_for_fixation_start(self):

		"""Returns starting time and position when a fixation is started;
		function assumes a 'fixation' has started when gaze position
		remains reasonably stable (i.e. when most deviant samples are
		within self.pxfixtresh) for five samples in a row (self.pxfixtresh
		is created in self.calibration, based on self.fixtresh, a property
		defined in self.__init__)
		
		arguments
		None
		
		returns
		time, gazepos	-- time is the starting time in milliseconds (from
					   expstart), gazepos is a (x,y) gaze position
					   tuple of the position from which the fixation
					   was initiated
		"""
		
		# # # # #
		# SMI method

		if self.eventdetection == 'native':
			
			# print warning, since SMI does not have fixation-start
			# detection built into its API (only fixation ending)
			
			print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer fixation START detection (only \
				fixation ENDING); PyGaze algorithm will be used")
			
			
		# # # # #
		# PyGaze method
		
		# function assumes a 'fixation' has started when gaze position
		# remains reasonably stable for self.fixtimetresh
		
		# get starting position
		spos = self.sample()
		while not self.is_valid_sample(spos):
			spos = self.sample()
		
		# get starting time
		t0 = clock.get_time()

		# wait for reasonably stable position
		moving = True
		while moving:
			# get new sample
			npos = self.sample()
			# check if sample is valid
			if self.is_valid_sample(npos):
				# check if new sample is too far from starting position
				if (npos[0]-spos[0])**2 + (npos[1]-spos[1])**2 > self.pxfixtresh**2: # Pythagoras
					# if so, reset starting position and time
					spos = copy.copy(npos)
					t0 = clock.get_time()
				# if new sample is close to starting sample
				else:
					# get timestamp
					t1 = clock.get_time()
					# check if fixation time threshold has been surpassed
					if t1 - t0 >= self.fixtimetresh:
						# return time and starting position
						return t1, spos


	def wait_for_saccade_end(self):

		"""Returns ending time, starting and end position when a saccade is
		ended; based on Dalmaijer et al. (2013) online saccade detection
		algorithm
		
		arguments
		None
		
		returns
		endtime, startpos, endpos	-- endtime in milliseconds (from 
							   expbegintime); startpos and endpos
							   are (x,y) gaze position tuples
		"""

		# # # # #
		# SMI method

		if self.eventdetection == 'native':
			
			# print warning, since SMI does not have saccade detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer saccade detection; PyGaze \
				algorithm will be used")

		# # # # #
		# PyGaze method
		
		# get starting position (no blinks)
		t0, spos = self.wait_for_saccade_start()
		# get valid sample
		prevpos = self.sample()
		while not self.is_valid_sample(prevpos):
			prevpos = self.sample()
		# get starting time, intersample distance, and velocity
		t1 = clock.get_time()
		s = ((prevpos[0]-spos[0])**2 + (prevpos[1]-spos[1])**2)**0.5 # = intersample distance = speed in px/sample
		v0 = s / (t1-t0)

		# run until velocity and acceleration go below threshold
		saccadic = True
		while saccadic:
			# get new sample
			newpos = self.sample()
			t1 = clock.get_time()
			if self.is_valid_sample(newpos) and newpos != prevpos:
				# calculate distance
				s = ((newpos[0]-prevpos[0])**2 + (newpos[1]-prevpos[1])**2)**0.5 # = speed in pixels/sample
				# calculate velocity
				v1 = s / (t1-t0)
				# calculate acceleration
				a = (v1-v0) / (t1-t0) # acceleration in pixels/sample**2 (actually is v1-v0 / t1-t0; but t1-t0 = 1 sample)
				# check if velocity and acceleration are below threshold
				if v1 < self.pxspdtresh and (a > -1*self.pxacctresh and a < 0):
					saccadic = False
					epos = newpos[:]
					etime = clock.get_time()
				# update previous values
				t0 = copy.copy(t1)
				v0 = copy.copy(v1)
			# update previous sample
			prevpos = newpos[:]

		return etime, spos, epos


	def wait_for_saccade_start(self):

		"""Returns starting time and starting position when a saccade is
		started; based on Dalmaijer et al. (2013) online saccade detection
		algorithm
		
		arguments
		None
		
		returns
		starttime, startpos	-- starttime in milliseconds (from expbegintime);
					   startpos is an (x,y) gaze position tuple
		"""

		# # # # #
		# SMI method

		if self.eventdetection == 'native':
			
			# print warning, since SMI does not have saccade detection
			# built into its API
			
			print("WARNING! 'native' event detection has been selected, \
				but SMI does not offer saccade detection; PyGaze \
				algorithm will be used")

		# # # # #
		# PyGaze method
		
		# get starting position (no blinks)
		newpos = self.sample()
		while not self.is_valid_sample(newpos):
			newpos = self.sample()
		# get starting time, position, intersampledistance, and velocity
		t0 = clock.get_time()
		prevpos = newpos[:]
		s = 0
		v0 = 0

		# get samples
		saccadic = False
		while not saccadic:
			# get new sample
			newpos = self.sample()
			t1 = clock.get_time()
			if self.is_valid_sample(newpos) and newpos != prevpos:
				# check if distance is larger than precision error
				sx = newpos[0]-prevpos[0]; sy = newpos[1]-prevpos[1]
				if (sx/self.pxdsttresh[0])**2 + (sy/self.pxdsttresh[1])**2 > self.weightdist: # weighted distance: (sx/tx)**2 + (sy/ty)**2 > 1 means movement larger than RMS noise
					# calculate distance
					s = ((sx)**2 + (sy)**2)**0.5 # intersampledistance = speed in pixels/ms
					# calculate velocity
					v1 = s / (t1-t0)
					# calculate acceleration
					a = (v1-v0) / (t1-t0) # acceleration in pixels/ms**2
					# check if either velocity or acceleration are above threshold values
					if v1 > self.pxspdtresh or a > self.pxacctresh:
						saccadic = True
						spos = prevpos[:]
						stime = clock.get_time()
					# update previous values
					t0 = copy.copy(t1)
					v0 = copy.copy(v1)

				# update previous sample
				prevpos = newpos[:]

		return stime, spos
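	# Worked illustration of the weighted-distance check above (values are
	# assumptions, not from the source): with pxdsttresh = (2, 2) px of RMS
	# noise and weightdist = 10, a displacement of (8, 3) px yields
	# (8/2)**2 + (3/2)**2 = 18.25 > 10 and counts as real movement, whereas
	# a (2, 2) px displacement yields 1 + 1 = 2 and is treated as noise.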
	
	
	def is_valid_sample(self, gazepos):
		
		"""Checks if the sample provided is valid, based on SMI specific
		criteria (for internal use)
		
		arguments
		gazepos		--	a (x,y) gaze position tuple, as returned by
						self.sample()
		
		returns
		valid			--	a Boolean: True on a valid sample, False on
						an invalid sample
		"""
		
		# return False if a sample is invalid
		if gazepos == (-1,-1):
			return False
		# sometimes, on SMI devices, invalid samples can actually contain
		# numbers; these do not reflect an actual gaze position, so they
		# should be treated as invalid as well
		elif sum(gazepos) < 10 and 0.0 in gazepos:
			return False
		
		# in any other case, the sample is valid
		return True
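	# Illustrative checks (not part of the original source):
	#     is_valid_sample((-1, -1))       -> False  (explicit error sample)
	#     is_valid_sample((0.0, 3.2))     -> False  (near-origin junk sample)
	#     is_valid_sample((512.0, 384.0)) -> True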
Example #39
0
    def __init__(self,
                 display,
                 ip='127.0.0.1',
                 sendport=4444,
                 receiveport=5555,
                 logfile=settings.LOGFILE,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 **args):
        """Initializes the SMItracker object
		
		arguments
		display	-- a pygaze.display.Display instance
		
		keyword arguments
		ip		-- internal ip address for iViewX (default = 
				   '127.0.0.1')
		sendport	-- port number for iViewX sending (default = 4444)
		receiveport	-- port number for iViewX receiving (default = 5555)
		logfile	-- logfile name (string value); note that this is the
				   name for the SMI logfile, NOT the .idf file
				   (default = LOGFILE)
		"""

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, SMITracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        # object properties
        self.disp = display
        self.screen = Screen()
        self.dispsize = settings.DISPSIZE  # display size in pixels
        self.screensize = settings.SCREENSIZE  # display size in cm
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # output file properties
        self.outputfile = logfile
        self.description = "experiment"  # TODO: EXPERIMENT NAME
        self.participant = "participant"  # TODO: PP NAME

        # eye tracker properties
        self.connected = False
        self.recording = False
        self.eye_used = 0  # 0=left, 1=right, 2=binocular
        self.left_eye = 0
        self.right_eye = 1
        self.binocular = 2
        self.errdist = 2  # degrees; maximal error for drift correction
        self.maxtries = 100  # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.eventdetection = eventdetection
        self.set_detection_type(self.eventdetection)
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

        # set logger
        res = iViewXAPI.iV_SetLogger(c_int(1),
                                     c_char_p(logfile + '_SMILOG.txt'))
        if res != 1:
            err = errorstring(res)
            raise Exception(
                "Error in libsmi.SMItracker.__init__: failed to set logger; %s"
                % err)
        # first logger argument is for logging type (I'm guessing these are decimal bit codes)
        # LOG status					bitcode
        # 1 = LOG_LEVEL_BUG			 00001
        # 2 = LOG_LEVEL_iV_FCT		  00010
        # 4 = LOG_LEVEL_ETCOM		   00100
        # 8 = LOG_LEVEL_ALL			 01000
        # 16 = LOG_LEVEL_IV_COMMAND	 10000
        # these can be used together, using a bitwise or, e.g.: 1|2|4 (bitcode 00111)
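        # Illustrative combination (not part of the original source): passing
        # c_int(1 | 2 | 4) instead of c_int(1) above would log bugs, iView
        # function calls and ET communication together (bitcode 00111).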

        # connect to iViewX
        res = iViewXAPI.iV_Connect(c_char_p(ip), c_int(sendport), c_char_p(ip),
                                   c_int(receiveport))
        if res == 1:
            res = iViewXAPI.iV_GetSystemInfo(byref(systemData))
            self.samplerate = systemData.samplerate
            self.sampletime = 1000.0 / self.samplerate
            if res != 1:
                err = errorstring(res)
                raise Exception(
                    "Error in libsmi.SMItracker.__init__: failed to get system information; %s"
                    % err)
        # handle connection errors
        else:
            self.connected = False
            err = errorstring(res)
            raise Exception(
                "Error in libsmi.SMItracker.__init__: establishing connection failed; %s"
                % err)

        # initiation report
        self.log("pygaze initiation report start")
        self.log("experiment: %s" % self.description)
        self.log("participant: %s" % self.participant)
        self.log("display resolution: %sx%s" %
                 (self.dispsize[0], self.dispsize[1]))
        self.log("display size in cm: %sx%s" %
                 (self.screensize[0], self.screensize[1]))
        self.log("samplerate: %s Hz" % self.samplerate)
        self.log("sampletime: %s ms" % self.sampletime)
        self.log("fixation threshold: %s degrees" % self.fixtresh)
        self.log("speed threshold: %s degrees/second" % self.spdtresh)
        self.log("acceleration threshold: %s degrees/second**2" %
                 self.accthresh)
        self.log("pygaze initiation report end")
Example #40
0
	def __init__(self, display, ip='127.0.0.1', sendport=4444, receiveport= \
		5555, logfile=LOGFILE, eventdetection=EVENTDETECTION, \
		saccade_velocity_threshold=35, saccade_acceleration_threshold=9500, \
		**args):

		"""Initializes the SMItracker object
		
		arguments
		display	-- a pygaze.display.Display instance
		
		keyword arguments
		ip		-- internal ip address for iViewX (default = 
				   '127.0.0.1')
		sendport	-- port number for iViewX sending (default = 4444)
		receiveport	-- port number for iViewX receiving (default = 5555)
		logfile	-- logfile name (string value); note that this is the
				   name for the SMI logfile, NOT the .idf file
				   (default = LOGFILE)
		"""

		# try to copy docstrings (but ignore it if it fails, as we do
		# not need it for actual functioning of the code)
		try:
			copy_docstr(BaseEyeTracker, SMITracker)
		except:
			# we're not even going to show a warning, since the copied
			# docstring is useful for code editors; these load the docs
			# in a non-verbose manner, so warning messages would be lost
			pass

		# object properties
		self.disp = display
		self.screen = Screen()
		self.dispsize = DISPSIZE # display size in pixels
		self.screensize = SCREENSIZE # display size in cm
		self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
		self.errorbeep = Sound(osc='saw',freq=100, length=100)
		
		# output file properties
		self.outputfile = logfile
		self.description = "experiment" # TODO: EXPERIMENT NAME
		self.participant = "participant" # TODO: PP NAME
		
		# eye tracker properties
		self.connected = False
		self.recording = False
		self.eye_used = 0 # 0=left, 1=right, 2=binocular
		self.left_eye = 0
		self.right_eye = 1
		self.binocular = 2
		self.errdist = 2 # degrees; maximal error for drift correction
		self.maxtries = 100 # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
		self.prevsample = (-1,-1)
		self.prevps = -1
		
		# event detection properties
		self.fixtresh = 1.5 # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
		self.fixtimetresh = 100 # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
		self.spdtresh = saccade_velocity_threshold # degrees per second; saccade velocity threshold
		self.accthresh = saccade_acceleration_threshold # degrees per second**2; saccade acceleration threshold
		self.eventdetection = eventdetection
		self.set_detection_type(self.eventdetection)
		self.weightdist = 10 # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

		# set logger
		res = iViewXAPI.iV_SetLogger(c_int(1), c_char_p(logfile + '_SMILOG.txt'))
		if res != 1:
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.__init__: failed to set logger; %s" % err)
		# first logger argument is for logging type (I'm guessing these are decimal bit codes)
		# LOG status					bitcode
		# 1 = LOG_LEVEL_BUG			 00001
		# 2 = LOG_LEVEL_iV_FCT		  00010
		# 4 = LOG_LEVEL_ETCOM		   00100
		# 8 = LOG_LEVEL_ALL			 01000
		# 16 = LOG_LEVEL_IV_COMMAND	 10000
		# these can be used together, using a bitwise or, e.g.: 1|2|4 (bitcode 00111)

		# connect to iViewX
		res = iViewXAPI.iV_Connect(c_char_p(ip), c_int(sendport), c_char_p(ip), c_int(receiveport))
		if res == 1:
			res = iViewXAPI.iV_GetSystemInfo(byref(systemData))
			self.samplerate = systemData.samplerate
			self.sampletime = 1000.0 / self.samplerate
			if res != 1:
				err = errorstring(res)
				raise Exception("Error in libsmi.SMItracker.__init__: failed to get system information; %s" % err)
		# handle connection errors
		else:
			self.connected = False
			err = errorstring(res)
			raise Exception("Error in libsmi.SMItracker.__init__: establishing connection failed; %s" % err)

		# initiation report
		self.log("pygaze initiation report start")
		self.log("experiment: %s" % self.description)
		self.log("participant: %s" % self.participant)
		self.log("display resolution: %sx%s" % (self.dispsize[0],self.dispsize[1]))
		self.log("display size in cm: %sx%s" % (self.screensize[0],self.screensize[1]))
		self.log("samplerate: %s Hz" % self.samplerate)
		self.log("sampletime: %s ms" % self.sampletime)
		self.log("fixation threshold: %s degrees" % self.fixtresh)
		self.log("speed threshold: %s degrees/second" % self.spdtresh)
		self.log("acceleration threshold: %s degrees/second**2" % self.accthresh)
		self.log("pygaze initiation report end")
Example #41
0
    def __init__(self, display, logfile=settings.LOGFILE, \
        alea_key=settings.ALEAKEY, \
        animated_calibration=settings.ALEAANIMATEDCALIBRATION, \
        eventdetection=settings.EVENTDETECTION, \
        saccade_velocity_threshold=35, \
        saccade_acceleration_threshold=9500, \
        blink_threshold=settings.BLINKTHRESH, \
        **args):
        """Initializes the AleaTracker object
        
        arguments
        display    -- a pygaze.display.Display instance
        
        keyword arguments
        logfile    -- logfile name (string value); note that this is the
                   name for the eye data log file (default = LOGFILE)
        """

        # try to copy docstrings (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, AleaTracker)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        # object properties
        self.disp = display
        self.screen = Screen()
        self.dispsize = self.disp.dispsize  # display size in pixels
        self.screensize = settings.SCREENSIZE  # display size in cm
        self.kb = Keyboard(keylist=['space', 'escape', 'q'], timeout=1)
        self.errorbeep = Sound(osc='saw', freq=100, length=100)

        # show a message
        self.screen.clear()
        self.screen.draw_text(
            text="Initialising the eye tracker, please wait...", fontsize=20)
        self.disp.fill(self.screen)
        self.disp.show()

        # output file properties
        self.outputfile = logfile + '.tsv'

        # calibration properties
        self.animated_calibration = animated_calibration == True

        # eye tracker properties
        self.connected = False
        self.recording = False
        self.errdist = 2  # degrees; maximal error for drift correction
        self.pxerrdist = 30  # initial error in pixels
        self.maxtries = 100  # number of samples obtained before giving up (for obtaining accuracy and tracker distance information, as well as starting or stopping recording)
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        self.fixtresh = 1.5  # degrees; maximal distance from fixation start (if gaze wanders beyond this, fixation has stopped)
        self.fixtimetresh = 100  # milliseconds; amount of time gaze has to linger within self.fixtresh to be marked as a fixation
        self.spdtresh = saccade_velocity_threshold  # degrees per second; saccade velocity threshold
        self.accthresh = saccade_acceleration_threshold  # degrees per second**2; saccade acceleration threshold
        self.blinkthresh = blink_threshold  # milliseconds; blink detection threshold used in PyGaze method
        self.eventdetection = eventdetection
        self.set_detection_type(self.eventdetection)
        self.weightdist = 10  # weighted distance, used for determining whether a movement is due to measurement error (1 is ok, higher is more conservative and will result in only larger saccades to be detected)

        # connect to the tracker
        self.alea = OGAleaTracker(alea_key, file_path=self.outputfile)

        # get info on the sample rate
        # TODO: Compute after streaming some samples?
        self.samplerate = 60.0
        self.sampletime = 1000.0 / self.samplerate

        # initiation report
        self.log("pygaze initiation report start")
        self.log("display resolution: {}x{}".format( \
            self.dispsize[0], self.dispsize[1]))
        self.log("display size in cm: {}x{}".format( \
            self.screensize[0], self.screensize[1]))
        self.log("samplerate: {} Hz".format(self.samplerate))
        self.log("sampletime: {} ms".format(self.sampletime))
        self.log("fixation threshold: {} degrees".format(self.fixtresh))
        self.log("speed threshold: {} degrees/second".format(self.spdtresh))
        self.log("acceleration threshold: {} degrees/second**2".format( \
            self.accthresh))
        self.log("pygaze initiation report end")
Example #42
0
class EyelinkGraphics(custom_display):

    """
	Implements the EyeLink graphics that are shown on the experimental PC, such
	as the camera image, and the calibration dots. This class only implements
	the drawing operations, and little to no of the logic behind the set-up,
	which is implemented in PyLink.
	"""

    def __init__(self, libeyelink, tracker):

        """
		Constructor.

		Arguments:
		libeyelink	--	A libeyelink object.
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

        pylink.EyeLinkCustomDisplay.__init__(self)

        # objects
        self.libeyelink = libeyelink
        self.display = libeyelink.display
        self.screen = Screen(disptype=DISPTYPE, mousevisible=False)
        self.kb = Keyboard(keylist=None, timeout=0)
        self.mouse = Mouse(timeout=0)
        if DISPTYPE == "pygame":
            self.kb.set_timeout(timeout=0.001)
            # If we are using a DISPTYPE that cannot be used directly, we have to
            # save the camera image to a temporary file on each frame.
            # if DISPTYPE not in ('pygame', 'psychopy'):
        import tempfile
        import os

        self.tmp_file = os.path.join(tempfile.gettempdir(), "__eyelink__.jpg")
        # drawing properties
        self.xc = self.display.dispsize[0] / 2
        self.yc = self.display.dispsize[1] / 2
        self.extra_info = True
        self.ld = 40  # line distance
        self.fontsize = libeyelink.fontsize
        self.title = ""
        self.display_open = True
        # menu
        self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False)
        self.menuscreen.draw_text(
            text="Eyelink calibration menu",
            pos=(self.xc, self.yc - 6 * self.ld),
            center=True,
            font="mono",
            fontsize=int(2 * self.fontsize),
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="%s (pygaze %s, pylink %s)" % (libeyelink.eyelink_model, pygaze.version, pylink.__version__),
            pos=(self.xc, self.yc - 5 * self.ld),
            center=True,
            font="mono",
            fontsize=int(0.8 * self.fontsize),
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press C to calibrate",
            pos=(self.xc, self.yc - 3 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press V to validate",
            pos=(self.xc, self.yc - 2 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press A to auto-threshold",
            pos=(self.xc, self.yc - 1 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press I to toggle extra info in camera image",
            pos=(self.xc, self.yc - 0 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press Enter to show camera image",
            pos=(self.xc, self.yc + 1 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="(then change between images using the arrow keys)",
            pos=(self.xc, self.yc + 2 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press Escape to abort experiment",
            pos=(self.xc, self.yc + 4 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press Q to exit menu",
            pos=(self.xc, self.yc + 5 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        # beeps
        self.__target_beep__ = Sound(osc="sine", freq=440, length=50, attack=0, decay=0, soundfile=None)
        self.__target_beep__done__ = Sound(osc="sine", freq=880, length=200, attack=0, decay=0, soundfile=None)
        self.__target_beep__error__ = Sound(osc="sine", freq=220, length=200, attack=0, decay=0, soundfile=None)
        # Colors
        self.color = {
            pylink.CR_HAIR_COLOR: pygame.Color("white"),
            pylink.PUPIL_HAIR_COLOR: pygame.Color("white"),
            pylink.PUPIL_BOX_COLOR: pygame.Color("green"),
            pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color("red"),
            pylink.MOUSE_CURSOR_COLOR: pygame.Color("red"),
            "font": pygame.Color("white"),
        }
        # Font
        pygame.font.init()
        self.font = pygame.font.SysFont("Courier New", 11)
        # further properties
        self.state = None
        self.pal = None

        self.size = (0, 0)
        self.set_tracker(tracker)
        self.last_mouse_state = -1
        self.bit64 = "64bit" in platform.architecture()
        self.imagebuffer = self.new_array()

    def close(self):

        """
		Is called when the connection and display are shutting down.		
		"""

        self.display_open = False

    def new_array(self):

        """
		Creates a new array with a system-specific format.
		
		Returns:
		An array.
		"""

        # On 64 bit Linux, we need to use an unsigned int data format.
        # <https://www.sr-support.com/showthread.php?3215-Visual-glitch-when-/
        # sending-eye-image-to-display-PC&highlight=ubuntu+pylink>
        if os.name == "posix" and self.bit64:
            return array.array("I")
        return array.array("L")
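        # Likely rationale (assumption, not stated in the source): on LP64
        # systems array('L') items are 8 bytes wide, while the packed pixel
        # values appended in draw_image_line() are 32-bit, so the 4-byte 'I'
        # format keeps the buffer layout consistent for pygame.image.fromstring.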

    def set_tracker(self, tracker):

        """
		Connects the tracker to the graphics environment.

		Arguments:
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

        self.tracker = tracker
        self.tracker_version = tracker.getTrackerVersion()
        if self.tracker_version >= 3:
            self.tracker.sendCommand("enable_search_limits=YES")
            self.tracker.sendCommand("track_search_limits=YES")
            self.tracker.sendCommand("autothreshold_click=YES")
            self.tracker.sendCommand("autothreshold_repeat=YES")
            self.tracker.sendCommand("enable_camera_position_detect=YES")

    def setup_cal_display(self):

        """
		Sets up the initial calibration display, which contains a menu with
		instructions.
		"""

        # show instructions
        self.display.fill(self.menuscreen)
        self.display.show()

    def exit_cal_display(self):

        """Exits calibration display."""

        self.clear_cal_display()

    def record_abort_hide(self):

        """TODO: What does this do?"""

        pass

    def clear_cal_display(self):

        """Clears the calibration display"""

        self.display.fill()
        self.display.show()

    def erase_cal_target(self):

        """TODO: What does this do?"""

        self.clear_cal_display()

    def draw_cal_target(self, x, y):

        """
		Draws calibration target.

		Arguments:
		x		--	The X coordinate of the target.
		y		--	The Y coordinate of the target.
		"""

        self.play_beep(pylink.CAL_TARG_BEEP)
        self.screen.clear()
        self.screen.draw_fixation(fixtype="dot", pos=(x, y))
        self.display.fill(screen=self.screen)
        self.display.show()

    def play_beep(self, beepid):

        """
		Plays a sound.

		Arguments:
		beepid		--	A number that identifies the sound.
		"""

        if beepid == pylink.CAL_TARG_BEEP:
            # For some reason, playing the beep here doesn't work, so we have
            # to play it when the calibration target is drawn.
            if EYELINKCALBEEP:
                self.__target_beep__.play()
        elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
            # show a picture
            self.screen.clear()
            self.screen.draw_text(
                text="calibration lost, press 'Enter' to return to menu",
                pos=(self.xc, self.yc),
                center=True,
                font="mono",
                fontsize=self.fontsize,
                antialias=True,
            )
            self.display.fill(self.screen)
            self.display.show()
            # play beep
            self.__target_beep__error__.play()
        elif beepid == pylink.CAL_GOOD_BEEP:
            self.screen.clear()
            if self.state == "calibration":
                self.screen.draw_text(
                    text="Calibration succesfull, press 'v' to validate",
                    pos=(self.xc, self.yc),
                    center=True,
                    font="mono",
                    fontsize=self.fontsize,
                    antialias=True,
                )
            elif self.state == "validation":
                self.screen.draw_text(
                    text="Validation succesfull, press 'Enter' to return to menu",
                    pos=(self.xc, self.yc),
                    center=True,
                    font="mono",
                    fontsize=self.fontsize,
                    antialias=True,
                )
            else:
                self.screen.draw_text(
                    text="Press 'Enter' to return to menu",
                    pos=(self.xc, self.yc),
                    center=True,
                    font="mono",
                    fontsize=self.fontsize,
                    antialias=True,
                )
                # show screen
            self.display.fill(self.screen)
            self.display.show()
            # play beep
            self.__target_beep__done__.play()
        else:  # 	DC_GOOD_BEEP	or DC_TARG_BEEP
            pass

    def draw_line(self, x1, y1, x2, y2, colorindex):

        """
		Contrary to what the function name suggests, this draws a single pixel. I.e.
		the end coordinates are always exactly one pixel away from the start
		coordinates.
		
		Arguments:
		x1			--	The starting x.
		y1			--	The starting y.
		x2			--	The end x.
		y2			--	The end y.
		colorIndex	--	A color index.
		"""

        x1 = int(self.scale * x1)
        y1 = int(self.scale * y1)
        x2 = int(self.scale * x2)
        y2 = int(self.scale * y2)
        pygame.draw.line(self.cam_img, self.color[colorindex], (x1, y1), (x2, y2))

    def draw_lozenge(self, x, y, w, h, colorindex):

        """
		desc:
			Draws a rectangle.
			
		arguments:
			x:
				desc:	X coordinate.
				type:	int
			y:
				desc:	Y coordinate.
				type:	int
			w:
				desc:	A width.
				type:	int
			h:
				desc:	A height.
				type:	int
			colorindex:
				desc:	A colorindex.
				type:	int
		"""

        x = int(self.scale * x)
        y = int(self.scale * y)
        w = int(self.scale * w)
        h = int(self.scale * h)
        pygame.draw.rect(self.cam_img, self.color[colorindex], (x, y, w, h), 2)

    def draw_title(self):

        """
		desc:
			Draws title info.
		"""

        y = 0
        for line in self.title:
            surf = self.font.render(line, 0, self.color["font"])
            self.cam_img.blit(surf, (1, y))
            y += 12

    def get_mouse_state(self):

        """
		desc:
			Gets the mouse position and state.
			
		returns:
			desc:	A (pos, state) tuple.
			type:	tuple.		
		"""

        button, pos, time = self.mouse.get_clicked()
        if button == None:
            button = -1
        if pos == None:
            pos = self.mouse.get_pos()
        return pos, button

    def get_input_key(self):

        """
		Gets an input key.

		Returns:
		A list containing a single pylink key identifier.
		"""

        # Don't try to collect key presses when the display is no longer
        # available. This is necessary, because pylink polls key presses during
        # file transfer, which generally occurs after the display has been
        # closed.
        if not self.display_open:
            return None
        try:
            key, time = self.kb.get_key(keylist=None, timeout="default")
        except:
            self.esc_pressed = True
            key = "q"
        if key == None:
            return None
            # Escape functions as a 'q' with the additional esc_pressed flag
        if key == "escape":
            key = "q"
            self.esc_pressed = True
            # Process regular keys
        if key == "return":
            keycode = pylink.ENTER_KEY
            self.state = None
        elif key == "space":
            keycode = ord(" ")
        elif key == "q":
            keycode = pylink.ESC_KEY
            self.state = None
        elif key == "c":
            keycode = ord("c")
            self.state = "calibration"
        elif key == "v":
            keycode = ord("v")
            self.state = "validation"
        elif key == "a":
            keycode = ord("a")
        elif key == "i":
            self.extra_info = not self.extra_info
            keycode = 0
        elif key == "up":
            keycode = pylink.CURS_UP
        elif key == "down":
            keycode = pylink.CURS_DOWN
        elif key == "left":
            keycode = pylink.CURS_LEFT
        elif key == "right":
            keycode = pylink.CURS_RIGHT
        else:
            keycode = 0
            # Convert key to PyLink keycode and return
        return [pylink.KeyInput(keycode, 0)]  # 0 = pygame.KMOD_NONE

    def exit_image_display(self):

        """Exits the image display."""

        self.clear_cal_display()

    def alert_printf(self, msg):

        """
		Prints alert message.

		Arguments:
		msg		--	The message to be printed.
		"""

        print "eyelink_graphics.alert_printf(): %s" % msg

    def setup_image_display(self, width, height):

        """
		Initializes the buffer that will contain the camera image.

		Arguments:
		width		--	The width of the image.
		height		--	The height of the image.
		"""

        self.size = width, height
        self.clear_cal_display()
        self.last_mouse_state = -1
        self.imagebuffer = self.new_array()

    def image_title(self, text):

        """
		Sets the current image title.

		Arguments:
		text	--	An image title.
		"""

        while ": " in text:
            text = text.replace(": ", ":")
        self.title = text.split()

    def draw_image_line(self, width, line, totlines, buff):

        """
		Draws a single eye video frame, line by line.

		Arguments:

		width		--	Width of the video.
		line		--	Line nr of current line.
		totlines	--	Total lines in video.
		buff		--	Frame buffer.
		Note: the full camera image is usually 192x160 px.
		"""

        # If the buffer hasn't been filled yet, add a line.
        for i in range(width):
            try:
                self.imagebuffer.append(self.pal[buff[i]])
            except:
                pass
                # If the buffer is full, push it to the display.
        if line == totlines:
            self.scale = totlines / 320.0
            self._size = int(self.scale * self.size[0]), int(self.scale * self.size[1])
            # Convert the image buffer to a pygame image, save it ...
            self.cam_img = pygame.image.fromstring(self.imagebuffer.tostring(), self._size, "RGBX")
            if self.extra_info:
                self.draw_cross_hair()
                self.draw_title()
            pygame.image.save(self.cam_img, self.tmp_file)
            # ... and then show the image.
            self.screen.clear()
            self.screen.draw_image(self.tmp_file, scale=1.5 / self.scale)
            self.display.fill(self.screen)
            self.display.show()
            # Clear the buffer for the next round!
            self.imagebuffer = self.new_array()

    def set_image_palette(self, r, g, b):

        """
		Sets the image palette.

		TODO: What this function actually does is highly mysterious. Figure it
		out!

		Arguments:
		r		--	The red channel.
		g		--	The green channel.
		b		--	The blue channel.
		"""

        self.imagebuffer = self.new_array()
        self.clear_cal_display()
        sz = len(r)
        i = 0
        self.pal = []
        while i < sz:
            rf = int(b[i])
            gf = int(g[i])
            bf = int(r[i])
            self.pal.append((rf << 16) | (gf << 8) | (bf))
            i += 1
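        # A possible reading (assumption, not confirmed by the source): each
        # palette entry packs the three 8-bit channels into a single 24-bit
        # integer, with the red and blue channels swapped so that the stored
        # values match the 'RGBX' byte order used later in draw_image_line().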
mouse = Mouse(mousebuttonlist=None, timeout=None)

# Initialise a log.
log = Logfile()
header = ['trialnr', 'nstim', 'fixonset', 'stimonset', 'maintenanceonset', \
    'probeonset', 'RT', 'response']
for i in range(max(NSTIM)):
    header.extend(['stimx%d' % (i), 'stimy%d' % (i), 'stimori%d' % (i), \
        'stimerror%d' % (i)])
header.extend(['E', 'X', 'T'])
for i in range(max(NSTIM)-1):
    header.append('NT%d' % i)
log.write(header)

# Initialise a blank Screen for ad-hoc drawing.
scr = Screen()

# Initialise a blank Screen.
blankscr = Screen()

# Initialise a fixation Screen.
fixscr = Screen()
fixscr.draw_fixation(fixtype=FIXTYPE, diameter=FIXSIZE, pw=FIXPW)

# Initialise stimulus and probe Screens.
stimscr = {}
probescr = {}
for nstim in NSTIM:
    locs = nstim * [DISPCENTRE]
    oris = nstim * [0]
    stimscr[nstim] = StimScreen(nstim, locs, oris, \
Example #44
0
    def __init__(self, libeyelink, tracker):

        """
		Constructor.

		Arguments:
		libeyelink	--	A libeyelink object.
		tracker		--	A tracker object as returned by pylink.EyeLink().
		"""

        pylink.EyeLinkCustomDisplay.__init__(self)

        # objects
        self.libeyelink = libeyelink
        self.display = libeyelink.display
        self.screen = Screen(disptype=DISPTYPE, mousevisible=False)
        self.kb = Keyboard(keylist=None, timeout=0)
        self.mouse = Mouse(timeout=0)
        if DISPTYPE == "pygame":
            self.kb.set_timeout(timeout=0.001)
            # If we are using a DISPTYPE that cannot be used directly, we have to
            # save the camera image to a temporary file on each frame.
            # if DISPTYPE not in ('pygame', 'psychopy'):
        import tempfile
        import os

        self.tmp_file = os.path.join(tempfile.gettempdir(), "__eyelink__.jpg")
        # drawing properties
        self.xc = self.display.dispsize[0] / 2
        self.yc = self.display.dispsize[1] / 2
        self.extra_info = True
        self.ld = 40  # line distance
        self.fontsize = libeyelink.fontsize
        self.title = ""
        self.display_open = True
        # menu
        self.menuscreen = Screen(disptype=DISPTYPE, mousevisible=False)
        self.menuscreen.draw_text(
            text="Eyelink calibration menu",
            pos=(self.xc, self.yc - 6 * self.ld),
            center=True,
            font="mono",
            fontsize=int(2 * self.fontsize),
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="%s (pygaze %s, pylink %s)" % (libeyelink.eyelink_model, pygaze.version, pylink.__version__),
            pos=(self.xc, self.yc - 5 * self.ld),
            center=True,
            font="mono",
            fontsize=int(0.8 * self.fontsize),
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press C to calibrate",
            pos=(self.xc, self.yc - 3 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press V to validate",
            pos=(self.xc, self.yc - 2 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press A to auto-threshold",
            pos=(self.xc, self.yc - 1 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press I to toggle extra info in camera image",
            pos=(self.xc, self.yc - 0 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press Enter to show camera image",
            pos=(self.xc, self.yc + 1 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="(then change between images using the arrow keys)",
            pos=(self.xc, self.yc + 2 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press Escape to abort experiment",
            pos=(self.xc, self.yc + 4 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        self.menuscreen.draw_text(
            text="Press Q to exit menu",
            pos=(self.xc, self.yc + 5 * self.ld),
            center=True,
            font="mono",
            fontsize=self.fontsize,
            antialias=True,
        )
        # beeps
        self.__target_beep__ = Sound(osc="sine", freq=440, length=50, attack=0, decay=0, soundfile=None)
        self.__target_beep__done__ = Sound(osc="sine", freq=880, length=200, attack=0, decay=0, soundfile=None)
        self.__target_beep__error__ = Sound(osc="sine", freq=220, length=200, attack=0, decay=0, soundfile=None)
        # Colors
        self.color = {
            pylink.CR_HAIR_COLOR: pygame.Color("white"),
            pylink.PUPIL_HAIR_COLOR: pygame.Color("white"),
            pylink.PUPIL_BOX_COLOR: pygame.Color("green"),
            pylink.SEARCH_LIMIT_BOX_COLOR: pygame.Color("red"),
            pylink.MOUSE_CURSOR_COLOR: pygame.Color("red"),
            "font": pygame.Color("white"),
        }
        # Font
        pygame.font.init()
        self.font = pygame.font.SysFont("Courier New", 11)
        # further properties
        self.state = None
        self.pal = None

        self.size = (0, 0)
        self.set_tracker(tracker)
        self.last_mouse_state = -1
        self.bit64 = "64bit" in platform.architecture()
        self.imagebuffer = self.new_array()
Example #45
0
    def __init__(self,
                 display,
                 resolution=settings.DISPSIZE,
                 data_file=settings.LOGFILENAME + ".edf",
                 fg_color=settings.FGC,
                 bg_color=settings.BGC,
                 eventdetection=settings.EVENTDETECTION,
                 saccade_velocity_threshold=35,
                 saccade_acceleration_threshold=9500,
                 blink_threshold=settings.BLINKTHRESH,
                 force_drift_correct=True,
                 pupil_size_mode=settings.EYELINKPUPILSIZEMODE,
                 **args):
        """See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""

        # try to import copy docstring (but ignore it if it fails, as we do
        # not need it for actual functioning of the code)
        try:
            copy_docstr(BaseEyeTracker, libeyelink)
        except:
            # we're not even going to show a warning, since the copied
            # docstring is useful for code editors; these load the docs
            # in a non-verbose manner, so warning messages would be lost
            pass

        global _eyelink

        # Make sure that we have a valid data file. The local_data_file may
        # contain a folder. The eyelink_data_file is only a basename, i.e.
        # without folder. The eyelink_data_file must be at most eight characters
        # and end with a `.edf` extension.

        self.local_data_file = data_file
        self.eyelink_data_file = os.path.basename(data_file)
        stem, ext = os.path.splitext(self.eyelink_data_file)
        if len(stem) > 8 or ext.lower() != '.edf':
            raise Exception(
                "The EyeLink cannot handle filenames longer than eight "
                "characters (excluding '.edf' extension).")

        # properties
        self.display = display
        self.fontsize = 18
        self.scr = Screen(disptype=settings.DISPTYPE, mousevisible=False)
        self.kb = Keyboard(keylist=["escape", "q"], timeout=1)
        self.resolution = resolution
        self.recording = False
        self.saccade_velocity_treshold = saccade_velocity_threshold
        self.saccade_acceleration_treshold = saccade_acceleration_threshold
        self.blink_threshold = blink_threshold
        self.eye_used = None
        self.left_eye = 0
        self.right_eye = 1
        self.binocular = 2
        self.pupil_size_mode = pupil_size_mode
        self.prevsample = (-1, -1)
        self.prevps = -1

        # event detection properties
        # degrees; maximal distance from fixation start (if gaze wanders beyond
        # this, fixation has stopped)
        self.fixtresh = 1.5
        # milliseconds; amount of time gaze has to linger within self.fixtresh
        # to be marked as a fixation
        self.fixtimetresh = 100
        # degrees per second; saccade velocity threshold
        self.spdtresh = self.saccade_velocity_treshold
        # degrees per second**2; saccade acceleration threshold
        self.accthresh = self.saccade_acceleration_treshold
        self.set_detection_type(eventdetection)
        # weighted distance, used for determining whether a movement is due to
        # measurement error (1 is ok, higher is more conservative and will
        # result in only larger saccades to be detected)
        self.weightdist = 10
        # distance between participant and screen in cm
        self.screendist = settings.SCREENDIST
        # display size in cm
        self.screensize = settings.SCREENSIZE
        self.pixpercm = (self.resolution[0]/float(self.screensize[0]) + \
         self.resolution[1]/float(self.screensize[1])) / 2.0
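        # Worked illustration (assumed values, not from the source): a
        # 1920x1080 px display measuring 53.1x29.9 cm gives roughly
        # (36.2 + 36.1) / 2 ~= 36.1 pixels per cm.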
        # only initialize eyelink once
        if _eyelink == None:
            try:
                _eyelink = pylink.EyeLink()
            except:
                raise Exception(
                    "Error in libeyelink.libeyelink.__init__(): Failed to "
                    "connect to the tracker!")
        # determine software version of tracker
        self.tracker_software_ver = 0
        self.eyelink_ver = pylink.getEYELINK().getTrackerVersion()
        if self.eyelink_ver == 3:
            tvstr = pylink.getEYELINK().getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            self.tracker_software_ver = int(float(tvstr[(vindex + \
             len("EYELINK CL")):].strip()))
        if self.eyelink_ver == 1:
            self.eyelink_model = 'EyeLink I'
        elif self.eyelink_ver == 2:
            self.eyelink_model = 'EyeLink II'
        elif self.eyelink_ver == 3:
            self.eyelink_model = 'EyeLink 1000'
        else:
            self.eyelink_model = 'EyeLink (model unknown)'
        # Open graphics
        self.eyelink_graphics = EyelinkGraphics(self, _eyelink)
        pylink.openGraphicsEx(self.eyelink_graphics)
        # Optionally force drift correction. For some reason this must be done
        # as (one of) the first things, otherwise a segmentation fault occurs.
        if force_drift_correct:
            try:
                self.send_command('driftcorrect_cr_disable = OFF')
            except:
                print('Failed to force drift correction (EyeLink 1000 only)')
        # Set pupil-size mode
        if self.pupil_size_mode == 'area':
            pylink.getEYELINK().setPupilSizeDiameter(False)
        elif self.pupil_size_mode == 'diameter':
            pylink.getEYELINK().setPupilSizeDiameter(True)
        else:
            raise Exception(
             "pupil_size_mode should be 'area' or 'diameter', not %s" \
             % self.pupil_size_mode)
        pylink.getEYELINK().openDataFile(self.eyelink_data_file)
        pylink.flushGetkeyQueue()
        pylink.getEYELINK().setOfflineMode()
        # notify eyelink of display resolution
        self.send_command("screen_pixel_coords = 0 0 %d %d" % \
         (self.resolution[0], self.resolution[1]))
        # get some configuration stuff
        if self.eyelink_ver >= 2:
            self.send_command("select_parser_configuration 0")
            if self.eyelink_ver == 2:  # turn off scenelink camera stuff
                self.send_command("scene_camera_gazemap = NO")
        # set EDF file contents (this specifies which data is written to the EDF
        # file)
        self.send_command(
            "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
        )
        if self.tracker_software_ver >= 4:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        # set link data (this specifies which data is sent through the link and
        # thus can be used in gaze contingent displays)
        self.send_command(
            "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if self.tracker_software_ver >= 4:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
            )
        else:
            self.send_command(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
        # The exact purpose of this command is not documented; according to
        # Sebastiaan Mathot, it may assign button 5 to accepting the target
        # fixation (i.e. to ending drift correction).
        self.send_command("button_function 5 'accept_target_fixation'")

        if not self.connected():
            raise Exception(
                "Error in libeyelink.libeyelink.__init__(): Failed to connect "
                "to the eyetracker!")
Example #46
0
class Dummy(DumbDummy):

	"""A dummy class to run experiments in dummy mode, where eye movements are simulated by the mouse"""
	

	def __init__(self, display):

		"""Initiates an eyetracker dummy object, that simulates gaze position using the mouse
		
		arguments
		display		--	a pygaze display.Display instance
		
		keyword arguments
		None
		"""

		# try to copy docstrings (but ignore it if it fails, as we do
		# not need it for actual functioning of the code)
		try:
			copy_docstr(BaseEyeTracker, Dummy)
		except:
			# we're not even going to show a warning, since the copied
			# docstring is useful for code editors; these load the docs
			# in a non-verbose manner, so warning messages would be lost
			pass

		self.recording = False
		self.blinking = False
		self.bbpos = (settings.DISPSIZE[0]/2, settings.DISPSIZE[1]/2)
		self.resolution = settings.DISPSIZE[:]
		self.simulator = Mouse(disptype=settings.DISPTYPE, mousebuttonlist=None,
			timeout=2, visible=False)
		self.kb = Keyboard(disptype=settings.DISPTYPE, keylist=None,
			timeout=None)
		self.angrybeep = Sound(osc='saw',freq=100, length=100, attack=0,
			decay=0, soundfile=None)
		self.display = display
		self.screen = Screen(disptype=settings.DISPTYPE, mousevisible=False)

	def calibrate(self):

		"""Dummy calibration"""

		print("Calibration would now take place")
		clock.pause(1000)

	def drift_correction(self, pos=None, fix_triggered=False):

		"""Dummy drift correction"""

		print("Drift correction would now take place")
		
		if fix_triggered:
			return self.fix_triggered_drift_correction(pos)
		
		if pos is None:
			pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2

		# show mouse
		self.simulator.set_visible(visible=True)
		
		# show fixation dot
		self.draw_drift_correction_target(pos[0], pos[1])

		# perform drift check
		errdist = 60 # pixels (on a 1024x768px and 39.9x29.9cm monitor at 67 cm, this is about 2 degrees of visual angle)
		pressed = None
		while True:
			# check for keyboard input
			pressed, presstime = self.kb.get_key(keylist=['q','escape','space'], timeout=1)
			
			# quit key
			if pressed in ['q','escape']:
				# hide mouse
				self.simulator.set_visible(visible=False)
				return False
				
			# space bar
			elif pressed == 'space':
				# get sample
				gazepos = self.sample()
				# sample is close enough to fixation dot
				if ((gazepos[0]-pos[0])**2  + (gazepos[1]-pos[1])**2)**0.5 < errdist:
					# hide mouse
					self.simulator.set_visible(visible=False)
					return True
				# sample is NOT close enough to fixation dot
				else:
					# show discontent
					self.angrybeep.play()

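	# A hedged usage sketch (not part of the original class): drift correction
	# is commonly retried until it succeeds, recalibrating on failure, e.g.:
	#
	#     while not tracker.drift_correction():
	#         tracker.calibrate()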

	def fix_triggered_drift_correction(self, pos=None, min_samples=30, max_dev=60, reset_threshold=10):

		"""Dummy drift correction (fixation triggered)"""

		print("Drift correction (fixation triggered) would now take place")

		if pos is None:
			pos = settings.DISPSIZE[0] / 2, settings.DISPSIZE[1] / 2

		# show mouse
		self.simulator.set_visible(visible=True)

		# show fixation dot
		self.draw_drift_correction_target(pos[0], pos[1])

		while True:
			# loop until we have sufficient samples
			lx = []
			ly = []
			while len(lx) < min_samples:
	
				# pressing escape or q aborts the drift correction
				if self.kb.get_key(keylist=["escape", "q"], timeout=0)[0] is not None:
					self.recording = False
					print("Dummy.fix_triggered_drift_correction(): 'escape' or 'q' pressed")
					self.simulator.set_visible(visible=False)
					return False
	
				# collect a sample
				x, y = self.sample()
	
				if len(lx) == 0 or x != lx[-1] or y != ly[-1]:
	
					# if present sample deviates too much from previous sample, reset counting
					if len(lx) > 0 and (abs(x - lx[-1]) > reset_threshold or abs(y - ly[-1]) > reset_threshold):
						lx = []
						ly = []
	
					# collect samples
					else:
						lx.append(x)
						ly.append(y)

				# check if samples are within max. deviation
				if len(lx) == min_samples:
	
					avg_x = sum(lx) / len(lx)
					avg_y = sum(ly) / len(ly)
					d = ((avg_x - pos[0]) ** 2 + (avg_y - pos[1]) ** 2)**0.5
	
					if d < max_dev:
						self.simulator.set_visible(visible=False)
						return True
					else:
						lx = []
						ly = []
						

	def start_recording(self):

		"""Dummy for starting recording, prints what would have been the recording start"""

		self.simulator.set_visible(visible=True)
		dumrectime = clock.get_time()

		self.recording = True
		
		print("Recording would have started at: " + str(dumrectime))


	def stop_recording(self):

		"""Dummy for stopping recording, prints what would have been the recording end"""

		self.simulator.set_visible(visible=False)
		dumrectime = clock.get_time()

		self.recording = False

		print("Recording would have stopped at: " + str(dumrectime))


	def close(self):

		"""Dummy for closing connection with eyetracker, prints what would have been connection closing time"""

		if self.recording:
			self.stop_recording()
		
		closetime = clock.get_time()

		print("eyetracker connection would have closed at: " + str(closetime))

	def pupil_size(self):
		
		"""Returns dummy pupil size"""
		
		return 19


	def sample(self):

		"""Returns simulated gaze position (=mouse position)"""

		if self.blinking:
			if self.simulator.get_pressed()[2]: # buttondown
				self.simulator.set_pos(pos=(self.bbpos[0],self.resolution[1])) # set position to blinking position
			elif not self.simulator.get_pressed()[2]: # buttonup
				self.simulator.set_pos(pos=self.bbpos) # set position to position before blinking
				self.blinking = False # 'blink' stopped

		elif not self.blinking:
			if self.simulator.get_pressed()[2]: # buttondown
				self.blinking = True # 'blink' started
				self.bbpos =  self.simulator.get_pos() # position before blinking
				self.simulator.set_pos(pos=(self.bbpos[0],self.resolution[1])) # set position to blinking position

		return self.simulator.get_pos()
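
	# A hedged polling sketch (not part of the original class): in dummy mode a
	# gaze-contingent loop can simply read the simulated gaze every few ms, e.g.
	#
	#     tracker.start_recording()
	#     t0 = clock.get_time()
	#     while clock.get_time() - t0 < 5000:  # e.g. a 5-second trial
	#         x, y = tracker.sample()
	#         # ... update the display based on (x, y) ...
	#         clock.pause(10)
	#     tracker.stop_recording()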

	def wait_for_saccade_start(self):

		"""Returns starting time and starting position when a simulated saccade is started"""

		# this function assumes that a 'saccade' has started when the 'gaze' position
		# deviates more than maxerr pixels from the initial position (Euclidean distance)

		spos = self.sample() # starting position
		maxerr = 3 # pixels
		while True:
			npos = self.sample() # get newest sample
			if ((spos[0]-npos[0])**2  + (spos[1]-npos[1])**2)**0.5 > maxerr: # Pythagoras
				break

		return clock.get_time(), spos


	def wait_for_saccade_end(self):

		"""Returns ending time, starting and end position when a simulated saccade is ended"""

		# this function assumes that a 'saccade' has ended when the 'gaze' position
		# remains reasonably stable (i.e. within maxerr) for five samples in a row;
		# the stability check is the same as in wait_for_fixation_start

		stime, spos = self.wait_for_saccade_start()
		maxerr = 3 # pixels
		
		# wait for reasonably stable position
		xl = [] # list for last five samples (x coordinate)
		yl = [] # list for last five samples (y coordinate)
		moving = True
		while moving:
			# check positions
			npos = self.sample()
			xl.append(npos[0]) # add newest sample
			yl.append(npos[1]) # add newest sample
			if len(xl) == 5:
				# check if deviation is small enough
				if max(xl)-min(xl) < maxerr and max(yl)-min(yl) < maxerr:
					moving = False
				# remove oldest sample
				xl.pop(0); yl.pop(0)
			# wait for a bit, to avoid immediately returning (runs go faster than mouse moves)
			clock.pause(10)

		return clock.get_time(), spos, (xl[len(xl)-1],yl[len(yl)-1])


	def wait_for_fixation_start(self):

		"""Returns starting time and position when a simulated fixation is started"""

		# function assumes a 'fixation' has started when 'gaze' position remains reasonably
		# stable for five samples in a row (same as saccade end)

		maxerr = 3 # pixels
		
		# wait for reasonably stable position
		xl = [] # list for last five samples (x coordinate)
		yl = [] # list for last five samples (y coordinate)
		moving = True
		while moving:
			npos = self.sample()
			xl.append(npos[0]) # add newest sample
			yl.append(npos[1]) # add newest sample
			if len(xl) == 5:
				# check if deviation is small enough
				if max(xl)-min(xl) < maxerr and max(yl)-min(yl) < maxerr:
					moving = False
				# remove oldest sample
				xl.pop(0); yl.pop(0)
			# wait for a bit, to avoid immediately returning (runs go faster than mouse moves)
			clock.pause(10)

		return clock.get_time(), (xl[len(xl)-1],yl[len(yl)-1])


	def wait_for_fixation_end(self):

		"""Returns time and gaze position when a simulated fixation is ended"""

		# this function assumes that a 'fixation' has ended when the 'gaze' position
		# deviates more than maxerr pixels from the initial fixation position (Euclidean distance)

		stime, spos = self.wait_for_fixation_start()
		maxerr = 3 # pixels
		
		while True:
			npos = self.sample() # get newest sample
			if ((spos[0]-npos[0])**2  + (spos[1]-npos[1])**2)**0.5 > maxerr: # Pythagoras
				break

		return clock.get_time(), spos


	def wait_for_blink_start(self):

		"""Returns starting time and position of a simulated blink (mousebuttondown)"""

		# blinks are simulated with mouseclicks: a right mouseclick simulates the closing
		# of the eyes, a mousebuttonup the opening.

		while not self.blinking:
			pos = self.sample()

		return clock.get_time(), pos


	def wait_for_blink_end(self):

		"""Returns ending time and position of a simulated blink (mousebuttonup)"""
		
		# blinks are simulated with mouseclicks: a right mouseclick simulates the closing
		# of the eyes, a mousebuttonup the opening.

		# wait for blink start
		while not self.blinking:
			spos = self.sample()
		# wait for blink end
		while self.blinking:
			epos = self.sample()

		return clock.get_time(), epos
	
	def set_draw_drift_correction_target_func(self, func):
		
		"""See pygaze._eyetracker.baseeyetracker.BaseEyeTracker"""
		
		self.draw_drift_correction_target = func
	
	# ***
	#
	# Internal functions below
	#
	# ***

	def draw_drift_correction_target(self, x, y):
		
		"""
		Draws the drift-correction target.
		
		arguments
		
		x		--	The X coordinate
		y		--	The Y coordinate
		"""
		
		self.screen.clear()
		self.screen.draw_fixation(fixtype='dot', colour=settings.FGC, \
			pos=(x,y), pw=0, diameter=12)
		self.display.fill(self.screen)
		self.display.show()
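
	# A hedged sketch (not part of the original class) of installing a custom
	# drift-correction target via set_draw_drift_correction_target_func;
	# 'my_target' and 'disp' are hypothetical names.
	#
	#     def my_target(x, y):
	#         scr = Screen(disptype=settings.DISPTYPE)
	#         scr.draw_fixation(fixtype='cross', colour=settings.FGC,
	#             pos=(x, y), diameter=12)
	#         disp.fill(scr)
	#         disp.show()
	#
	#     tracker.set_draw_drift_correction_target_func(my_target)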