def __init__(self, display, filename="TEST.EDF"):
        EyeLink.__init__(self)
        self._disp = display
        self._filename = filename
        self.openDataFile(filename)

        self._surfs = [0,0,1920,1080]#self._disp.get_rect()
        pylink.flushGetkeyQueue()
        
        col = 128
        print "1"
        pylink.setCalibrationColors((0, 0, 0), (col, col, col)) #Sets the calibration target and background color
        print int(self._surfs[3]/300)
#        pylink.setTargetSize(int(self._surfs[2]/70), int(self._surfs[3]/300)) #select best size for calibration target
        print "12"
        pylink.setCalibrationSounds("off", "off", "off")
        pylink.setDriftCorrectSounds("off", "off", "off")

        self.sendCommand("screen_pixel_coords =  0 0 %d %d" % (self._surfs[2], self._surfs[3]))
        self.sendMessage("DISPLAY_COORDS  0 0 %d %d" % (self._surfs[2], self._surfs[3]))
        
        #self.sendMessage("heuristic_filter" % (self._surfs[2], self._surfs[3]))
        #assert self.getTrackerVersion() == 2
        self.sendCommand("select_parser_configuration 0")
        self.setFileEventFilter("LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
        self.setFileSampleFilter("LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        self.setLinkEventFilter("LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        self.setLinkSampleFilter("LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
        print "2"
        self.sendCommand("button_function 5 'accept_target_fixation'")
Example #2
    def _trackerCreate(self):
        """Helper method not directly called in EyeScript scripts in general
        
        configures the Eyelink eyetracker
        """        
        try:
            self.tracker = pylink.EyeLink()
        except (RuntimeError,AttributeError):
            self.tracker = EyetrackerStub()

        #This tells the tracker to use VisionEgg to display Eyelink graphics
        #including calibration points, camera images, etc.
        self.eyelinkGraphics = EyeLinkCoreGraphicsVE(self.screen,self.tracker)
        pylink.openGraphicsEx(self.eyelinkGraphics)
        self.eyelinkGraphics.setCalibrationColors(self['color'],self['bgcolor'])

        for key,value in self.iteritems():
            command = key.split('_',1)
            if len(command) > 1 and command[0] == 'tracker':
                getattr(self.tracker,command[1])(value)
                
        if self['heuristic_filter'] == 'off': self.tracker.setHeuristicFilterOff()
        else: self.tracker.setHeuristicFilterOn()
         
        #Set whether beeps should be played during drift correction
        pylink.setDriftCorrectSounds(*self['setDriftCorrectSounds'])
      
        # open the datafile on the operator pc
        if self['subject'] > 0: self.tracker.openDataFile(self.edffile)
        
        self.tracker.sendCommand("screen_pixel_coords =  0 0 %d %d"%(self['screen_size']))
Example #3
 def _init_calibration(self):
     # Sets the calibration target and background color
     pylink.setCalibrationColors((0, 0, 0), (255, 255, 255))
     # select best size for calibration target
     pylink.setTargetSize(int(self.screenRect.width / 70),
                          int(self.screenRect.width / 300))
     pylink.setCalibrationSounds("off", "off", "off")
     pylink.setDriftCorrectSounds("off", "off", "off")
Example #4
 def setup_eyelink(self):
     
     '''Connect to the EyeLink host, open an EDF file, configure data and
     link filters, run calibration, then retrieve the EDF file.
     '''
     # call for eyelink
     self.eyelink_tracker = pl.EyeLink("100.1.1.1")
     
     #parameters for eyelink
     self.monitor = monitors.Monitor('testMonitor')
     self.winSize = self.monitor.getSizePix()
     self.foreground = (250,250,250)
     self.background = (127,127,127)
     
     # create file
     self.edfFileName = "cbConfig" + str(self.subjectID)
     if len(self.edfFileName) > 8:
         self.edfFileName = self.edfFileName[0:8]
     pl.getEYELINK().openDataFile(self.edfFileName)
     pl.getEYELINK().setOfflineMode()
 
     #Eyelink - Gets the display size and sends messages to the EDF file
     pl.getEYELINK().sendCommand("screen_pixel_coords =  0 0 %d %d"%(self.winSize[0]-1, self.winSize[1]-1))
     pl.getEYELINK().sendMessage("Resolution %d %d" %((self.winSize[0]-1, self.winSize[1]-1)))
     pl.getEYELINK().sendMessage("EyeToScreen %d" %(self.monitor.getDistance()))
     pl.getEYELINK().sendMessage("MonitorWidth %d" %(self.monitor.getWidth()))
 
     #EyeLink - Set data file contents
     pl.getEYELINK().sendCommand("file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET,INPUT")
     pl.getEYELINK().sendCommand("link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET,INPUT")
 
     #EyeLink - Set Filter contents
     pl.getEYELINK().sendCommand("file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON,INPUT")
     pl.getEYELINK().sendCommand("link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON,INPUT")
 
     #EyeLink - Set Calibration Environment
     pl.setCalibrationColors(self.foreground, self.background)  #Sets the calibration target and background color - background color should match the testing background
     
     pl.flushGetkeyQueue()
     pl.getEYELINK().setOfflineMode()
     winX = int(self.winSize[0])
     winY = int(self.winSize[1])
     pl.openGraphics((winX,winY),32)
     pl.getEYELINK().doTrackerSetup()
     pl.closeGraphics()
     pl.setCalibrationSounds("", "", "");
     pl.setDriftCorrectSounds("", "off", "off");
     
     # close the EDF data file and transfer it from the host PC
     event.clearEvents()
     pl.getEYELINK().closeDataFile()
     transferFileName = self.edfFileName + '.edf' # fileName
     pl.getEYELINK().receiveDataFile(self.edfFileName, transferFileName)
 def eyeTrkCalib(self, colordepth=32):
     """
     callibration. not used?
     @param colordepth - color depth of display (why?)
     """
     sp = self.sp
     pl.openGraphics(sp, colordepth)
     pl.setCalibrationColors((255, 255, 255), (0, 0, 0))
     pl.setTargetSize(int(sp[0] / 70), int(sp[1] / 300))
     pl.setCalibrationSounds("", "", "")
     pl.setDriftCorrectSounds("", "off", "off")
     self.el.doTrackerSetup()
     pl.closeGraphics()
Example #6
 def eyeTrkCalib (el=el,dr=dr,cd=cd):
     # "opens the graphics if the display mode is not set"
     pl.openGraphics(dr,cd)
     pl.setCalibrationColors((255,255,255),(0,177,177))
     pl.setTargetSize(10, 5) 
     pl.setCalibrationSounds("","","")
     el.setCalibrationType('H3')
     pl.setDriftCorrectSounds("","off","off")
     el.disableAutoCalibration()
     el.doTrackerSetup()
     el.drawCalTarget(calTarg1)
     el.drawCalTarget(calTarg2)
     el.drawCalTarget(calTarg3)
     pl.closeGraphics()
     el.setOfflineMode()
Example #7
def eyeTrkCalib (el,dr,cd):
    # "opens the graphics if the display mode is not set"
    pl.openGraphics(dr,cd)
    pl.setCalibrationColors((255,255,255),(0,177,177))
    pl.setTargetSize(10, 5) 
    pl.setCalibrationSounds("","","")
    el.setCalibrationType('H3')
    pl.setDriftCorrectSounds("","off","off")
    el.disableAutoCalibration()
    el.doTrackerSetup()
    el.drawCalTarget(calTarg1)
    el.drawCalTarget(calTarg2)
    el.drawCalTarget(calTarg3)
    pl.closeGraphics()
    el.setOfflineMode()
Example #8
    def __init__(self,
                 win,
                 clock,
                 sj="TEST",
                 saccadeSensitivity=HIGH,
                 calibrationType='HV9',
                 calibrationTargetColor=WHITE,
                 calibrationBgColor=BLACK,
                 CalibrationSounds=False,
                 screen=(1024, 768)):
        '''win: psychopy visual window used for the experiment
          clock: psychopy time clock recording time for whole experiment
          sj: Subject identifier string (affects EDF filename)
          saccadeSensitivity:
            HIGH: Pursuit and neurological work
            LOW:  Cognitive research
          calibrationType:
            H3: Horizontal 3-point
            HV3: 3-point calibration, poor linearization
            HV5: 5-point calibration, poor at corners
            HV9: 9-point calibration, best overall
        calibrationTargetColor and calibrationBgColor:
            RGB tuple, i.e., (255,0,0) for Red
            One of: BLACK, WHITE, GRAY
        calibrationSounds:
            True: enable feedback sounds when calibrating'''
        self.edfFileName = str(
            sj) + ".EDF"  # Subject name only can put 8 characters
        print("Connecting to eyetracker.")
        self.tracker = pylink.EyeLink()
        self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
        print("Loading custom graphics")
        #Initializes Experiment Graphics
        genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, screen)
        pylink.openGraphicsEx(genv)
        # opendatafile
        self.tracker.openDataFile(self.edfFileName)

        #EyeLink Tracker Configuration
        pylink.flushGetkeyQueue()
        # Initializes the key queue used by getkey(); it may be called at any time to flush old keys from the queue.
        self.tracker.setOfflineMode()
        #Places the EyeLink tracker in offline (idle) mode and waits until the mode transition has finished
        self.tracker.sendCommand("screen_pixel_coords =  0 0 %d %d" %
                                 (tuple(screen)))
        self.tracker.setCalibrationType(calibrationType)
        self.tracker.sendCommand(
            "driftcorrect_cr_disable=OFF"
        )  #CF - OFF: turns on drift CORRECT; AUTO: Turns on drift CHECK; ON: Turns off both
        #self.tracker.sendCommand("generate_default_targets = NO")
        #self.tracker.sendCommand("calibration_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")
        #self.tracker.sendCommand("validation_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")

        self.tracker.sendMessage("DISPLAY_COORDS  0 0 %d %d" % (tuple(screen)))
        eyelink_ver = self.tracker.getTrackerVersion()
        if eyelink_ver == 3:
            tvstr = self.tracker.getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            tracker_software_ver = int(
                float(tvstr[(vindex + len("EYELINK CL")):].strip()))
        else:
            tracker_software_ver = 0
        if eyelink_ver >= 2:
            self.tracker.sendCommand("select_parser_configuration %d" %
                                     saccadeSensitivity)
        else:
            if saccadeSensitivity == HIGH: svt, sat = 22, 5000
            else: svt, sat = 30, 9500
            self.tracker.sendCommand("saccade_velocity_threshold = %d" % svt)
            self.tracker.sendCommand("saccade_acceleration_threshold = %d" %
                                     sat)
        if eyelink_ver == 2:  #turn off scenelink camera stuff
            self.tracker.sendCommand("scene_camera_gazemap = NO")

        # set EDF file contents
        self.tracker.setFileEventFilter(
            "LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
        if tracker_software_ver >= 4:
            self.tracker.setFileSampleFilter(
                "LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
        else:
            self.tracker.setFileSampleFilter(
                "LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")

        # set link data (used for gaze cursor)
        self.tracker.setLinkEventFilter(
            "LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if tracker_software_ver >= 4:
            self.tracker.setLinkSampleFilter(
                "LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
        else:
            self.tracker.setLinkSampleFilter(
                "LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")

        #self.tracker.setAcceptTargetFixationButton(1) # This programs a specific button for use in drift correction.

        #Set the calibration settings:
        #pylink.setCalibrationColors(WHITE, BLACK) # Sets the calibration target and background color(foreground_color, background_color)
        if CalibrationSounds:
            pylink.setCalibrationSounds("", "", "")
            pylink.setDriftCorrectSounds("", "off", "off")
        else:
            pylink.setCalibrationSounds("off", "off", "off")
            pylink.setDriftCorrectSounds("off", "off", "off")

        print("Beginning tracker setup")
        self.tracker.doTrackerSetup()
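The class name is not shown in this excerpt; a minimal usage sketch, assuming the __init__ above belongs to a wrapper class called EyeLinkWrapper (hypothetical name) and that HIGH, WHITE and BLACK are the module-level constants it references:

from psychopy import visual, core

win = visual.Window(size=(1024, 768), units='pix', fullscr=False)
clock = core.Clock()

# 'EyeLinkWrapper' is a placeholder name for the class whose __init__ is shown above.
tracker = EyeLinkWrapper(win, clock, sj="S01",
                         saccadeSensitivity=HIGH,
                         calibrationType='HV9',
                         screen=(1024, 768))

Because __init__ ends with doTrackerSetup(), constructing the object immediately runs camera setup and calibration on the tracker.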
Example #9
def eyelinkChildFunction(
qTo
, qFrom
, windowSize = [200,200]
, windowPosition = [0,0]
, stimDisplayRes = [1920,1080]
, calibrationDisplaySize = [1920,1080]
, calibrationDotSize = 10
, eyelinkIp = '100.1.1.1'
, edfFileName = 'temp.edf'
, edfPath = './_Data/temp.edf'
, saccadeSoundFile = '_Stimuli/stop.wav'
, blinkSoundFile = '_Stimuli/stop.wav'
):
	import sdl2
	import sdl2.ext
	import math
	import OpenGL.GL as gl
	import sdl2.sdlmixer
	import pylink
	import numpy
	import sys
	import shutil
	import subprocess
	import time
	import os
	import array
	from PIL import Image
	from PIL import ImageDraw
	try:
		import appnope
		appnope.nope()
	except:
		pass

	byteify = lambda x, enc: x.encode(enc)

	sdl2.SDL_Init(sdl2.SDL_INIT_VIDEO)
	window = sdl2.ext.Window("eyelink",size=windowSize,position=windowPosition,flags=sdl2.SDL_WINDOW_SHOWN)
	windowID = sdl2.SDL_GetWindowID(window.window)
	windowSurf = sdl2.SDL_GetWindowSurface(window.window)
	sdl2.ext.fill(windowSurf.contents,sdl2.pixels.SDL_Color(r=0, g=0, b=0, a=255))
	window.refresh()

	for i in range(10):
		sdl2.SDL_PumpEvents() #to show the windows


	sdl2.SDL_Init(sdl2.SDL_INIT_AUDIO)
	sdl2.sdlmixer.Mix_OpenAudio(44100, sdl2.sdlmixer.MIX_DEFAULT_FORMAT, 2, 1024)
	class Sound:
		def __init__(self, fileName):
			self.sample = sdl2.sdlmixer.Mix_LoadWAV(sdl2.ext.compat.byteify(fileName, "utf-8"))
			self.started = False
		def play(self):
			self.channel = sdl2.sdlmixer.Mix_PlayChannel(-1, self.sample, 0)
			self.started = True
		def stillPlaying(self):
			if self.started:
				if sdl2.sdlmixer.Mix_Playing(self.channel):
					return True
				else:
					self.started = False
					return False
			else:
				return False

	saccadeSound = Sound(saccadeSoundFile)
	blinkSound = Sound(blinkSoundFile)

	def exitSafely():
		if 'eyelink' in locals():
			if eyelink.isRecording()==0:
				eyelink.stopRecording()
			eyelink.setOfflineMode()
			eyelink.closeDataFile()
			eyelink.receiveDataFile(edfFileName,'temp.edf')
			eyelink.close()
			if os.path.isfile('temp.edf'):
				shutil.move('temp.edf', edfPath)
				# if os.path.isfile(edfPath):
				# 	subprocess.call('./edf2asc -y ./'+edfPath,shell=True)
		sys.exit() #process gets hung here if called when showing images from eyelink


	pylink.setDriftCorrectSounds('off','off','off')
	pylink.setCalibrationSounds('off','off','off')

	edfPath = './_Data/temp.edf' #temporary default location, to be changed later when ID is established
	done = False
	while not done:
		try:
			# print '\neyelink: Attempting to connect to eyelink (check that wifi is off!)'
			eyelink = pylink.EyeLink(eyelinkIp)
			done = True
		except:
			while not qTo.empty():
				message = qTo.get()
				if message=='quit':
					exitSafely()
				else:
					qTo.put(message)		

	# print 'eyelink: connected'
	eyelink.sendCommand('select_parser_configuration 0')# 0--> standard (cognitive); 1--> sensitive (psychophysical)
	# eyelink.sendCommand('sample_rate 500')
	eyelink.setLinkEventFilter("SACCADE,BLINK,FIXATION,LEFT,RIGHT")
	eyelink.openDataFile(edfFileName)
	eyelink.sendCommand("screen_pixel_coords =  %d %d %d %d" %(stimDisplayRes[0]/2 - calibrationDisplaySize[0]/2 , stimDisplayRes[1]/2 - calibrationDisplaySize[1]/2 , stimDisplayRes[0]/2 + calibrationDisplaySize[0]/2 , stimDisplayRes[1]/2 + calibrationDisplaySize[1]/2 ))
	eyelink.sendMessage("DISPLAY_COORDS  0 0 %d %d" %(stimDisplayRes[0],stimDisplayRes[1]))
	eyelink.sendCommand("saccade_velocity_threshold = 60")
	eyelink.sendCommand("saccade_acceleration_threshold = 19500")

	class EyeLinkCoreGraphicsPySDL2(pylink.EyeLinkCustomDisplay):
		def __init__(self):
			# self.__target_beep__ = Sound('_Stimuli/type.wav')
			# self.__target_beep__done__ = Sound('qbeep.wav')
			# self.__target_beep__error__ = Sound('error.wav')
			if sys.byteorder == 'little':
				self.byteorder = 1
			else:
				self.byteorder = 0
			self.imagebuffer = array.array('I')
			self.pal = None
			self.__img__ = None
		def record_abort_hide(self):
			pass
		def play_beep(self,beepid):
			pass
			# if beepid == pylink.DC_TARG_BEEP or beepid == pylink.CAL_TARG_BEEP:
			# 	self.__target_beep__.play()
			# elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
			# 	self.__target_beep__error__.play()
			# else:#	CAL_GOOD_BEEP or DC_GOOD_BEEP
			# 	self.__target_beep__done__.play()
		def clear_cal_display(self):
			# # print 'clear_cal_display'
			qFrom.put('clearCalDisplay')
		def setup_cal_display(self):
			# # print 'setup_cal_display'
			qFrom.put('setupCalDisplay')
		def exit_cal_display(self): 
			# # print 'exit_cal_display'
			qFrom.put('exitCalDisplay')
		def erase_cal_target(self):
			# # print 'erase_cal_target'
			qFrom.put('eraseCalTarget')
		def draw_cal_target(self, x, y):
			# # print 'draw_cal_target'
			qFrom.put(['drawCalTarget',x,y])
		def setup_image_display(self, width, height):
			# # print 'eyelink: setup_image_display'
			self.img_size = (width,height)
			return(0)
		def exit_image_display(self):
			# # print 'eyelink: exit_image_display'
			pass
		def image_title(self,text):
			# # print 'eyelink: image_title'
			pass
		def set_image_palette(self, r,g,b):
			# # print 'eyelink: set_image_palette'
			self.imagebuffer = array.array('I')
			sz = len(r)
			i = 0
			self.pal = []
			while i < sz:
				rf = int(b[i])
				gf = int(g[i])
				bf = int(r[i])
				if self.byteorder:
					self.pal.append((rf<<16) | (gf<<8) | (bf))
				else:
					self.pal.append((bf<<24) |  (gf<<16) | (rf<<8)) #for mac
				i = i+1
		def draw_image_line(self, width, line, totlines,buff):
			# # print 'eyelink: draw_image_line'
			i = 0
			while i < width:
				if buff[i]>=len(self.pal):
					buff[i] = len(self.pal)-1
				self.imagebuffer.append(self.pal[buff[i]&0x000000FF])
				i = i+1
			if line == totlines:
				img = Image.fromstring('RGBX', (width,totlines), self.imagebuffer.tostring())
				img = img.convert('RGBA')
				self.__img__ = img.copy()
				self.__draw__ = ImageDraw.Draw(self.__img__)
				self.draw_cross_hair() #inherited method, calls draw_line and draw_lozenge
				qFrom.put(['image',numpy.array(self.__img__.resize([self.__img__.size[0]*4,self.__img__.size[1]*4],Image.BICUBIC))])
				self.__img__ = None
				self.__draw__ = None
				self.imagebuffer = array.array('I')
		def getColorFromIndex(self,colorindex):
			if colorindex   ==  pylink.CR_HAIR_COLOR:          return (255,255,255,255)
			elif colorindex ==  pylink.PUPIL_HAIR_COLOR:       return (255,255,255,255)
			elif colorindex ==  pylink.PUPIL_BOX_COLOR:        return (0,255,0,255)
			elif colorindex ==  pylink.SEARCH_LIMIT_BOX_COLOR: return (255,0,0,255)
			elif colorindex ==  pylink.MOUSE_CURSOR_COLOR:     return (255,0,0,255)
			else: return (0,0,0,0)
		def draw_line(self,x1,y1,x2,y2,colorindex):
			# # print 'eyelink: draw_line'
			if x1<0: x1 = 0
			if x2<0: x2 = 0
			if y1<0: y1 = 0
			if y2<0: y2 = 0
			if x1>self.img_size[0]: x1 = self.img_size[0]
			if x2>self.img_size[0]: x2 = self.img_size[0]
			if y1>self.img_size[1]: y1 = self.img_size[1]
			if y2>self.img_size[1]: y2 = self.img_size[1]
			imr = self.__img__.size
			x1 = int((float(x1)/float(self.img_size[0]))*imr[0])
			x2 = int((float(x2)/float(self.img_size[0]))*imr[0])
			y1 = int((float(y1)/float(self.img_size[1]))*imr[1])
			y2 = int((float(y2)/float(self.img_size[1]))*imr[1])
			color = self.getColorFromIndex(colorindex)
			self.__draw__.line( [(x1,y1),(x2,y2)] , fill=color)
			return 0
		def draw_lozenge(self,x,y,width,height,colorindex):
			# # print 'eyelink: draw_lozenge'
			color = self.getColorFromIndex(colorindex)
			imr = self.__img__.size
			x=int((float(x)/float(self.img_size[0]))*imr[0])
			width=int((float(width)/float(self.img_size[0]))*imr[0])
			y=int((float(y)/float(self.img_size[1]))*imr[1])
			height=int((float(height)/float(self.img_size[1]))*imr[1])
			if width>height:
				rad = height/2
				self.__draw__.line([(x+rad,y),(x+width-rad,y)],fill=color)
				self.__draw__.line([(x+rad,y+height),(x+width-rad,y+height)],fill=color)
				clip = (x,y,x+height,y+height)
				self.__draw__.arc(clip,90,270,fill=color)
				clip = ((x+width-height),y,x+width,y+height)
				self.__draw__.arc(clip,270,90,fill=color)
			else:
				rad = width/2
				self.__draw__.line([(x,y+rad),(x,y+height-rad)],fill=color)
				self.__draw__.line([(x+width,y+rad),(x+width,y+height-rad)],fill=color)
				clip = (x,y,x+width,y+width)
				self.__draw__.arc(clip,180,360,fill=color)
				clip = (x,y+height-width,x+width,y+height)
				self.__draw__.arc(clip,360,180,fill=color)
			return 0
		def get_mouse_state(self):
			# pos = pygame.mouse.get_pos()
			# state = pygame.mouse.get_pressed()
			# return (pos,state[0])
			pass
		def get_input_key(self):
			ky=[]
			while not qTo.empty():
				message = qTo.get()
				# print 'eyelink: '
				# print message
				if message=='button':
					ky.append(pylink.KeyInput(32,0)) #button translated to space keypress (for drift correct)
				# if message=='quit':
				# 	# print 'received message to exit'
				# 	exitSafely()
				# el
				elif message[0]=='keycode':
					keysym = message[1]
					keycode = keysym.sym
					if keycode == sdl2.SDLK_F1:           keycode = pylink.F1_KEY
					elif keycode == sdl2.SDLK_F2:         keycode = pylink.F2_KEY
					elif keycode == sdl2.SDLK_F3:         keycode = pylink.F3_KEY
					elif keycode == sdl2.SDLK_F4:         keycode = pylink.F4_KEY
					elif keycode == sdl2.SDLK_F5:         keycode = pylink.F5_KEY
					elif keycode == sdl2.SDLK_F6:         keycode = pylink.F6_KEY
					elif keycode == sdl2.SDLK_F7:         keycode = pylink.F7_KEY
					elif keycode == sdl2.SDLK_F8:         keycode = pylink.F8_KEY
					elif keycode == sdl2.SDLK_F9:         keycode = pylink.F9_KEY
					elif keycode == sdl2.SDLK_F10:        keycode = pylink.F10_KEY
					elif keycode == sdl2.SDLK_PAGEUP:     keycode = pylink.PAGE_UP
					elif keycode == sdl2.SDLK_PAGEDOWN:   keycode = pylink.PAGE_DOWN
					elif keycode == sdl2.SDLK_UP:         keycode = pylink.CURS_UP
					elif keycode == sdl2.SDLK_DOWN:       keycode = pylink.CURS_DOWN
					elif keycode == sdl2.SDLK_LEFT:       keycode = pylink.CURS_LEFT
					elif keycode == sdl2.SDLK_RIGHT:      keycode = pylink.CURS_RIGHT
					elif keycode == sdl2.SDLK_BACKSPACE:  keycode = ord('\b')
					elif keycode == sdl2.SDLK_RETURN:     keycode = pylink.ENTER_KEY
					elif keycode == sdl2.SDLK_ESCAPE:     keycode = pylink.ESC_KEY
					elif keycode == sdl2.SDLK_TAB:        keycode = ord('\t')
					elif keycode == pylink.JUNK_KEY:      keycode = 0
					ky.append(pylink.KeyInput(keycode,keysym.mod))
			return ky

	customDisplay = EyeLinkCoreGraphicsPySDL2()
	pylink.openGraphicsEx(customDisplay)
	newGazeTarget = False
	gazeTarget = numpy.array(calibrationDisplaySize)/2.0
	gazeTargetCriterion = calibrationDotSize
	doSounds = False
	reportSaccades = False
	reportBlinks = False
	lastMessageTime = time.time()
	lastStartBlinkTime = time.time()
	while True:
		sdl2.SDL_PumpEvents()
		for event in sdl2.ext.get_events():
			if event.type==sdl2.SDL_WINDOWEVENT:
				if (event.window.event==sdl2.SDL_WINDOWEVENT_CLOSE):
					exitSafely()
		if not qTo.empty():
			message = qTo.get()
			if message=='quit':
				exitSafely()
			elif message[0]=='edfPath':
				edfPath = message[1]
			elif message[0]=='doSounds':
				doSounds = message[1]
			elif message[0]=='reportSaccades':
				reportSaccades = message[1]
			elif message[0]=='reportBlinks':
				reportBlinks = message[1]
			elif message[0]=='sendMessage':
				eyelink.sendMessage(message[1])
			elif message[0]=='doDriftCorrect':
				# print 'eyelink: drift correct requested'
				if eyelink.isRecording()==0:
					eyelink.stopRecording()
				try:
					location = message[1]
					error = eyelink.doDriftCorrect(location[0],location[1],0,1)
					# print error
					# print 'eyelink: drift correct attempted'
					if error != 27: 
						qFrom.put('driftCorrectComplete')
					else:
						qFrom.put('doCalibration')
				except:
					qFrom.put('doCalibration')
			elif message=='startRecording':
				# print 'eyelink: received message to begin recording'
				eyelink.startRecording(1,1,1,1) #this returns immediately but takes 10-30ms to actually kick in on the tracker
				while not (eyelink.isRecording()==0):
					pass
					# print eyelink.isRecording()
				qFrom.put('recordingStarted')
			elif message[0]=='newGazeTarget':
				# # print message
				newGazeTarget = True
				gazeTarget = numpy.array(message[1])
				gazeTargetCriterion = numpy.array(message[2])
				# # print message
				# # print 'waiting for gaze confirmation'
			elif message[0]=='acceptTrigger':
				eyelink.accept_trigger()
			elif message=='doCalibration':
				doSounds = False
				if eyelink.isRecording()==0:
					eyelink.stopRecording()
				eyelink.doTrackerSetup()
				# # print 'calComplete'
				qFrom.put('calibrationComplete')
		if eyelink.isRecording()==0: #stupid, I know, but eyelink.isRecording() returns 0 if it *is* indeed recording!
			eyeData = eyelink.getNextData()
			# if eyeData==pylink.SAMPLE_TYPE:
			# 	eyeSample = eyelink.getFloatData()
			# 	gaze = None
			# 	if eyeSample.isRightSample():
			# 		gaze = eyeSample.getRightEye().getGaze()
			# 	elif eyeSample.isLeftSample():
			# 		gaze = eyeSample.getLeftEye().getGaze()
			# 	if gaze!=None:
			# 		if gaze[0]!=-32768.0:
			# 			gazeDistFromGazeTarget = numpy.linalg.norm(numpy.array(gaze)-gazeTarget)
			# 			if newGazeTarget:
			# 				if gazeDistFromGazeTarget<gazeTargetCriterion:
			# 					# print ['gazeTargetMet',gaze,gazeTargetCriterion,gazeTarget,gazeDistFromGazeTarget]
			# 					qFrom.put(['gazeTargetMet',gazeTarget])
			# 					newGazeTarget = False
			# 				else:
			# 					qFrom.put(['gazeTargetNotMet',gazeTarget])
			# 					# print ['gazeTargetNotMet',gaze,gazeTarget,gazeDistFromGazeTarget,gazeTargetCriterion]
			if eyeData==pylink.ENDSACC:
				eyeSample = eyelink.getFloatData()
				gazeStartTime = eyeSample.getStartTime()
				gazeStart = eyeSample.getStartGaze()
				gazeEnd = eyeSample.getEndGaze()
				# # print ['eyelink: saccade',gazeStart,gazeEnd]
				if (gazeStart[0]!=-32768.0) & (gazeEnd[0]!=-32768.0):
					gazeDistFromGazeTarget = numpy.linalg.norm(numpy.array(gazeEnd)-gazeTarget)
					if gazeDistFromGazeTarget<1000:
						if newGazeTarget:
							# # print [gazeDistFromGazeTarget,gazeTargetCriterion,gazeTarget,gazeEnd]
							if gazeDistFromGazeTarget<gazeTargetCriterion:
								# # print ['gazeTargetMet',gazeEnd,gazeTargetCriterion,gazeTarget,gazeDistFromGazeTarget]
								qFrom.put(['gazeTargetMet',gazeTarget,gazeStartTime])
								newGazeTarget = False
								# # print 'gazeTargetMet'
						elif gazeDistFromGazeTarget>gazeTargetCriterion:
							if reportSaccades:
								qFrom.put(['gazeTargetLost',gazeTarget])
								# # print ['gazeTargetLost',gazeTarget]
							if (not saccadeSound.stillPlaying()) and (not blinkSound.stillPlaying()):
								if doSounds:
									saccadeSound.play()				
			elif eyeData==pylink.STARTBLINK:
			# 	lastStartBlinkTime = time.time()
			# elif eyeData==pylink.ENDBLINK:
			# 	if (time.time()-lastStartBlinkTime)>.1:
				if reportBlinks:
					qFrom.put('blink')
					# # print 'eyelink: blink'
				if (not saccadeSound.stillPlaying()) and (not blinkSound.stillPlaying()):
					if doSounds:
						#blinkSound.play()
						qFrom.put('blink')
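eyelinkChildFunction is designed to run in a separate process and be driven through the two queues; the message strings ('startRecording', 'recordingStarted', 'sendMessage', 'quit') are taken from the handlers above, while the parent-side code is a hypothetical sketch:

import multiprocessing

qTo = multiprocessing.Queue()    # parent -> eyelink child process
qFrom = multiprocessing.Queue()  # eyelink child process -> parent

child = multiprocessing.Process(target=eyelinkChildFunction, args=(qTo, qFrom))
child.start()

qTo.put('startRecording')
while qFrom.get() != 'recordingStarted':
    pass                         # block until the child confirms recording

qTo.put(['sendMessage', 'TRIAL 1 START'])  # timestamped message into the EDF

qTo.put('quit')                  # child stops recording, retrieves the EDF, and exits
child.join()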
Example #10
    def __init__(self, win, clock, sj = "TEST", autoCalibration=True, 
                 saccadeSensitivity = HIGH, calibrationType = 'HV9',
                 calibrationTargetColor = WHITE,
                 calibrationBgColor = BLACK, CalibrationSounds = False
                 ):
        '''
        win: psychopy visual window used for the experiment
 
        clock: psychopy time clock recording time for whole experiment
 
        sj: Subject identifier string (affects EDF filename)
 
        autoCalibration:
         True: enable auto-pacing during calibration
 
        saccadeSensitivity:
         HIGH: Pursuit and neurological work
         LOW:  Cognitive research
 
        calibrationType:
         H3: Horizontal 3-point
         HV3: 3-point calibration, poor linearization
         HV5: 5-point calibration, poor at corners
         HV9: 9-point calibration, best overall
 
        calibrationTargetColor and calibrationBgColor:
         RGB tuple, i.e., (255,0,0) for Red
         One of: BLACK, WHITE, GRAY
 
        calibrationSounds:
         True: enable feedback sounds when calibrating 
 
        '''
        self.edfFileName = str(sj)+".EDF"
        print(self.edfFileName)
        inf = info.RunTimeInfo("J","1",win, refreshTest=None, 
                             userProcsDetailed=False)
        self.screenSize = inf['windowSize_pix']
        self.units = inf['windowUnits']
        self.monitorName = inf['windowMonitor.name']
        monitor = monitors.Monitor(self.monitorName)
 
        print("Connecting to eyetracker.")
        self.tracker = pylink.EyeLink()
        self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
        print("Loading custom graphics")
        genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, self.screenSize)
        self.tracker.openDataFile(self.edfFileName)
        pylink.flushGetkeyQueue()
        self.tracker.setOfflineMode()
        self.tracker.sendCommand("screen_pixel_coords =  0 0 %d %d"
                                    %( tuple(self.screenSize) ))
        self.tracker.setCalibrationType(calibrationType)
        self.tracker.sendMessage("DISPLAY_COORDS  0 0 %d %d"
                                    %( tuple(self.screenSize) ))
 
        eyelink_ver = self.tracker.getTrackerVersion()
        if eyelink_ver == 3:
            tvstr = self.tracker.getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            tracker_software_ver = int(float(tvstr[(vindex + len("EYELINK CL")):].strip()))
        else: tracker_software_ver = 0
        if eyelink_ver>=2:
            self.tracker.sendCommand("select_parser_configuration %d" %saccadeSensitivity)
        else:
            if saccadeSensitivity == HIGH:
                svt, sat = 22, 5000
            else: svt, sat = 30, 9500
            self.tracker.sendCommand("saccade_velocity_threshold = %d" %svt)
            self.tracker.sendCommand("saccade_acceleration_threshold = %d" %sat)
 
        if eyelink_ver == 2: #turn off scenelink camera stuff
            self.tracker.sendCommand("scene_camera_gazemap = NO")
 
        # set EDF file contents
        self.tracker.sendCommand("file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
        if tracker_software_ver>=4:
            self.tracker.sendCommand("file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
        else:
            self.tracker.sendCommand("file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        # set link data (used for gaze cursor)
        self.tracker.sendCommand("link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if tracker_software_ver>=4:
            self.tracker.sendCommand("link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
        else:
            self.tracker.sendCommand("link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
 
        #Set the calibration settings:
        pylink.setCalibrationColors( calibrationTargetColor, calibrationBgColor)
        if CalibrationSounds:
            pylink.setCalibrationSounds("", "", "")
            pylink.setDriftCorrectSounds("", "off", "off")
        else:
            pylink.setCalibrationSounds("off", "off", "off")
            pylink.setDriftCorrectSounds("off", "off", "off")
 
        if autoCalibration:
            self.tracker.enableAutoCalibration()
        else: self.tracker.disableAutoCalibration()
        win.flip()
        print("Opening graphics")
        pylink.openGraphicsEx(genv)
        print("Begining tracker setup")
        self.tracker.doTrackerSetup()
        win.flip()
else:
    getEYELINK().sendCommand("saccade_velocity_threshold = 35")
    getEYELINK().sendCommand("saccade_acceleration_threshold = 9500")
getEYELINK().setFileEventFilter(
    "LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
getEYELINK().setFileSampleFilter("LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
getEYELINK().setLinkEventFilter("LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
getEYELINK().setLinkSampleFilter("LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
getEYELINK().sendCommand("pupil_size_diameter = True")
pylink.setCalibrationColors(
    (255, 255, 255),
    (0, 0, 0))  # Sets the calibration target and background color
pylink.setTargetSize(int(w / 70), int(w / 300))
# select best size for calibration target
pylink.setCalibrationSounds("", "", "")
pylink.setDriftCorrectSounds("", "off", "off")

if getEYELINK().isConnected() and not getEYELINK().breakPressed():
    print('Connected successfully')
    getEYELINK().doTrackerSetup()
    while True:
        try:
            error = getEYELINK().doDriftCorrect(w // 2, h // 2, 1, 1)
            if error != 27:
                break
            else:
                getEYELINK().doTrackerSetup()
        except:
            getEYELINK().doTrackerSetup()
else:
    print('Tracker not connected')
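In the drift-correction loop above, the magic number 27 is the value of pylink.ESC_KEY: doDriftCorrect returns it when the operator presses ESC to abort drift correction, in which case camera setup is re-entered. A sketch of the same loop using the named constant (the original's bare except is narrowed to RuntimeError here):

# Same logic as the fragment above, with the named constant instead of 27.
while True:
    try:
        error = getEYELINK().doDriftCorrect(w // 2, h // 2, 1, 1)
        if error != pylink.ESC_KEY:
            break                          # drift correction accepted
        getEYELINK().doTrackerSetup()      # ESC pressed: re-enter camera setup
    except RuntimeError:
        getEYELINK().doTrackerSetup()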
    def __init__(self, win, clock, sj = "TEST", saccadeSensitivity = HIGH, calibrationType = 'HV9',calibrationTargetColor = WHITE,calibrationBgColor = BLACK, CalibrationSounds = False,screen=(1024,768)):
        '''win: psychopy visual window used for the experiment
          clock: psychopy time clock recording time for whole experiment
          sj: Subject identifier string (affects EDF filename)
          saccadeSensitivity:
            HIGH: Pursuit and neurological work
            LOW:  Cognitive research
          calibrationType:
            H3: Horizontal 3-point
            HV3: 3-point calibration, poor linearization
            HV5: 5-point calibration, poor at corners
            HV9: 9-point calibration, best overall
        calibrationTargetColor and calibrationBgColor:
            RGB tuple, i.e., (255,0,0) for Red
            One of: BLACK, WHITE, GRAY
        calibrationSounds:
            True: enable feedback sounds when calibrating'''
        self.edfFileName = str(sj)+".EDF"   # Subject name only can put 8 characters
        print("Connecting to eyetracker.")
        self.tracker = pylink.EyeLink()
        self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
        print("Loading custom graphics")
        #Initializes Experiment Graphics
        genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, screen)
        pylink.openGraphicsEx(genv)
        # opendatafile
        self.tracker.openDataFile(self.edfFileName)
        
        #EyeLink Tracker Configuration
        pylink.flushGetkeyQueue()  # Initializes the key queue used by getkey(); it may be called at any time to flush old keys from the queue.
        self.tracker.setOfflineMode()  # Places the EyeLink tracker in offline (idle) mode and waits until the mode transition has finished
        self.tracker.sendCommand("screen_pixel_coords =  0 0 %d %d"%( tuple(screen) ))
        self.tracker.setCalibrationType(calibrationType)
        self.tracker.sendCommand("driftcorrect_cr_disable=OFF") #CF - OFF: turns on drift CORRECT; AUTO: Turns on drift CHECK; ON: Turns off both
        #self.tracker.sendCommand("generate_default_targets = NO") 
        #self.tracker.sendCommand("calibration_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")
        #self.tracker.sendCommand("validation_targets = 512,384 512,417 512,351 402,384 622,384 402,417 622,417 402,351 622,351")

        self.tracker.sendMessage("DISPLAY_COORDS  0 0 %d %d"%( tuple(screen) ))
        eyelink_ver = self.tracker.getTrackerVersion()
        if eyelink_ver == 3:
            tvstr = self.tracker.getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            tracker_software_ver = int(float(tvstr[(vindex + len("EYELINK CL")):].strip()))
        else: tracker_software_ver = 0
        if eyelink_ver>=2:
            self.tracker.sendCommand("select_parser_configuration %d" %saccadeSensitivity)
        else:
            if saccadeSensitivity == HIGH:svt, sat = 22, 5000  
            else: svt, sat = 30, 9500
            self.tracker.sendCommand("saccade_velocity_threshold = %d" %svt)   
            self.tracker.sendCommand("saccade_acceleration_threshold = %d" %sat)
        if eyelink_ver == 2: #turn off scenelink camera stuff
            self.tracker.sendCommand("scene_camera_gazemap = NO")
 
        # set EDF file contents
        self.tracker.setFileEventFilter("LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON")
        if tracker_software_ver>=4:self.tracker.setFileSampleFilter("LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET")
        else:self.tracker.setFileSampleFilter("LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        
        # set link data (used for gaze cursor)
        self.tracker.setLinkEventFilter("LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if tracker_software_ver>=4:self.tracker.setLinkSampleFilter("LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET")
        else:self.tracker.setLinkSampleFilter("LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")
        
        #self.tracker.setAcceptTargetFixationButton(1) # This programs a specific button for use in drift correction.
        
        #Set the calibration settings:
        #pylink.setCalibrationColors(WHITE, BLACK) # Sets the calibration target and background color(foreground_color, background_color)
        if CalibrationSounds:
            pylink.setCalibrationSounds("", "", "")
            pylink.setDriftCorrectSounds("", "off", "off")
        else:
            pylink.setCalibrationSounds("off", "off", "off")
            pylink.setDriftCorrectSounds("off", "off", "off")
            
        print("Beginning tracker setup")
        self.tracker.doTrackerSetup()
Example #13
def eyelinkChildFunction(qTo,
                         qFrom,
                         windowSize=[200, 200],
                         windowPosition=[0, 0],
                         stimDisplayRes=[1920, 1080],
                         calibrationDisplaySize=[1920, 1080],
                         calibrationDotSize=10,
                         eyelinkIp='100.1.1.1',
                         edfFileName='temp.edf',
                         edfPath='./_Data/temp.edf',
                         saccadeSoundFile='_Stimuli/stop.wav',
                         blinkSoundFile='_Stimuli/stop.wav'):
    import sdl2
    import sdl2.ext
    import math
    import OpenGL.GL as gl
    import sdl2.sdlmixer
    import pylink
    import numpy
    import sys
    import shutil
    import subprocess
    import time
    import os
    import array
    from PIL import Image
    from PIL import ImageDraw
    try:
        import appnope
        appnope.nope()
    except:
        pass

    byteify = lambda x, enc: x.encode(enc)

    sdl2.SDL_Init(sdl2.SDL_INIT_VIDEO)
    window = sdl2.ext.Window("eyelink",
                             size=windowSize,
                             position=windowPosition,
                             flags=sdl2.SDL_WINDOW_SHOWN)
    windowID = sdl2.SDL_GetWindowID(window.window)
    windowSurf = sdl2.SDL_GetWindowSurface(window.window)
    sdl2.ext.fill(windowSurf.contents,
                  sdl2.pixels.SDL_Color(r=0, g=0, b=0, a=255))
    window.refresh()

    for i in range(10):
        sdl2.SDL_PumpEvents()  #to show the windows

    sdl2.SDL_Init(sdl2.SDL_INIT_AUDIO)
    sdl2.sdlmixer.Mix_OpenAudio(44100, sdl2.sdlmixer.MIX_DEFAULT_FORMAT, 2,
                                1024)

    class Sound:
        def __init__(self, fileName):
            self.sample = sdl2.sdlmixer.Mix_LoadWAV(
                sdl2.ext.compat.byteify(fileName, "utf-8"))
            self.started = False

        def play(self):
            self.channel = sdl2.sdlmixer.Mix_PlayChannel(-1, self.sample, 0)
            self.started = True

        def stillPlaying(self):
            if self.started:
                if sdl2.sdlmixer.Mix_Playing(self.channel):
                    return True
                else:
                    self.started = False
                    return False
            else:
                return False

    saccadeSound = Sound(saccadeSoundFile)
    blinkSound = Sound(blinkSoundFile)

    def exitSafely():
        if 'eyelink' in locals():
            if eyelink.isRecording() == 0:
                eyelink.stopRecording()
            eyelink.setOfflineMode()
            eyelink.closeDataFile()
            eyelink.receiveDataFile(edfFileName, 'temp.edf')
            eyelink.close()
            if os.path.isfile('temp.edf'):
                shutil.move('temp.edf', edfPath)
                # if os.path.isfile(edfPath):
                # 	subprocess.call('./edf2asc -y ./'+edfPath,shell=True)
        sys.exit()  # the process can hang here if called while the eyelink camera image is showing

    pylink.setDriftCorrectSounds('off', 'off', 'off')
    pylink.setCalibrationSounds('off', 'off', 'off')

    edfPath = './_Data/temp.edf'  #temporary default location, to be changed later when ID is established
    done = False
    while not done:
        try:
            # print '\neyelink: Attempting to connect to eyelink (check that wifi is off!)'
            eyelink = pylink.EyeLink(eyelinkIp)
            done = True
        except:
            while not qTo.empty():
                message = qTo.get()
                if message == 'quit':
                    exitSafely()
                else:
                    qTo.put(message)

    # print 'eyelink: connected'
    eyelink.sendCommand(
        'select_parser_configuration 0'
    )  # 0--> standard (cognitive); 1--> sensitive (psychophysical)
    # eyelink.sendCommand('sample_rate 500')
    eyelink.setLinkEventFilter("SACCADE,BLINK,FIXATION,LEFT,RIGHT")
    eyelink.openDataFile(edfFileName)
    eyelink.sendCommand(
        "screen_pixel_coords =  %d %d %d %d" %
        (stimDisplayRes[0] / 2 - calibrationDisplaySize[0] / 2,
         stimDisplayRes[1] / 2 - calibrationDisplaySize[1] / 2,
         stimDisplayRes[0] / 2 + calibrationDisplaySize[0] / 2,
         stimDisplayRes[1] / 2 + calibrationDisplaySize[1] / 2))
    eyelink.sendMessage("DISPLAY_COORDS  0 0 %d %d" %
                        (stimDisplayRes[0], stimDisplayRes[1]))
    eyelink.sendCommand("saccade_velocity_threshold = 60")
    eyelink.sendCommand("saccade_acceleration_threshold = 19500")

    class EyeLinkCoreGraphicsPySDL2(pylink.EyeLinkCustomDisplay):
        def __init__(self):
            # self.__target_beep__ = Sound('_Stimuli/type.wav')
            # self.__target_beep__done__ = Sound('qbeep.wav')
            # self.__target_beep__error__ = Sound('error.wav')
            if sys.byteorder == 'little':
                self.byteorder = 1
            else:
                self.byteorder = 0
            self.imagebuffer = array.array('I')
            self.pal = None
            self.__img__ = None

        def record_abort_hide(self):
            pass

        def play_beep(self, beepid):
            pass
            # if beepid == pylink.DC_TARG_BEEP or beepid == pylink.CAL_TARG_BEEP:
            # 	self.__target_beep__.play()
            # elif beepid == pylink.CAL_ERR_BEEP or beepid == pylink.DC_ERR_BEEP:
            # 	self.__target_beep__error__.play()
            # else:#	CAL_GOOD_BEEP or DC_GOOD_BEEP
            # 	self.__target_beep__done__.play()
        def clear_cal_display(self):
            # # print 'clear_cal_display'
            qFrom.put('clearCalDisplay')

        def setup_cal_display(self):
            # # print 'setup_cal_display'
            qFrom.put('setupCalDisplay')

        def exit_cal_display(self):
            # # print 'exit_cal_display'
            qFrom.put('exitCalDisplay')

        def erase_cal_target(self):
            # # print 'erase_cal_target'
            qFrom.put('eraseCalTarget')

        def draw_cal_target(self, x, y):
            # # print 'draw_cal_target'
            qFrom.put(['drawCalTarget', x, y])

        def setup_image_display(self, width, height):
            # # print 'eyelink: setup_image_display'
            self.img_size = (width, height)
            return (0)

        def exit_image_display(self):
            # # print 'eyelink: exit_image_display'
            pass

        def image_title(self, text):
            # # print 'eyelink: image_title'
            pass

        def set_image_palette(self, r, g, b):
            # # print 'eyelink: set_image_palette'
            self.imagebuffer = array.array('I')
            sz = len(r)
            i = 0
            self.pal = []
            while i < sz:
                rf = int(b[i])
                gf = int(g[i])
                bf = int(r[i])
                if self.byteorder:
                    self.pal.append((rf << 16) | (gf << 8) | (bf))
                else:
                    self.pal.append(
                        (bf << 24) | (gf << 16) | (rf << 8))  #for mac
                i = i + 1

        def draw_image_line(self, width, line, totlines, buff):
            # # print 'eyelink: draw_image_line'
            i = 0
            while i < width:
                if buff[i] >= len(self.pal):
                    buff[i] = len(self.pal) - 1
                self.imagebuffer.append(self.pal[buff[i] & 0x000000FF])
                i = i + 1
            if line == totlines:
                img = Image.fromstring('RGBX', (width, totlines),
                                       self.imagebuffer.tostring())
                img = img.convert('RGBA')
                self.__img__ = img.copy()
                self.__draw__ = ImageDraw.Draw(self.__img__)
                self.draw_cross_hair()  # inherited method, calls draw_line and draw_lozenge
                qFrom.put([
                    'image',
                    numpy.array(
                        self.__img__.resize([
                            self.__img__.size[0] * 4, self.__img__.size[1] * 4
                        ], Image.BICUBIC))
                ])
                self.__img__ = None
                self.__draw__ = None
                self.imagebuffer = array.array('I')

        def getColorFromIndex(self, colorindex):
            if colorindex == pylink.CR_HAIR_COLOR: return (255, 255, 255, 255)
            elif colorindex == pylink.PUPIL_HAIR_COLOR:
                return (255, 255, 255, 255)
            elif colorindex == pylink.PUPIL_BOX_COLOR:
                return (0, 255, 0, 255)
            elif colorindex == pylink.SEARCH_LIMIT_BOX_COLOR:
                return (255, 0, 0, 255)
            elif colorindex == pylink.MOUSE_CURSOR_COLOR:
                return (255, 0, 0, 255)
            else:
                return (0, 0, 0, 0)

        def draw_line(self, x1, y1, x2, y2, colorindex):
            # # print 'eyelink: draw_line'
            if x1 < 0: x1 = 0
            if x2 < 0: x2 = 0
            if y1 < 0: y1 = 0
            if y2 < 0: y2 = 0
            if x1 > self.img_size[0]: x1 = self.img_size[0]
            if x2 > self.img_size[0]: x2 = self.img_size[0]
            if y1 > self.img_size[1]: y1 = self.img_size[1]
            if y2 > self.img_size[1]: y2 = self.img_size[1]
            imr = self.__img__.size
            x1 = int((float(x1) / float(self.img_size[0])) * imr[0])
            x2 = int((float(x2) / float(self.img_size[0])) * imr[0])
            y1 = int((float(y1) / float(self.img_size[1])) * imr[1])
            y2 = int((float(y2) / float(self.img_size[1])) * imr[1])
            color = self.getColorFromIndex(colorindex)
            self.__draw__.line([(x1, y1), (x2, y2)], fill=color)
            return 0

        def draw_lozenge(self, x, y, width, height, colorindex):
            # # print 'eyelink: draw_lozenge'
            color = self.getColorFromIndex(colorindex)
            imr = self.__img__.size
            x = int((float(x) / float(self.img_size[0])) * imr[0])
            width = int((float(width) / float(self.img_size[0])) * imr[0])
            y = int((float(y) / float(self.img_size[1])) * imr[1])
            height = int((float(height) / float(self.img_size[1])) * imr[1])
            if width > height:
                rad = height / 2
                self.__draw__.line([(x + rad, y), (x + width - rad, y)],
                                   fill=color)
                self.__draw__.line([(x + rad, y + height),
                                    (x + width - rad, y + height)],
                                   fill=color)
                clip = (x, y, x + height, y + height)
                self.__draw__.arc(clip, 90, 270, fill=color)
                clip = ((x + width - height), y, x + width, y + height)
                self.__draw__.arc(clip, 270, 90, fill=color)
            else:
                rad = width / 2
                self.__draw__.line([(x, y + rad), (x, y + height - rad)],
                                   fill=color)
                self.__draw__.line([(x + width, y + rad),
                                    (x + width, y + height - rad)],
                                   fill=color)
                clip = (x, y, x + width, y + width)
                self.__draw__.arc(clip, 180, 360, fill=color)
                clip = (x, y + height - width, x + width, y + height)
                self.__draw__.arc(clip, 360, 180, fill=color)
            return 0

        def get_mouse_state(self):
            # pos = pygame.mouse.get_pos()
            # state = pygame.mouse.get_pressed()
            # return (pos,state[0])
            pass

        def get_input_key(self):
            ky = []
            while not qTo.empty():
                message = qTo.get()
                # print 'eyelink: '
                # print message
                if message == 'button':
                    ky.append(
                        pylink.KeyInput(32, 0)
                    )  #button translated to space keypress (for drift correct)
                # if message=='quit':
                # 	# print 'received message to exit'
                # 	exitSafely()
                # el
                elif message[0] == 'keycode':
                    keysym = message[1]
                    keycode = keysym.sym
                    if keycode == sdl2.SDLK_F1: keycode = pylink.F1_KEY
                    elif keycode == sdl2.SDLK_F2: keycode = pylink.F2_KEY
                    elif keycode == sdl2.SDLK_F3: keycode = pylink.F3_KEY
                    elif keycode == sdl2.SDLK_F4: keycode = pylink.F4_KEY
                    elif keycode == sdl2.SDLK_F5: keycode = pylink.F5_KEY
                    elif keycode == sdl2.SDLK_F6: keycode = pylink.F6_KEY
                    elif keycode == sdl2.SDLK_F7: keycode = pylink.F7_KEY
                    elif keycode == sdl2.SDLK_F8: keycode = pylink.F8_KEY
                    elif keycode == sdl2.SDLK_F9: keycode = pylink.F9_KEY
                    elif keycode == sdl2.SDLK_F10: keycode = pylink.F10_KEY
                    elif keycode == sdl2.SDLK_PAGEUP: keycode = pylink.PAGE_UP
                    elif keycode == sdl2.SDLK_PAGEDOWN:
                        keycode = pylink.PAGE_DOWN
                    elif keycode == sdl2.SDLK_UP:
                        keycode = pylink.CURS_UP
                    elif keycode == sdl2.SDLK_DOWN:
                        keycode = pylink.CURS_DOWN
                    elif keycode == sdl2.SDLK_LEFT:
                        keycode = pylink.CURS_LEFT
                    elif keycode == sdl2.SDLK_RIGHT:
                        keycode = pylink.CURS_RIGHT
                    elif keycode == sdl2.SDLK_BACKSPACE:
                        keycode = ord('\b')
                    elif keycode == sdl2.SDLK_RETURN:
                        keycode = pylink.ENTER_KEY
                    elif keycode == sdl2.SDLK_ESCAPE:
                        keycode = pylink.ESC_KEY
                    elif keycode == sdl2.SDLK_TAB:
                        keycode = ord('\t')
                    elif keycode == pylink.JUNK_KEY:
                        keycode = 0
                    ky.append(pylink.KeyInput(keycode, keysym.mod))
            return ky

    customDisplay = EyeLinkCoreGraphicsPySDL2()
    pylink.openGraphicsEx(customDisplay)
    newGazeTarget = False
    gazeTarget = numpy.array(calibrationDisplaySize) / 2.0
    gazeTargetCriterion = calibrationDotSize
    doSounds = False
    reportSaccades = False
    reportBlinks = False
    lastMessageTime = time.time()
    lastStartBlinkTime = time.time()
    while True:
        sdl2.SDL_PumpEvents()
        for event in sdl2.ext.get_events():
            if event.type == sdl2.SDL_WINDOWEVENT:
                if (event.window.event == sdl2.SDL_WINDOWEVENT_CLOSE):
                    exitSafely()
        if not qTo.empty():
            message = qTo.get()
            if message == 'quit':
                exitSafely()
            elif message[0] == 'edfPath':
                edfPath = message[1]
            elif message[0] == 'doSounds':
                doSounds = message[1]
            elif message[0] == 'reportSaccades':
                reportSaccades = message[1]
            elif message[0] == 'reportBlinks':
                reportBlinks = message[1]
            elif message[0] == 'sendMessage':
                eyelink.sendMessage(message[1])
            elif message[0] == 'doDriftCorrect':
                # print 'eyelink: drift correct requested'
                if eyelink.isRecording() == 0:
                    eyelink.stopRecording()
                try:
                    location = message[1]
                    error = eyelink.doDriftCorrect(location[0], location[1], 0,
                                                   1)
                    # print error
                    # print 'eyelink: drift correct attempted'
                    if error != 27:
                        qFrom.put('driftCorrectComplete')
                    else:
                        qFrom.put('doCalibration')
                except:
                    qFrom.put('doCalibration')
            elif message == 'startRecording':
                # print 'eyelink: received message to begin recording'
                eyelink.startRecording(1, 1, 1, 1)  # this returns immediately but takes 10-30ms to actually kick in on the tracker
                while not (eyelink.isRecording() == 0):
                    pass
                    # print eyelink.isRecording()
                qFrom.put('recordingStarted')
            elif message[0] == 'newGazeTarget':
                # # print message
                newGazeTarget = True
                gazeTarget = numpy.array(message[1])
                gazeTargetCriterion = numpy.array(message[2])
                # # print message
                # # print 'waiting for gaze confirmation'
            elif message[0] == 'acceptTrigger':
                eyelink.accept_trigger()
            elif message == 'doCalibration':
                doSounds = False
                if eyelink.isRecording() == 0:
                    eyelink.stopRecording()
                eyelink.doTrackerSetup()
                # # print 'calComplete'
                qFrom.put('calibrationComplete')
        if eyelink.isRecording(
        ) == 0:  #stupid, I know, but eyelink.isRecording() returns 0 if it *is* indeed recording!
            eyeData = eyelink.getNextData()
            # if eyeData==pylink.SAMPLE_TYPE:
            # 	eyeSample = eyelink.getFloatData()
            # 	gaze = None
            # 	if eyeSample.isRightSample():
            # 		gaze = eyeSample.getRightEye().getGaze()
            # 	elif eyeSample.isLeftSample():
            # 		gaze = eyeSample.getLeftEye().getGaze()
            # 	if gaze!=None:
            # 		if gaze[0]!=-32768.0:
            # 			gazeDistFromGazeTarget = numpy.linalg.norm(numpy.array(gaze)-gazeTarget)
            # 			if newGazeTarget:
            # 				if gazeDistFromGazeTarget<gazeTargetCriterion:
            # 					# print ['gazeTargetMet',gaze,gazeTargetCriterion,gazeTarget,gazeDistFromGazeTarget]
            # 					qFrom.put(['gazeTargetMet',gazeTarget])
            # 					newGazeTarget = False
            # 				else:
            # 					qFrom.put(['gazeTargetNotMet',gazeTarget])
            # 					# print ['gazeTargetNotMet',gaze,gazeTarget,gazeDistFromGazeTarget,gazeTargetCriterion]
            if eyeData == pylink.ENDSACC:
                eyeSample = eyelink.getFloatData()
                gazeStartTime = eyeSample.getStartTime()
                gazeStart = eyeSample.getStartGaze()
                gazeEnd = eyeSample.getEndGaze()
                # # print ['eyelink: saccade',gazeStart,gazeEnd]
                # -32768.0 is the EyeLink missing-data value
                if (gazeStart[0] != -32768.0) and (gazeEnd[0] != -32768.0):
                    gazeDistFromGazeTarget = numpy.linalg.norm(
                        numpy.array(gazeEnd) - gazeTarget)
                    if gazeDistFromGazeTarget < 1000:
                        if newGazeTarget:
                            # # print [gazeDistFromGazeTarget,gazeTargetCriterion,gazeTarget,gazeEnd]
                            if gazeDistFromGazeTarget < gazeTargetCriterion:
                                # # print ['gazeTargetMet',gazeEnd,gazeTargetCriterion,gazeTarget,gazeDistFromGazeTarget]
                                qFrom.put([
                                    'gazeTargetMet', gazeTarget, gazeStartTime
                                ])
                                newGazeTarget = False
                                # # print 'gazeTargetMet'
                        elif gazeDistFromGazeTarget > gazeTargetCriterion:
                            if reportSaccades:
                                qFrom.put(['gazeTargetLost', gazeTarget])
                                # # print ['gazeTargetLost',gazeTarget]
                            if (not saccadeSound.stillPlaying()) and (
                                    not blinkSound.stillPlaying()):
                                if doSounds:
                                    saccadeSound.play()
            elif eyeData == pylink.STARTBLINK:
                # 	lastStartBlinkTime = time.time()
                # elif eyeData==pylink.ENDBLINK:
                # 	if (time.time()-lastStartBlinkTime)>.1:
                if reportBlinks:
                    qFrom.put('blink')
                    # # print 'eyelink: blink'
                if (not saccadeSound.stillPlaying()) and (
                        not blinkSound.stillPlaying()):
                    if doSounds:
                        #blinkSound.play()
                        qFrom.put('blink')
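# --------------------------------------------------------------------------
# Minimal driver sketch (not part of the original example) showing how a
# parent process might talk to the queue-based loop above. It assumes the
# loop runs as a child process that reads commands from an incoming queue
# (called qTo here; its real name is not shown in this excerpt) and reports
# back on qFrom, using the message strings handled above. The eyelink_loop
# argument and its (qTo, qFrom) signature are hypothetical.
import multiprocessing


def drive_eyelink_process(eyelink_loop):
    qTo = multiprocessing.Queue()    # commands to the tracker process
    qFrom = multiprocessing.Queue()  # reports from the tracker process
    proc = multiprocessing.Process(target=eyelink_loop, args=(qTo, qFrom))
    proc.start()

    # Start recording and wait for the loop to confirm.
    qTo.put('startRecording')
    while qFrom.get() != 'recordingStarted':
        pass

    # Watch for a saccade landing within 50 px of screen centre; other
    # reports ('blink', ['gazeTargetLost', ...]) may arrive in between.
    qTo.put(['newGazeTarget', (960, 540), 50])
    reply = qFrom.get()
    while not (isinstance(reply, list) and reply[0] == 'gazeTargetMet'):
        reply = qFrom.get()

    # Drift-correct at screen centre; the loop answers 'doCalibration'
    # if the drift correct was aborted, so fall back to a full calibration.
    qTo.put(['doDriftCorrect', (960, 540)])
    if qFrom.get() == 'doCalibration':
        qTo.put('doCalibration')
        qFrom.get()  # 'calibrationComplete'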
Exemple #14
    def __init__(self,
                 win,
                 clock,
                 sj="TEST",
                 autoCalibration=True,
                 saccadeSensitivity=HIGH,
                 calibrationType='HV9',
                 calibrationTargetColor=WHITE,
                 calibrationBgColor=BLACK,
                 CalibrationSounds=False):
        '''
        win: psychopy visual window used for the experiment
 
        clock: psychopy time clock recording time for whole experiment
 
        sj: Subject identifier string (affects EDF filename)
 
        autoCalibration:
         True: enable auto-pacing during calibration
 
        saccadeSensitivity:
         HIGH: Pursuit and neurological work
         LOW:  Cognitive research
 
        calibrationType:
         H3: Horizontal 3-point
         HV3: 3-point calibration, poor linearization
         HV5: 5-point calibration, poor at corners
         HV9: 9-point calibration, best overall
 
        calibrationTargetColor and calibrationBgColor:
         RGB tuple, i.e., (255,0,0) for Red
         One of: BLACK, WHITE, GRAY
 
        CalibrationSounds:
         True: enable feedback sounds when calibrating

        (A usage sketch follows at the end of this example.)
        '''
        self.edfFileName = str(sj) + ".EDF"
        print(self.edfFileName)
        inf = info.RunTimeInfo("J",
                               "1",
                               win,
                               refreshTest=None,
                               userProcsDetailed=False)
        self.screenSize = inf['windowSize_pix']
        self.units = inf['windowUnits']
        self.monitorName = inf['windowMonitor.name']
        monitor = monitors.Monitor(self.monitorName)

        print("Connecting to eyetracker.")
        self.tracker = pylink.EyeLink()
        self.timeCorrection = clock.getTime() - self.tracker.trackerTime()
        print("Loading custom graphics")
        genv = EyeLinkCoreGraphicsPsychopy(self.tracker, win, self.screenSize)
        self.tracker.openDataFile(self.edfFileName)
        pylink.flushGetkeyQueue()
        self.tracker.setOfflineMode()
        self.tracker.sendCommand("screen_pixel_coords =  0 0 %d %d" %
                                 (tuple(self.screenSize)))
        self.tracker.setCalibrationType(calibrationType)
        self.tracker.sendMessage("DISPLAY_COORDS  0 0 %d %d" %
                                 (tuple(self.screenSize)))

        eyelink_ver = self.tracker.getTrackerVersion()
        if eyelink_ver == 3:
            tvstr = self.tracker.getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            tracker_software_ver = int(
                float(tvstr[(vindex + len("EYELINK CL")):].strip()))
        else:
            tracker_software_ver = 0
        if eyelink_ver >= 2:
            self.tracker.sendCommand("select_parser_configuration %d" %
                                     saccadeSensitivity)
        else:
            if saccadeSensitivity == HIGH:
                svt, sat = 22, 5000
            else:
                svt, sat = 30, 9500
            self.tracker.sendCommand("saccade_velocity_threshold = %d" % svt)
            self.tracker.sendCommand("saccade_acceleration_threshold = %d" %
                                     sat)

        if eyelink_ver == 2:  #turn off scenelink camera stuff
            self.tracker.sendCommand("scene_camera_gazemap = NO")

        # set EDF file contents
        self.tracker.sendCommand(
            "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON"
        )
        if tracker_software_ver >= 4:
            self.tracker.sendCommand(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET"
            )
        else:
            self.tracker.sendCommand(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS")
        # set link data (used for gaze cursor)
        self.tracker.sendCommand(
            "link_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,BUTTON")
        if tracker_software_ver >= 4:
            self.tracker.sendCommand(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET"
            )
        else:
            self.tracker.sendCommand(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS")

        #Set the calibration settings:
        pylink.setCalibrationColors(calibrationTargetColor, calibrationBgColor)
        if CalibrationSounds:
            pylink.setCalibrationSounds("", "", "")
            pylink.setDriftCorrectSounds("", "off", "off")
        else:
            pylink.setCalibrationSounds("off", "off", "off")
            pylink.setDriftCorrectSounds("off", "off", "off")

        if autoCalibration:
            self.tracker.enableAutoCalibration()
        else:
            self.tracker.disableAutoCalibration()
        win.flip()
        print("Opening graphics")
        pylink.openGraphicsEx(genv)
        print("Begining tracker setup")
        self.tracker.doTrackerSetup()
        win.flip()
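# --------------------------------------------------------------------------
# Minimal usage sketch (not part of the original example). The class that
# owns the __init__ above is not shown in this excerpt, so the name
# "EyeLinkTracker" is hypothetical, and the constant values below are
# assumptions about what the original module presumably defines.
from psychopy import core, visual

HIGH = 1                  # assumed: psychophysical parser configuration
WHITE = (255, 255, 255)   # assumed calibration target color
BLACK = (0, 0, 0)         # assumed calibration background color

win = visual.Window(fullscr=True, units='pix', color=(-1, -1, -1))
clock = core.Clock()

# Connects to the tracker, configures the EDF/link filters and runs camera
# setup + calibration before returning.
tracker = EyeLinkTracker(win,
                         clock,
                         sj="S01",
                         autoCalibration=True,
                         saccadeSensitivity=HIGH,
                         calibrationType='HV9',
                         calibrationTargetColor=WHITE,
                         calibrationBgColor=BLACK,
                         CalibrationSounds=False)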
Exemple #15
    def Start_exp(self):
        # ---------------------------------------------------
        # starting point of the experiment
        # ---------------------------------------------------
        pylink.openGraphics((self.screen_width_px, self.screen_height_px),
                            32)  # Initialize the graphics
        self.eyelink.openDataFile(self.edfFileName)  # Open the EDF file.

        # Reset the key queue and put the tracker in offline mode.
        pylink.flushGetkeyQueue()
        self.eyelink.setOfflineMode()

        # Set the display coordinate system and log it to the EDF file
        self.eyelink.sendCommand(
            "screen_pixel_coords =  0 0 %d %d" %
            (self.screen_width_px - 1, self.screen_height_px - 1))
        self.eyelink.sendMessage(
            "DISPLAY_COORDS  0 0 %d %d" %
            (self.screen_width_px - 1, self.screen_height_px - 1))

        # ---------------------------------------------------
        # CLEAN UP ??? determine tracker software version (version = 3)
        # ---------------------------------------------------
        tracker_software_ver = 0
        eyelink_ver = self.eyelink.getTrackerVersion()

        if eyelink_ver == 3:
            tvstr = self.eyelink.getTrackerVersionString()
            vindex = tvstr.find("EYELINK CL")
            tracker_software_ver = int(
                float(tvstr[(vindex + len("EYELINK CL")):].strip()))

        if eyelink_ver >= 2:
            self.eyelink.sendCommand("select_parser_configuration 0")
            if eyelink_ver == 2:  # Turn off scenelink camera
                self.eyelink.sendCommand("scene_camera_gazemap = NO")
        else:
            self.eyelink.sendCommand("saccade_velocity_threshold = 35")
            self.eyelink.sendCommand("saccade_acceleration_threshold = 9500")

        # Set EDF file contents
        self.eyelink.sendCommand(
            "file_event_filter = LEFT,RIGHT,FIXATION,SACCADE,BLINK,MESSAGE,BUTTON,INPUT"
        )
        if tracker_software_ver >= 4:
            self.eyelink.sendCommand(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,HTARGET,INPUT"
            )
        else:
            self.eyelink.sendCommand(
                "file_sample_data  = LEFT,RIGHT,GAZE,AREA,GAZERES,STATUS,INPUT"
            )

        # Set link data (used for the gaze cursor)
        self.eyelink.sendCommand(
            "link_event_filter = LEFT,RIGHT,FIXATION,FIXUPDATE,SACCADE,BLINK,BUTTON,INPUT"
        )
        if tracker_software_ver >= 4:
            self.eyelink.sendCommand(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,HTARGET,INPUT"
            )
        else:
            self.eyelink.sendCommand(
                "link_sample_data  = LEFT,RIGHT,GAZE,GAZERES,AREA,STATUS,INPUT"
            )

        #############################
        # Calibration
        #############################
        pylink.setCalibrationColors(
            (255, 255, 255), (128, 128, 128)
        )  # Set the calibration target color (white) and background color (gray)
        pylink.setTargetSize(self.screen_width_px // 70,
                             self.screen_width_px //
                             300)  # Set the calibration target size
        pylink.setCalibrationSounds("", "", "")
        pylink.setDriftCorrectSounds("", "off", "off")