def show_gamble_screen(self, trial_info, response, tracker):
    """Display the gamble-choice screen and poll until one gamble is clicked.

    Returns (gamble_log, t, gamble_value): the per-frame mouse/gaze sample
    log, the elapsed time (ms) at the moment of choice, and the chosen gamble.
    """
    self.mouse.set_visible(True)

    log_rows = []
    chosen_gamble = None
    start_time = libtime.get_time()

    # The gamble screen is laid out on the side opposite the prior response.
    side = 'right' if response == 0 else 'left'
    self.initialize_gamble_screen(side)

    while chosen_gamble is None:
        self.disp.fill(self.gamble_screen)
        self.disp.show()

        # One mouse + eye-tracker sample per displayed frame.
        t = libtime.get_time() - start_time
        mouse_pos = self.mouse.get_pos()
        gaze = tracker.sample()
        pupil = tracker.pupil_size()
        log_rows.append([
            trial_info['subj_id'], trial_info['session_no'],
            trial_info['block_no'], trial_info['trial_no'],
            str(t), mouse_pos[0], mouse_pos[1], gaze[0], gaze[1], pupil
        ])

        # A click inside one of the gamble rects selects that gamble.
        for idx, gamble in enumerate(self.gambles):
            if self.mouse.mouse.isPressedIn(self.gamble_rects[idx]):
                chosen_gamble = gamble

    return log_rows, t, chosen_gamble
def trackersample(t0, q, tstart=0, tracker=None, tsample=3):
    """Poll the eye tracker for 10 s and queue valid gaze samples.

    Starting ``tstart`` ms after ``t0`` (a ``libtime`` timestamp), a gaze
    sample is taken roughly every ``tsample`` ms and pushed onto queue ``q``
    when both coordinates are non-negative (negative coordinates appear to
    mark tracking loss — TODO confirm against the tracker driver).
    """
    t1 = libtime.get_time()
    while t1 - t0 < 10000:
        t1 = libtime.get_time()
        if t1 - t0 > tstart:
            # Sample exactly once per tick.  The original called
            # tracker.sample() three separate times, so the sample put on the
            # queue could differ from the one whose validity was checked.
            sample = tracker.sample()
            if sample[0] >= 0 and sample[1] >= 0:
                q.put(sample)
            # Schedule the next sample regardless of validity.
            tstart += tsample
def ValidationFixation(self, screen, t):
    '''Wait until the central fixation point has been fixated for *t* ms.

    Parameters:
        screen : virtual display screen used for drawing
        t : required fixation time in milliseconds

    Draws a central dot that turns green while the gaze is on it.  The
    accumulated fixation time resets to zero whenever the gaze leaves the
    central window.  Pressing 'space' aborts and clears the display.
    '''
    # Fixation window: central square spanning 44%..56% of the screen.
    # Hoisted out of the loop — the bounds are loop-invariant.
    low = self.norm_2_px((0.44, 0.44))
    high = self.norm_2_px((0.56, 0.56))
    tfix = 0
    while tfix < t:  # until the dot has been fixated for t ms
        newTime = libtime.get_time()
        gazepos = self.tracker.sample()
        # Gaze is OUTSIDE the window when EITHER coordinate is out of range.
        # (The original joined the two axis tests with `and`, which counted
        # e.g. "x out, y in" as a valid fixation.)
        if (gazepos[0] < low[0] or gazepos[0] > high[0]) or \
                (gazepos[1] < low[1] or gazepos[1] > high[1]):
            screen = libscreen.Screen()
            # Not fixating: the dot stays white and the timer restarts.
            screen.draw_circle(colour='white',
                               pos=self.norm_2_px((0.5, 0.5)),
                               r=40, pw=2, fill=True)
            self.disp.fill(screen=screen)
            self.disp.show()
            tfix = 0
        else:
            screen = libscreen.Screen()
            # Fixating: the dot turns green and fixation time accumulates.
            screen.draw_circle(colour='green',
                               pos=self.norm_2_px((0.5, 0.5)),
                               r=40, pw=2, fill=True)
            self.disp.fill(screen=screen)
            self.disp.show()
            tfix += (libtime.get_time() - newTime)
        # 'space' aborts the fixation check entirely.
        if self.kb.get_key(keylist=['space'], flush=False)[0]:
            screen.clear()
            self.disp.fill(screen=screen)
            self.disp.show()
            return ()
    screen.clear()
def fonction_essai(self):
    """Trial run: for each hard-coded test image, require a 1 s central
    fixation, show the image for 10 s while logging binocular gaze samples,
    and save one workbook per image to the desktop.
    """
    ScreenVisage = libscreen.Screen()
    self.tracker.start_recording()
    ListeNomImg = [
        '/home/eyetracker/Bureau/Program/Images/pizza.jpg',
        '/home/eyetracker/Bureau/Program/Images/ExpVisages/Image_Homme_2.bmp'
    ]
    cpt = 0
    for Img in ListeNomImg:
        # One workbook per image, with a single sheet of timestamped samples.
        table = Workbook()
        gazePosSheet = table.active
        gazePosSheet.title = 'gazePos'
        gazePosSheet.append([
            "Time", "XOeilDroit", "YOeilDroit", "XOeilGauche", "YOeilGauche",
            "xRetenu", "Yretenu", "Etat"
        ])
        self.ValidationFixation(ScreenVisage, 1000)
        ScreenVisage.draw_image(image=Img)
        self.disp.fill(screen=ScreenVisage)
        self.disp.show()
        tdeb = libtime.get_time()
        oldTimeStamp = 0
        # Sample for 10 s; only log rows carrying a fresh tracker timestamp.
        while libtime.get_time() - tdeb < 10000:
            time.sleep(0.010)
            t = libtime.get_time()
            NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
            if NewTimeStamp != oldTimeStamp:
                etat = self.etat_yeux(Newgazepos[0], Newgazepos[1])
                gazePosSheet.append([
                    t, Newgazepos[0][0], Newgazepos[0][1],
                    Newgazepos[1][0], Newgazepos[1][1],
                    Newgazepos[2][0], Newgazepos[2][1], etat
                ])
                oldTimeStamp = NewTimeStamp
        ScreenVisage.clear()
        self.disp.fill(screen=ScreenVisage)
        self.disp.show()
        table.save('/home/eyetracker/Bureau/' + self.nameInd + '_'
                   + str(cpt) + '_Donnees.xls')
        # Was `cpt = 1`, which would overwrite output files from the third
        # image on; incrementing keeps one distinct file per image.
        cpt += 1
    self.tracker.stop_recording()
def dostuff(event, t0, q):
    """Drain queued tracker samples into the shared log lists while *event*
    is set, stamping each with the elapsed time since *t0* and the current
    value of the module-level ``emotion``.
    """
    event.wait()  # block until the producer signals start
    while event.is_set():
        if q.empty():
            continue
        elapsed = round(libtime.get_time() - t0, 2)
        currenttime.append(elapsed)
        trackerpos.append(q.get())
        emotionslist.append(emotion)
def show_stimulus_screen(self, trial_info, tracker):
    """Run one random-dot-kinematogram presentation, polling mouse and eye
    tracker every frame until a response rect is clicked.

    Returns (response_dynamics_log, response, t): per-frame samples, the
    chosen direction (180 for left, 0 for right) and the response time (ms).
    """
    self.mouse.set_visible(True)

    dots, positions, sequence = self.rdk.initialize_rdk_stim(
        trial_info['direction'], trial_info['coherence'])

    log_rows = []
    response = None
    start_time = libtime.get_time()
    t = 0
    signal_played = False

    while response is None:
        # Play the warning signal once if the cursor still rests on the
        # ready button after T_SIGNAL ms.
        if (self.ready_button.contains(self.mouse.mouse)
                and t > T_SIGNAL and not signal_played):
            self.warning_sound.play()
            signal_played = True

        dots, positions, sequence = self.rdk.update_rdk_stim(
            dots, positions, sequence)
        self.disp.fill(screen=self.stimuli_screen)
        self.disp.show()

        # collect mouse and eye tracker samples
        t = libtime.get_time() - start_time
        mouse_pos = self.mouse.get_pos()
        gaze = tracker.sample()
        pupil = tracker.pupil_size()
        log_rows.append([
            trial_info['subj_id'], trial_info['session_no'],
            trial_info['block_no'], trial_info['trial_no'],
            str(t), mouse_pos[0], mouse_pos[1], gaze[0], gaze[1], pupil
        ])

        if self.mouse.mouse.isPressedIn(self.left_response_rect):
            response = 180
        elif self.mouse.mouse.isPressedIn(self.right_response_rect):
            response = 0

    return log_rows, response, t
def FinCalibration(self):
    """Post-calibration validation pass.

    Re-presents the five calibration targets; for each, waits for a 'space'
    press and then records ~300 ms of binocular samples.

    Returns a list with one entry per target, each entry being a list of
    [target_pos, eye0, eye1, eye2] samples (presumably left eye, right eye,
    retained gaze — TODO confirm against binocular_sample()).
    """
    self.screen.clear()
    self.disp.fill(self.screen)
    self.disp.show()
    self.start_recording()
    ListePos = []
    # Wait for 'space' before starting the validation sequence.
    self.kb.get_key(keylist=['space'], flush=True, timeout=None)
    self.calibrated_pointpx = [
        self._norm_2_px(p) for p in [(0.1, 0.1), (0.9, 0.1), (0.5, 0.5),
                                     (0.1, 0.9), (0.9, 0.9)]
    ]
    for pos in self.calibrated_pointpx:
        ListeProv = []
        self.screen.clear()
        self.ReduceBall(pos, 40, 'yellow')
        self.disp.fill(self.screen)
        self.disp.show()
        # wait for pressing 'space' to collect data
        self.kb.get_key(keylist=['space'], flush=True, timeout=None)
        tdeb = libtime.get_time()
        # Start sampling / capture the eye positions for ~300 ms.
        oldTimeStamp = 0
        txp, gase = self.binocular_sample()  # priming sample; values unused
        while libtime.get_time() - tdeb < 300:
            time.sleep(0.01)
            NewTimeStamp, Newgazepos = self.binocular_sample()
            # Only keep samples carrying a fresh tracker timestamp.
            if NewTimeStamp != oldTimeStamp:
                oldTimeStamp = NewTimeStamp
                ListeProv += [[
                    pos, Newgazepos[0], Newgazepos[1], Newgazepos[2]
                ]]
        # calculate mean accuracy
        ListePos += [ListeProv]
    self.screen.clear()
    self.disp.fill(screen=self.screen)
    self.disp.show()
    self.stop_recording()
    return (ListePos)
def RecupDonneesOeil(self, indice):
    """Record ~300 ms of binocular samples for calibration point *indice*
    (1-based index into ``self.points_to_calibrate``).

    Side effect: stores the selected point on ``self.point``.  Returns a
    list of [point, eye0, eye1, eye2] entries, one per fresh tracker
    timestamp.
    """
    self.point = self.points_to_calibrate[indice - 1]
    samples = []
    self.start_recording()
    start = libtime.get_time()
    # Start sampling: capture the eye positions for ~300 ms.
    last_stamp = 0
    self.binocular_sample()  # priming call; result unused
    while libtime.get_time() - start < 300:
        time.sleep(0.01)
        stamp, gaze = self.binocular_sample()
        # Only keep samples carrying a fresh tracker timestamp.
        if stamp != last_stamp:
            last_stamp = stamp
            samples.append([self.point, gaze[0], gaze[1], gaze[2]])
    self.stop_recording()
    return samples
def presentTrial(self,curTrial,curTrialIndex):
    """Run one looking-while-listening trial and write its data row.

    Phases: (1) attention-getter movie until it ends, times out, or the
    participant attends (gaze/mouse in the movie AOI, or 'space' in keyboard
    mode); (2) static start screen; (3) box slides up in steps; (4) audio
    plays while watchProcedure() runs the gaze-contingent looming loop;
    (5) timing data are logged via createRespNew/writeToFile.
    NOTE(review): reconstructed from mangled formatting — nesting of the
    attention-getter break logic should be verified against the original.
    """
    #self.checkExit()
    #self.experiment.disp.show()
    #random jitter prior to trial start
    libtime.pause(self.ISI+random.choice([0,100,200]))
    #######start eye tracking##########
    if self.experiment.subjVariables['eyetracker']=="yes":
        self.experiment.tracker.start_recording()
        #log data on trial
        self.experiment.tracker.log("Experiment %s subjCode %s seed %s TrialNumber %s TrialType %s audio %s image %s" % (self.experiment.expName, self.experiment.subjVariables['subjCode'],str(self.experiment.subjVariables['seed']),str(curTrialIndex),curTrial['audioType'], curTrial['audio'],curTrial['image']))
    #start trial timer
    trialTimerStart=libtime.get_time()
    #create ag screen
    #agScreen=libscreen.Screen(disptype='psychopy')
    agScreenTime=libtime.get_time()
    if self.experiment.subjVariables['eyetracker']=="yes":
        #log event
        self.experiment.tracker.log("agStart")
    agCount = 0        # consecutive-ish samples with gaze/mouse inside the movie AOI
    keyBreak = False   # set when 'space' ends the attention-getter (keyboard mode)
    movPlaying = True
    self.mov.play()
    # Attention-getter loop: runs until the movie finishes or times out.
    while self.mov.status != visual.FINISHED and libtime.get_time() - agScreenTime < self.AGTimeOut:
        self.mov.draw()
        self.experiment.win.flip()
        libtime.pause(10)
        if self.experiment.subjVariables['activeMode']=="input" and self.experiment.subjVariables['inputDevice']=="keyboard":
            for key in event.getKeys():
                if key == 'space':
                    if self.mov.status == visual.PLAYING:
                        self.mov.pause()
                        self.experiment.win.flip()
                        movPlaying = False
                        keyBreak = True
            if keyBreak:
                break
        else:
            # Gaze- or mouse-contingent attention check.
            if self.experiment.subjVariables['activeMode']=="gaze":
                inputpos = self.experiment.tracker.sample()
            elif self.experiment.subjVariables['activeMode']=="input" and self.experiment.subjVariables['inputDevice']=="mouse":
                inputpos = self.experiment.input.get_pos()
            if self.aoiCenterMovie.contains(inputpos):
                agCount += 1
            if agCount > self.AGFixCount:
                #if self.mov.status == visual.PLAYING:
                break
    if movPlaying:
        if self.mov.status == visual.PLAYING:
            self.mov.pause()
            self.experiment.win.flip()
    #print libtime.get_time() - agScreenTime
    if self.experiment.subjVariables['eyetracker']=="yes":
        #log event
        self.experiment.tracker.log("agEnd")
    #create starting screen
    startScreen=libscreen.Screen(disptype='psychopy')
    curPic=self.pictureMatrix[str(curTrial['image'])][0]
    curPicCoord=self.pos['center']
    curPic.setPos(curPicCoord)
    curPic.size = (300,300)
    curBox = self.pictureMatrix[str(curTrial['box'])][0]
    curBox.size = self.posDims
    curBox.pos = self.pos['center']
    buildScreenPsychoPy(startScreen,[self.rect,curPic,curBox,self.grayRect])
    #present starting screen
    setAndPresentScreen(self.experiment.disp, startScreen)
    startScreenTime=libtime.get_time()
    if self.experiment.subjVariables['eyetracker']=="yes":
        #log event
        self.experiment.tracker.log("startScreen")
    libtime.pause(self.screenPause)
    #slide screen up
    for i in range(0,self.BoxStepCount+1,self.BoxStep):
        #set up box
        curBox.pos=(self.pos['center'][0],i)
        #add new screen
        curScreen=libscreen.Screen(disptype='psychopy')
        #add stimuli to the screen
        buildScreenPsychoPy(curScreen,[self.rect,curPic,curBox,self.grayRect])
        setAndPresentScreen(self.experiment.disp,curScreen)
        if i==self.BoxStepCount:
            screenUpTime=libtime.get_time()
            if self.experiment.subjVariables['eyetracker']=="yes":
                #log screen slide event
                self.experiment.tracker.log("screenUp")
    #play audio
    self.soundMatrix[curTrial['audio']].play()
    audioStartTime=libtime.get_time()
    if self.experiment.subjVariables['eyetracker']=="yes":
        #log audio event
        self.experiment.tracker.log("audioStart")
    ######Contingent Procedure######
    lookProcedureTimes = self.watchProcedure(curTrial,audioStartTime,curTrial['audioDur'],1000,self.aoiScreen,looming=True,curPic=curPic,stim1=self.rect,stim2=curBox,stim3=self.grayRect)
    #print lookProcedureTimes
    self.soundMatrix[curTrial['audio']].stop()
    self.experiment.disp.show()
    audioEndTime=libtime.get_time()-audioStartTime
    if self.experiment.subjVariables['eyetracker']=="yes":
        #log audio end event
        self.experiment.tracker.log("audioEnd")
    ######Stop Eyetracking######
    #trialEndTime
    trialTimerEnd=libtime.get_time()
    #trial time
    trialTime=trialTimerEnd-trialTimerStart
    if self.experiment.subjVariables['eyetracker']=="yes":
        #stop eye tracking
        self.experiment.tracker.stop_recording()
    #######Save data#########
    fieldVars=[]
    for curField in self.trialFieldNames:
        fieldVars.append(curTrial[curField])
    # Keyword names are alphabetically prefixed — presumably to fix the
    # column order produced by createRespNew (TODO confirm).
    [header, curLine] = createRespNew(self.experiment.subjInfo, self.experiment.subjVariables, self.trialFieldNames, fieldVars,
        a_curTrialIndex=curTrialIndex,
        b_expTimer=trialTimerEnd,
        c_trialStart=trialTimerStart,
        d_trialTime=trialTime,
        e_totalTime = lookProcedureTimes[0],
        f_lookAways = lookProcedureTimes[1],
        g_totalLookingTime = lookProcedureTimes[2],
        h_totalLookingTimeNS = lookProcedureTimes[3],
        i_agScreenTime = agScreenTime,
        j_startScreenTime = startScreenTime,
        k_audioStartTime=audioStartTime,
        l_audioEndTime = audioEndTime)
    writeToFile(self.experiment.outputFile,curLine)
def watchProcedure(self,curTrial,startTime,maxTime,maxLookAwayTime, aoi,looming=False,curPic=None,stim1=None,stim2=None,stim3=None):
    """Gaze/mouse/keyboard-contingent watching loop.

    Runs until *maxTime* ms have elapsed since *startTime* or the current
    look-away exceeds *maxLookAwayTime* ms.  Gaze samples are smoothed over
    the last ~150 ms; "looking" means the smoothed position is inside *aoi*.
    When *looming* is true, *curPic* is animated (ease-in/out resize) every
    iteration.

    Returns [totalTime, lookAways, totalLookingTime,
    totalLookingTimeNonSmoothed].
    NOTE(review): reconstructed from mangled formatting; statement order and
    nesting should be verified against the original.
    """
    totalLookingTime = 0
    nonLookingTimes = []            # smoothed look-away durations
    transitionNonLookingTimes = []  # raw (non-smoothed) look-away durations
    lookAways = 0
    curNonLookingTime = 0
    looking = True
    nonLook = False
    curLookAwayTime = 0
    responded = True
    counter=0.0
    direction=1
    startValues=(300,300)
    loomCounter = 0  # NOTE(review): never used below
    transition=False
    #list to store last 150 ms of looking
    last150ms=[]
    #store current location to initiate checking of when looks go off screen
    if self.experiment.subjVariables['activeMode']=="gaze":
        lastInputpos = self.experiment.tracker.sample()
        if lastInputpos == self.lookAwayPos:
            transitionNonLookingTimeOnset = libtime.get_time()
    elif self.experiment.subjVariables['activeMode']=="input" and self.experiment.subjVariables['inputDevice']=="mouse":
        lastInputpos = self.experiment.input.get_pos()
        if lastInputpos == self.lookAwayPos:
            transitionNonLookingTimeOnset = libtime.get_time()
    while libtime.get_time() - startTime < maxTime and curLookAwayTime < maxLookAwayTime:
        if self.experiment.subjVariables['activeMode']=="input" and self.experiment.subjVariables['inputDevice']=="keyboard":
            # Keyboard mode: 'space' toggles the looking state manually.
            for key in event.getKeys():
                if key == 'space' and looking ==True:
                    responded = False
                    event.clearEvents()
                    transitionNonLookingTimeOnset = libtime.get_time()
                elif key == 'space' and looking ==False:
                    responded = True
                    event.clearEvents()
        else:
            libtime.pause(10)
            #get gaze/ mouse position
            if self.experiment.subjVariables['activeMode']=="gaze":
                curInputpos = self.experiment.tracker.sample()
            elif self.experiment.subjVariables['activeMode']=="input":
                curInputpos = self.experiment.input.get_pos()
            #mark transition time
            if curInputpos == self.lookAwayPos and lastInputpos != self.lookAwayPos:
                transition=True
                transitionNonLookingTimeOnset = libtime.get_time()
            else:
                transition = False
            ####smoothing eyetracking/mousetracking sample###
            ##add cur gaze position to the list
            last150ms.append(curInputpos)
            ##if the length of the list exceeds 150 ms/25==6, then delete the earliest item in the list
            ## 25 ms because an average run through the while loop takes between 20-30 ms
            if len(last150ms)>6:
                del last150ms[0]
            ##Now, remove the (no looking data) tuples
            last150msClean=[e for e in last150ms if e != self.lookAwayPos]
            ##Now calculate the mean
            if len(last150msClean)>0:
                #calculate mean
                #looks a bit tricky, but that's just because the gaze positions are stored as tuples
                inputpos=tuple(map(lambda y: sum(y) / float(len(y)), zip(*last150msClean)))
            else:
                inputpos=self.lookAwayPos
            ####smoothing procedure end###
            responded = aoi.contains(inputpos)
            #update last gaze position
            lastInputpos = curInputpos
        # A looking -> not-looking transition starts a look-away episode.
        if not responded and looking:
            nonLookingTimeOnset = libtime.get_time()
            looking = False
            lookAways +=1
            nonLook = True
        # A not-looking -> looking transition closes the episode and records
        # both the smoothed and the raw look-away durations.
        if responded:
            if not looking:
                looking = True
                nonLookOnset = False  # NOTE(review): dead store — possibly a typo for nonLook
                curNonLookingTime=libtime.get_time()-nonLookingTimeOnset
                curTransitionNonLookingTime=libtime.get_time()- transitionNonLookingTimeOnset
                nonLookingTimes.append(curNonLookingTime)
                transitionNonLookingTimes.append(curTransitionNonLookingTime)
        if looking:
            curLookAwayTime = 0
        else:
            curLookAwayTime = libtime.get_time() - nonLookingTimeOnset
            # Terminal look-away: record it before the while condition exits.
            if curLookAwayTime > maxLookAwayTime:
                nonLookingTimes.append(curLookAwayTime)
                curTransitionNonLookingTime=libtime.get_time()- transitionNonLookingTimeOnset
                transitionNonLookingTimes.append(curTransitionNonLookingTime)
        if looming:
            #update screen with the next frame of the looming animation
            newScreen=libscreen.Screen(disptype='psychopy')
            counter +=1
            if counter > 100:
                # Reverse the grow/shrink direction every 100 frames.
                direction=(-1)*direction
                counter = 0.0
                startValues=(xSize,ySize)
            xSize = self.easeInOut(startValues[0],200.0,counter,100,direction)
            ySize = self.easeInOut(startValues[1],200.0,counter,100,direction)
            curPic.size = (xSize,ySize)
            buildScreenPsychoPy(newScreen,[stim1,curPic,stim2,stim3])
            setAndPresentScreen(self.experiment.disp, newScreen)
    totalTime=libtime.get_time()-startTime
    totalLookingTime = totalTime - sum(nonLookingTimes)
    totalLookingTimeNonSmoothed = totalTime - sum(transitionNonLookingTimes)
    return [totalTime, lookAways, totalLookingTime,totalLookingTimeNonSmoothed]
def start_fixation(self, position):
    """Append a timestamped entry for a fixation target at *position*
    (an (x, y) pair) to ``self.position_log``.
    """
    label = "(" + str(position[0]) + "," + str(position[1]) + ") "
    self.position_log.append(LogData(label, libtime.get_time()))
def CDPSGVS(self):
    '''
    Description:
    After a central fixation of a configurable duration (read from the
    .json config file), 4 shapes appear on screen (circle, square,
    triangle, diamond), alternating randomly between a diagonal-cross and
    a straight-cross layout.  Gaze samples and stimulus positions are
    saved to an Excel file.
    '''
    if self.filename == 0:
        # No calibration file selected: abort with an error dialog.
        tkinter.messagebox.showerror(
            title="Erreur",
            message='Veuillez selectionner un fichier de calibration')
    else:
        date = datetime.datetime.now().strftime("%d-%m-%Y_%H:%M:%S")
        # Read the experiment parameters.
        filename = self.Config.getConfigFilename('SGVS')
        try:
            with open(filename, "r") as fichier:
                data = json.load(fichier)
                CheminSave = self.Config.getDataDirname(
                    str(data["result_dirname"])) + '/'
                duration = data["duration"]
                nbTrial = data["nb_trial"]
                color = data["color"]
                fixationStart = data["fixation_start"]
                frequency = data["frequency"]
        except IOError:
            print("fichier %s introuvable", filename)
        date = datetime.datetime.now().strftime("%d-%m-%Y_%H:%M:%S")
        # Create the Excel workbook that receives the results.
        table = Workbook()
        gazePosSheet = table.active
        gazePosSheet.title = 'gazePos'
        posStimuli = table.create_sheet("posStimuli")
        gazePosSheet.append([
            "Time", "XOeilDroit", "YOeilDroit", "XOeilGauche", "YOeilGauche",
            "xRetenu", "Yretenu", "Etat"
        ])
        posStimuli.append([
            "Id Stimuli", "Temps apparition Stimuli", "Position X Stiumli",
            "Position y Stiumli", "Temps Disparition Stimuli"
        ])
        screenSGVS = libscreen.Screen()
        NbrDiag = 0    # number of trials shown with the diagonal-cross layout
        NbrDroits = 0  # number of trials shown with the straight-cross layout
        self.tracker.start_recording()
        # Possible positions of the 4 stimuli in a diagonal cross.
        PositionDiag = [(0.1, 0.1), (0.9, 0.1), (0.1, 0.9), (0.9, 0.9)]
        # Possible positions of the 4 stimuli in a straight cross.
        PositionDroit = [(0.5, 0.1), (0.9, 0.5), (0.5, 0.9), (0.1, 0.5)]
        # Loop over trials: nbTrial per layout.
        for i in range(nbTrial * 2):
            self.ValidationFixation(screenSGVS, fixationStart)
            # Choose whether the stimuli use the diagonal or straight layout.
            a = random.randint(0, 1)
            PosRetenues = []
            # Force the other layout once one has used up its quota.
            if NbrDiag == nbTrial:
                a = 1
            if NbrDroits == nbTrial:
                a = 0
            # Pick one of the 2 lists and shuffle the shape positions.
            if a == 0:
                NbrDiag += 1
                PosRetenues = PositionDiag
            else:
                NbrDroits += 1
                PosRetenues = PositionDroit
            random.shuffle(PosRetenues)
            PosStimuliA = self.norm_2_px(PosRetenues[0])
            PosStimuliB = self.norm_2_px(PosRetenues[1])
            PosStimuliC = self.norm_2_px(PosRetenues[2])
            PosStimuliD = self.norm_2_px(PosRetenues[3])
            screenSGVS.draw_rect(x=PosStimuliA[0], y=PosStimuliA[1], w=80,
                                 h=80, fill=True,
                                 colour=color)  # draw a square
            screenSGVS.draw_circle(colour=color, pos=PosStimuliB, r=40,
                                   pw=5, fill=True)  # draw a circle
            screenSGVS.draw_polygon(
                [(PosStimuliC[0] - 40, PosStimuliC[1] + 40),
                 (PosStimuliC[0] + 40, PosStimuliC[1] + 40),
                 (PosStimuliC[0], PosStimuliC[1] - 40)],
                fill=True, colour=color)  # draw a triangle
            screenSGVS.draw_polygon(
                [(PosStimuliD[0], PosStimuliD[1] - 60),
                 (PosStimuliD[0] - 30, PosStimuliD[1]),
                 (PosStimuliD[0], PosStimuliD[1] + 60),
                 (PosStimuliD[0] + 30, PosStimuliD[1])],
                fill=True, colour=color)  # draw a diamond
            TempsApp = libtime.get_time()
            self.disp.fill(screen=screenSGVS)
            self.disp.show()
            tdeb = libtime.get_time()
            # Start sampling / capture the eye positions while shown.
            oldTimeStamp = 0
            txp, gase = self.tracker.binocular_sample()  # reference timestamp
            while libtime.get_time() - tdeb < duration:
                time.sleep(frequency)
                NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
                if NewTimeStamp != oldTimeStamp:
                    # Timestamp delta scaled to ms (assumes tracker reports
                    # microseconds — TODO confirm).
                    t = int(NewTimeStamp - txp) / 1000
                    etat = self.etat_yeux(Newgazepos[0], Newgazepos[1])
                    gazePosSheet.append([
                        t, Newgazepos[0][0], Newgazepos[0][1],
                        Newgazepos[1][0], Newgazepos[1][1],
                        Newgazepos[2][0], Newgazepos[2][1], etat
                    ])
                    oldTimeStamp = NewTimeStamp
            screenSGVS.clear()
            self.disp.fill(screen=screenSGVS)
            self.disp.show()
            TempsDisp = libtime.get_time()
            time.sleep(2)
            # Append the shape coordinates to the Excel sheet.
            posStimuli.append(
                ["1", TempsApp, PosStimuliA[0], PosStimuliA[1], TempsDisp])
            # id 1 is the square
            posStimuli.append(
                ["2", TempsApp, PosStimuliB[0], PosStimuliB[1], TempsDisp])
            # id 2 is the circle
            posStimuli.append(
                ["3", TempsApp, PosStimuliC[0], PosStimuliC[1], TempsDisp])
            # id 3 is the triangle
            posStimuli.append(
                ["4", TempsApp, PosStimuliD[0], PosStimuliD[1], TempsDisp])
            # id 4 is the diamond
        self.tracker.stop_recording()
        table.save(CheminSave + self.nameInd + '_' + date + '_Donnees.xls')
        tkinter.messagebox.showinfo("Fichier sauvegarde",
                                    "Le fichier a bien été sauvegardé")
disp.show() kb_space.get_key() ITD_array = np.linspace(-1, 1, num=80) ITD_size = 400 np.random.shuffle(ITD_array) response_array = [] sound = wf.waveform(wavetype='wn', duration=0.1) box = visual.Circle(pygaze.expdisplay, radius=200) for itd in ITD_array: disp.show() libtime.pause(100 + np.random.uniform(0, 200)) box_time = 800 + libtime.get_time() sound_time = 800 + (itd * ITD_size) + libtime.get_time() box_shown = 0 sound_played = 0 scr = libscreen.Screen() scr.clear() starttime = libtime.get_time() maxtrialtime = 1350 while libtime.get_time() < starttime + maxtrialtime: time = libtime.get_time() if time > box_time and box_shown == 0: scr.screen.append(box)
# First we get the current length of the stimscr's list of stimuli, which # will be the index at which the new ImageStim will be assigned to. stim_index = len(stimscr.screen) # Then we add the ImageStim to the stimscr. Every time you call # disp.fill(stimscr) and then disp.show(), all stimuli in stimscr # (including the ImageStim) will be drawn. stimscr.screen.append(stim) # Wait until the participant presses any key to start. disp.fill(textscr) disp.show() kb.get_key(keylist=None, timeout=None, flush=True) # Log the start of the trial. log.write([time.strftime("%y-%m-%d"), time.strftime("%H-%M-%S"), \ trialnr, vidname, timer.get_time()]) # Start eye tracking. tracker.start_recording() timer.pause(5) tracker.log("TRIALSTART") # Show a status message on the EyeLink. if TRACKERTYPE == 'eyelink': tracker.status_msg("Trial %d/%d (%s)" % (trialnr, len(VIDEOS), vidname)) # Log trial specifics to gaze data file. timer.pause(5) tracker.log("TRIALNR %d; VIDNAME %s; EXPTIME %d; PCTIME %s" % \ (trialnr, vidname, timer.get_time(), \
def CDPFixationPoint(self, tfixation, Name, tol, x, y):
    """Single fixation-point trial.

    A white dot is shown at normalized position (x, y).  The subject has
    3 s to bring their gaze within ``tol`` px of the dot, then must keep
    fixating for ``tfixation`` ms, with off-target excursions tolerated up
    to 300 ms at a time.  The outcome is appended to <Name>.xlsx:
        0 = success,
        1 = gaze never reached the dot within 3 s,
        2 = fixation lost mostly through tracker dropout,
        3 = fixation lost by looking away.
    """
    self.Visu.blackscreen()
    datej = datetime.datetime.now().strftime("%d-%m-%Y")
    date = datetime.datetime.now().strftime("%d-%m-%Y_%H:%M:%S")
    table = load_workbook(
        self.Config.getDataDirname('FixationPoint') + '/' + Name + '.xlsx')
    mysheet = table.active
    tdeb = 0      # elapsed time in the "reach the dot" phase (ms)
    tfix = 0      # accumulated fixation time (ms)
    cptperte = 0  # accumulated time with the gaze off target (ms)
    self.tracker.start_recording()
    Result = 1
    pospxl = self.norm_2_px((x, y))
    self.Visu.draw_AOI_fix(pospxl[0], pospxl[1], tol)
    self.Visu.VisuShow()
    screen = libscreen.Screen()
    cptdefauteyetracker = 0  # accumulated tracker-dropout time (ms)
    # The dot starts white.
    screen.draw_circle(colour='white', pos=pospxl, r=30, pw=2, fill=True)
    self.disp.fill(screen=screen)
    self.disp.show()
    oldTimeStamp = 0
    tdebxp = libtime.get_time()
    txp, Newgazepos = self.tracker.binocular_sample()
    # Phase 1: wait (max 3 s) for the gaze to enter the tolerance box.
    while tdeb < 3000 and not (
            (Newgazepos[2][0] > (pospxl[0] - tol)
             and Newgazepos[2][0] < (pospxl[0] + tol))
            and (Newgazepos[2][1] > (pospxl[1] - tol)
                 and Newgazepos[2][1] < (pospxl[1] + tol))):
        print(tdeb)
        time.sleep(0.005)
        NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
        self.Visu.Show_gaze(Newgazepos[2][0], Newgazepos[2][1])
        newTime = libtime.get_time()
        if NewTimeStamp != oldTimeStamp:
            # Kept for parity with the original (its logging of t/etat was
            # commented out); etat_yeux is still called in case it matters.
            t = int(NewTimeStamp - txp) / 1000
            etat = self.etat_yeux(Newgazepos[0], Newgazepos[1])
            oldTimeStamp = NewTimeStamp
        tdeb = (libtime.get_time() - tdebxp)
    screen = libscreen.Screen()
    # Phase 2: gaze reached the dot in time — require tfixation ms of
    # fixation, tolerating off-target excursions shorter than 300 ms.
    if tdeb < 3000:
        while tfix < tfixation and cptperte < 300:
            newTime = libtime.get_time()
            time.sleep(0.005)
            NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
            self.Visu.Show_gaze(Newgazepos[2][0], Newgazepos[2][1])
            self.Visu.VisuShow()
            if (Newgazepos[2][0] > (pospxl[0] - tol)
                    and Newgazepos[2][0] < (pospxl[0] + tol)) and (
                        Newgazepos[2][1] > (pospxl[1] - tol)
                        and Newgazepos[2][1] < (pospxl[1] + tol)):
                # Gaze on target: redraw the dot and accumulate fixation time.
                screen = libscreen.Screen()
                screen.draw_circle(colour='white', pos=pospxl, r=30, pw=2,
                                   fill=True)
                self.disp.fill(screen=screen)
                self.disp.show()
                tfix += (libtime.get_time() - newTime)
                cptperte = 0
                cptdefauteyetracker = 0
            else:
                # Gaze off target; (-1, -1) marks a tracker dropout.
                if Newgazepos[2] == (-1, -1):
                    cptdefauteyetracker += (libtime.get_time() - newTime)
                cptperte += (libtime.get_time() - newTime)
            if NewTimeStamp != oldTimeStamp:
                t = int(NewTimeStamp - txp) / 1000
                etat = self.etat_yeux(Newgazepos[0], Newgazepos[1])
                oldTimeStamp = NewTimeStamp
        if tfix > tfixation:
            # Success: play the reward sound.
            self.RecSound.play()
            Result = 0
    self.tracker.stop_recording()
    if tdeb > 3000:
        # Target never reached in time.  (The original had the typo
        # `Resulat = 1` — harmless only because Result is initialised to 1.)
        Result = 1
        self.ErrSound.play()
        screen = libscreen.Screen(bgc='white')
        self.disp.fill(screen=screen)
        self.disp.show()
        time.sleep(3)
    elif tfix < tfixation:
        # Fixation aborted: distinguish tracker dropout from looking away.
        if cptdefauteyetracker > 200:
            Result = 2
        else:
            Result = 3
        self.ErrSound.play()
        screen = libscreen.Screen(bgc='white')
        self.disp.fill(screen=screen)
        self.disp.show()
        time.sleep(3)
    mysheet.append([datej, x, y, tol, tfixation, Result])
    table.save(
        self.Config.getDataDirname('FixationPoint') + '/' + Name + '.xlsx')
    print('fin')
    screen = libscreen.Screen()
    self.disp.fill(screen=screen)
    self.disp.show()
def CDPExplorationVisage(self, name, xfix, yfix):
    """Face-exploration experiment for subject *name*.

    For each face image (shuffled): show a dot at normalized position
    (xfix, yfix) until it has been fixated for 250 ms, then display the
    image for 4 s while logging binocular gaze samples, and save one
    workbook per image.  Pressing 'space' during the fixation phase aborts
    the whole run.
    """
    xfix = float(xfix)
    yfix = float(yfix)
    datedeb = datetime.datetime.now().strftime("%d-%m-%Y_%H:%M:%S")
    # One result directory per run, stamped with the start time.
    os.mkdir(
        self.Config.getDataDirname('Exploration Visages') + '/' + name + '_'
        + datedeb)
    #SetImageFile = self.SelectionSetIndividu(name)
    SetImageFile = [
        self.Config.getImageDirname(
            'Exploration Visages/anu/Anubis_02_200110DSC06806.resized.jpg'
        ),
        self.Config.getImageDirname(
            'Exploration Visages/anu/Anubis_Ref_200123DSC07844.resized.jpg'
        ),
        self.Config.getImageDirname(
            'Exploration Visages/bar/Barnabe_Ref_200123DSC07867.resized.jpg'
        ),
        self.Config.getImageDirname(
            'Exploration Visages/bar/Barnabe_04_200123DSC07859.resized.jpg'
        ),
        self.Config.getImageDirname(
            'Exploration Visages/ces/Cesar_03_200129DSC08442.resized.jpg'),
        self.Config.getImageDirname(
            'Exploration Visages/ces/Cesar_Ref_200123DSC07860.resized.jpg')
    ]
    random.shuffle(SetImageFile)
    self.tracker.start_recording()
    print("L'expérience a débuté")
    for img in SetImageFile:
        poscercle = self.norm_2_px((xfix, yfix))
        date = datetime.datetime.now().strftime("%d-%m-%Y_%H:%M:%S")
        imgname = os.path.basename(img)
        imgname = os.path.splitext(imgname)[0]
        # One workbook per image: a sample sheet plus an info sheet.
        table = Workbook()
        gazePosSheet = table.active
        gazePosSheet.title = 'gazePos'
        informationsheet = table.create_sheet("Information")
        informationsheet.append(
            ["NomImg", "xImage", "yImage", "xdep", "ydep"])
        gazePosSheet.append([
            "Time", "XOeilDroit", "YOeilDroit", "XOeilGauche",
            "YOeilGauche", "xRetenu", "Yretenu", "Etat"
        ])
        informationsheet.append(
            [img, 960, 702, poscercle[0], poscercle[1]])
        screen = libscreen.Screen()
        # The dot starts white.
        screen.draw_circle(colour='white', pos=poscercle, r=30, pw=2,
                           fill=True)
        self.disp.fill(screen=screen)
        self.disp.show()
        tfix = 0      # accumulated fixation time on the dot (ms)
        cptperte = 0  # accumulated time with the gaze off the dot (ms)
        while tfix < 250:
            # 'space' aborts the whole experiment.
            if self.kb.get_key(keylist=['space'], flush=False)[0]:
                screen.clear()
                self.disp.fill(screen=screen)
                self.disp.show()
                self.tracker.stop_recording()
                return ()
            newTime = libtime.get_time()
            time.sleep(0.005)
            NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
            # If the subject is not looking at the dot (±100 px window)...
            if (Newgazepos[2][0] < poscercle[0] - 100
                    or Newgazepos[2][0] > poscercle[0] + 100) or (
                        Newgazepos[2][1] < poscercle[1] - 100
                        or Newgazepos[2][1] > poscercle[1] + 100):
                cptperte += (libtime.get_time() - newTime)
                # Reset accumulated fixation after >300 ms away.
                if cptperte > 300:
                    tfix = 0
                screen.draw_circle(colour='white', pos=poscercle, r=30,
                                   pw=2, fill=True)
                self.disp.fill(screen=screen)
                self.disp.show()
            else:
                # ...else the subject is looking at the dot: the circle
                # fades from olive to green as fixation time accumulates.
                screen = libscreen.Screen()
                screen.draw_circle(colour=(int(128 - (tfix * 128 / 250)),
                                           128, 0),
                                   pos=poscercle, r=30, pw=2, fill=True)
                self.disp.fill(screen=screen)
                self.disp.show()
                tfix += (libtime.get_time() - newTime)
                cptperte = 0
        # Fixation acquired: show the face image for 4 s and log samples.
        screen.clear()
        screen.draw_image(image=img, pos=(960, 702))
        self.disp.fill(screen=screen)
        self.disp.show()
        tdeb = libtime.get_time()
        oldTimeStamp = 0
        txp, gase = self.tracker.binocular_sample()  # reference timestamp
        while libtime.get_time() - tdeb < 4000:
            time.sleep(0.01)
            NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
            if NewTimeStamp != oldTimeStamp:
                t = int(NewTimeStamp - txp) / 1000
                etat = self.etat_yeux(Newgazepos[0], Newgazepos[1])
                gazePosSheet.append([
                    t, Newgazepos[0][0], Newgazepos[0][1],
                    Newgazepos[1][0], Newgazepos[1][1],
                    Newgazepos[2][0], Newgazepos[2][1], etat
                ])
                oldTimeStamp = NewTimeStamp
        screen.clear()
        self.disp.fill(screen=screen)
        self.disp.show()
        self.RecSound.play()
        table.save(
            self.Config.getDataDirname('Exploration Visages') + '/' + name
            + '_' + datedeb + '/' + name + '_' + imgname + '_' + date
            + '.xls')
        time.sleep(2)
    print("L'expérience est terminée")
    self.tracker.stop_recording()
def CDPcontroleNictation(self):
    '''
    Description:
    After a 500 ms central fixation, a "Where's Wally" ("Où est Charlie")
    image is displayed for 30 s while the subject searches it
    (concentration task).  Binocular gaze samples are written to an Excel
    workbook, intended as a positive control on the subject's blinks
    ("nictations").  Pressing 'space' aborts without saving.
    NOTE(review): the original French docstring also described a 30 s calm
    sea image, which this code does not display — only the Charlie image
    is shown.
    '''
    date = datetime.datetime.now().strftime("%d-%m-%Y_%H:%M:%S")
    self.tracker.start_recording()
    name = 'Charlie'
    ScreenNict = libscreen.Screen()
    Img = self.Config.getImageDirname('ControlNictation') + '/Charlie.jpg'
    # Workbook: one sheet of gaze samples plus an info sheet.
    table = Workbook()
    gazePosSheet = table.active
    gazePosSheet.title = 'gazePos'
    informationsheet = table.create_sheet("Information")
    gazePosSheet.append([
        "Time", "XOeilDroit", "YOeilDroit", "XOeilGauche", "YOeilGauche",
        "xRetenu", "Yretenu", "Etat"
    ])
    informationsheet.append(["NomImg", "xImage", "yImage"])
    print("Début de l'expérience")
    self.ValidationFixation(ScreenNict, 500)
    ScreenNict.draw_image(image=Img)
    self.disp.fill(screen=ScreenNict)
    self.disp.show()
    tdeb = libtime.get_time()
    oldTimeStamp = 0
    txp, gase = self.tracker.binocular_sample()  # reference timestamp
    # Sample for 30 s; only log rows carrying a fresh tracker timestamp.
    while libtime.get_time() - tdeb < 30000:
        time.sleep(0.01)
        NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
        if NewTimeStamp != oldTimeStamp:
            t = int(NewTimeStamp - txp) / 1000
            etat = self.etat_yeux(Newgazepos[0], Newgazepos[1])
            gazePosSheet.append([
                t, Newgazepos[0][0], Newgazepos[0][1], Newgazepos[1][0],
                Newgazepos[1][1], Newgazepos[2][0], Newgazepos[2][1], etat
            ])
            oldTimeStamp = NewTimeStamp
        # 'space' aborts without saving the workbook.
        if self.kb.get_key(keylist=['space'], flush=False)[0]:
            ScreenNict.clear()
            self.disp.fill(screen=ScreenNict)
            self.disp.show()
            self.tracker.stop_recording()
            print("Fin de l'expérience")
            return ()
    ScreenNict.clear()
    self.disp.fill(screen=ScreenNict)
    self.disp.show()
    informationsheet.append([name + '.jpg', 960, 540])
    # NOTE(review): hard-coded save path, unlike the Config-driven paths
    # used by the other experiments — confirm this is intentional.
    table.save(
        '/home/eyetracker/Bureau/Data/Experiences/ControlNictation/'
        + self.nameInd + '_' + name + '_' + date + '.xls')
    print("Fin de l'expérience")
    self.tracker.stop_recording()
def CDPFixVisage(self):
    '''
    Face-fixation task.

    After a central fixation (duration read from the VISAGES JSON
    config), a face image appears on the left or right half of the
    screen (randomly) and the participant free-views it for the
    configured duration.  One Excel workbook of binocular gaze samples
    is saved per image under a per-session result directory.

    Requires a calibration file to have been selected beforehand
    (self.filename); otherwise an error dialog is shown and nothing runs.
    '''
    if self.filename == 0:
        # No calibration file selected: warn and bail out.
        tkinter.messagebox.showerror(
            title="Erreur",
            message='Veuillez selectionner un fichier de calibration')
    else:
        # Read the experiment parameters from the VISAGES config file.
        filename = self.Config.getConfigFilename('VISAGES')
        try:
            with open(filename, "r") as fichier:
                data = json.load(fichier)
                dirImage = self.Config.getImageDirname(
                    str(data["image_dirname"])) + '/'
                CheminSave = self.Config.getDataDirname(
                    str(data["result_dirname"])) + '/'
                duration = data["duration"]
                fixationStart = data["fixation_start"]
                frequency = data["frequency"]
        except IOError:
            # NOTE(review): on IOError the code below still runs with
            # dirImage/CheminSave/... undefined and will raise NameError.
            print("fichier %s introuvable", filename)
        date = datetime.datetime.now().strftime("%d-%m-%Y_%H:%M:%S")
        # One result directory per individual and session.
        os.mkdir(CheminSave + self.nameInd + '_' + date)
        dirToSave = CheminSave + self.nameInd + '_' + date + '/'
        ScreenVisage = libscreen.Screen()
        # Image file names, shuffled so presentation order is random.
        ListeNomImg = os.listdir(dirImage)
        random.shuffle(ListeNomImg)
        self.tracker.start_recording()
        for Img in ListeNomImg:
            # Fresh workbook per image: gaze samples + stimulus info.
            table = Workbook()
            gazePosSheet = table.active
            gazePosSheet.title = 'gazePos'
            informationsheet = table.create_sheet("Information")
            gazePosSheet.append([
                "Time", "XOeilDroit", "YOeilDroit", "XOeilGauche",
                "YOeilGauche", "xRetenu", "Yretenu", "Etat"
            ])
            informationsheet.append(["NomImg", "xImage", "yImage"])
            # Central fixation before each stimulus.
            self.ValidationFixation(ScreenVisage, fixationStart)
            # Randomly place the image at 25% or 75% of screen width.
            a = random.randint(0, 1)
            if a == 0:
                Pos = self.norm_2_px((0.25, 0.5))
            else:
                Pos = self.norm_2_px((0.75, 0.5))
            # NOTE(review): these four lists are never used afterwards.
            LisTOeilDroit = []
            LisTOeilGauche = []
            ListeOeilRetenu = []
            temps = []
            ScreenVisage.draw_image(image=dirImage + Img, pos=Pos)
            self.disp.fill(screen=ScreenVisage)
            self.Visu.affiche_image(dirImage + Img, Pos)
            self.disp.show()
            tdeb = libtime.get_time()
            oldTimeStamp = 0
            # Reference timestamp for relative times; gase is unused.
            txp, gase = self.tracker.binocular_sample()
            # Sample at `frequency` intervals for `duration` ms, keeping
            # only rows with a new tracker timestamp.
            while libtime.get_time() - tdeb < duration:
                time.sleep(frequency)
                NewTimeStamp, Newgazepos = self.tracker.binocular_sample()
                # Mirror the retained gaze point on the operator view.
                self.Visu.Show_gaze(Newgazepos[2][0], Newgazepos[2][1])
                if NewTimeStamp != oldTimeStamp:
                    t = int(NewTimeStamp - txp) / 1000
                    etat = self.etat_yeux(Newgazepos[0], Newgazepos[1])
                    gazePosSheet.append([
                        t, Newgazepos[0][0], Newgazepos[0][1],
                        Newgazepos[1][0], Newgazepos[1][1],
                        Newgazepos[2][0], Newgazepos[2][1], etat
                    ])
                    oldTimeStamp = NewTimeStamp
            # End of this image: blank the displays and save the data.
            ScreenVisage.clear()
            self.disp.fill(screen=ScreenVisage)
            self.disp.show()
            self.Visu.blackscreen()
            informationsheet.append([Img, Pos[0], Pos[1]])
            table.save(dirToSave + self.nameInd + '_' +
                       os.path.splitext(Img)[0] + '_Donnees.xls')
        self.tracker.stop_recording()
# Single-trial fragment: present an image, watch for gaze entering an
# area of interest (aoi), and log gaze hits until space is pressed or
# the trial times out.  `key`, `trialnr`, `images`, `aoi`, `log`,
# `log_sub`, `kb`, `disp`, `scr`, `tracker` are defined earlier in the
# script (outside this view).
tracker.log("TRIALSTART %d" % trialnr)
tracker.log("IMAGENAME %s" % images[trialnr])
tracker.status_msg("trial %d/%d" % (trialnr + 1, ntrials))
# present image
disp.fill(scr)
t0 = disp.show()
tracker.log("image online at %d" % t0)
# Assume the target is found unless the timeout fires.
is_found = 1
# wait for a bit for participant viewing image
# NOTE(review): assumes `key` was reset (e.g. to None) before this loop
# on every trial — confirm in the preceding code.
while key != 'space':
    # check for key input
    key, presstime = kb.get_key(keylist=['space'], timeout=1)
    gaze_pos = tracker.sample()
    gaze_time = timer.get_time() - t0
    # Log one row each poll while the gaze is inside the AOI.
    if aoi.contains(gaze_pos):
        print(gaze_pos)
        log.write([
            trialnr, images[trialnr], gaze_pos[0], gaze_pos[1], gaze_time
        ])
    # Trial timeout: mark as not found and leave the loop.
    if timer.get_time() - t0 >= TRIALTIME_L:
        is_found = 0
        break
# Per-trial summary row (note: writes `ntrials`, not `trialnr` —
# possibly intentional, but looks like a slip; verify against the logs).
log_sub.write([ntrials, images[trialnr], is_found, timer.get_time() - t0])
# reset screen
disp.fill()
t1 = disp.show()
tracker.log("image offline at %d" % t1)
# stop recording
def ApprentissagePos(self, Visu):
    '''
    Positioning-training session.

    Continuously visualises both eyes inside a central track box,
    accumulating timers: Tv (both eyes valid), Tr (at least one eye
    detected), Tp (lost), plus per-session totals and the longest
    consecutive detected stretch.  The operator can reward ('q', plays a
    sound) or pause ('tab'); both blank the participant screen and count
    as inter-trial time.  Space ends the session.

    Parameters:
        Visu: operator-side visualisation helper (draws eyes/timers).

    Returns:
        (summary_row, workbook) — a list of placeholder/summary fields
        and the openpyxl Workbook of per-frame eye-validity samples.
    '''
    self.RecSound = libsound.Sound(
        soundfile='/home/eyetracker/Downloads/2.wav')
    tablepos = Workbook()
    gazePosSheet = tablepos.active
    gazePosSheet.title = 'gazePos'
    gazePosSheet.append(["Time", "LeftValidity", "RightValidity"])
    # Timers (ms): tvtot/trtot = session totals, Tp = lost, Tr = partial,
    # Tv = both-valid, texp = session start, oldtmaxcons = best
    # consecutive detected stretch, Nbrec = reward count,
    # TinterTrial = time spent in reward/pause breaks,
    # cptPerte = consecutive lost time (resets Tr/Tv past 650 ms).
    tvtot = 0
    trtot = 0
    Tp = 0
    Tr = 0
    Tv = 0
    texp = 0
    oldtmaxcons = 0
    Nbrec = 0
    TinterTrial = 0
    # NOTE(review): `dist` shadows any module-level dist() here.
    dist = None
    cptPerte = 0
    Visu.write_time(Tp, Tr, Tv, texp, Nbrec, TinterTrial, tvtot, trtot,
                    dist, oldtmaxcons)
    Visu.VisuShow()
    # Central track box: the middle half of the display in each axis.
    origin = (int(self.disp.dispsize[0] / 4), int(self.disp.dispsize[1] / 4))
    size = (int(2 * self.disp.dispsize[0] / 4),
            int(2 * self.disp.dispsize[1] / 4))
    texp = libtime.get_time()
    OldTime = libtime.get_time()
    # Main loop: one iteration per frame until space is pressed.
    while not self.kb.get_key(keylist=['space'], flush=False)[0]:
        # Eye state codes: 0 = absent, 1 = detected, 2 = valid depth.
        OeilD = 0
        OeilG = 0
        # Latest gaze sample pushed by the tracker callback.
        gaze_sample = copy.copy(self.gaze[-1])
        self.screen.clear()
        validity_colour = (255, 0, 0)
        col = 'red'
        left_validity = False
        right_validity = False
        if gaze_sample['right_gaze_origin_validity'] and gaze_sample[
                'left_gaze_origin_validity']:
            # Valid when the eye sits in the middle of the trackbox depth.
            left_validity = 0.15 < gaze_sample[
                'left_gaze_origin_in_trackbox_coordinate_system'][2] < 0.85
            right_validity = 0.15 < gaze_sample[
                'right_gaze_origin_in_trackbox_coordinate_system'][2] < 0.85
        if right_validity and left_validity:
            validity_colour = (0, 255, 0)
            col = 'green'
        # Draw the track-box rectangle in the validity colour.
        self.screen.draw_line(colour=validity_colour, spos=origin,
                              epos=(origin[0] + size[0], origin[1]), pw=1)
        self.screen.draw_line(colour=validity_colour, spos=origin,
                              epos=(origin[0], origin[1] + size[1]), pw=1)
        self.screen.draw_line(colour=validity_colour,
                              spos=(origin[0], origin[1] + size[1]),
                              epos=(origin[0] + size[0], origin[1] + size[1]),
                              pw=1)
        self.screen.draw_line(colour=validity_colour,
                              spos=(origin[0] + size[0], origin[1] + size[1]),
                              epos=(origin[0] + size[0], origin[1]), pw=1)
        Visu.effacer_mouvement()
        right_eye, left_eye, distance = None, None, []
        if gaze_sample['right_gaze_origin_validity']:
            # Eye-to-tracker distance in cm (user coords are mm).
            distance.append(
                round(
                    gaze_sample[
                        'right_gaze_origin_in_user_coordinate_system'][2]
                    / 10, 1))
            OeilD = 1
            if right_validity:
                OeilD = 2
            # Map trackbox coords (x mirrored) into the on-screen box.
            right_eye = (
                (1 - gaze_sample[
                    'right_gaze_origin_in_trackbox_coordinate_system'][0])
                * size[0] + origin[0],
                gaze_sample[
                    'right_gaze_origin_in_trackbox_coordinate_system'][1]
                * size[1] + origin[1])
            self.screen.draw_circle(colour=validity_colour, pos=right_eye,
                                    r=int(self.disp.dispsize[0] / 100),
                                    pw=5, fill=True)
            Visu.Show_droit(right_eye[0], right_eye[1], col,
                            int(self.disp.dispsize[0] / 200))
        if gaze_sample['left_gaze_origin_validity']:
            distance.append(
                round(
                    gaze_sample[
                        'left_gaze_origin_in_user_coordinate_system'][2]
                    / 10, 1))
            OeilG = 1
            if left_validity:
                OeilG = 2
            left_eye = (
                (1 - gaze_sample[
                    'left_gaze_origin_in_trackbox_coordinate_system'][0])
                * size[0] + origin[0],
                gaze_sample[
                    'left_gaze_origin_in_trackbox_coordinate_system'][1]
                * size[1] + origin[1])
            self.screen.draw_circle(colour=validity_colour, pos=left_eye,
                                    r=int(self.disp.dispsize[0] / 100),
                                    pw=5, fill=True)
            Visu.Show_gauche(left_eye[0], left_eye[1], col,
                             int(self.disp.dispsize[0] / 200))
        # One validity row per frame (elapsed ms, left, right).
        gazePosSheet.append(
            [int((libtime.get_time() - texp)), OeilG, OeilD])
        Visu.VisuShow()
        if self._mean(distance) != None:
            dist = self._mean(distance)
        NewTime = libtime.get_time()
        deltaT = NewTime - OldTime
        OldTime = NewTime
        txp = libtime.get_time() - texp
        # After >650 ms of continuous loss, drop the running streaks.
        if cptPerte > 650:
            Tv = 0
            Tr = 0
        if OeilD == 2 and OeilG == 2:
            Tv += deltaT
            cptPerte = 0
            tvtot += deltaT
        elif OeilD == 1 or OeilG == 1:
            Tr += deltaT
            cptPerte = 0
            trtot += deltaT
        else:
            Tp += deltaT
            cptPerte += deltaT
        # Track the best consecutive detected (Tr+Tv) stretch.
        newtmaxcons = Tr + Tv
        if newtmaxcons > oldtmaxcons:
            oldtmaxcons = newtmaxcons
        # 'q' = reward: play sound, blank screen, count a reward and the
        # break time; streaks restart afterwards.
        if self.kb.get_key(keylist=['q'], flush=False)[0]:
            tdeb = libtime.get_time()
            self.RecSound.play()
            self.screen.clear()
            self.disp.fill(self.screen)
            self.disp.show()
            Nbrec += 1
            Tr = 0
            Tv = 0
            # Blocks until 'q' is pressed again to resume.
            self.kb.get_key(keylist=['q'], flush=True, timeout=None)
            TinterTrial += libtime.get_time() - tdeb
            OldTime = libtime.get_time()
            txp = libtime.get_time() - texp
        # 'tab' = pause: same as reward but silent and not counted.
        if self.kb.get_key(keylist=['tab'], flush=False)[0]:
            tdeb = libtime.get_time()
            self.screen.clear()
            self.disp.fill(self.screen)
            self.disp.show()
            Tr = 0
            Tv = 0
            self.kb.get_key(keylist=['tab'], flush=True, timeout=None)
            TinterTrial += libtime.get_time() - tdeb
            OldTime = libtime.get_time()
            txp = libtime.get_time() - texp
        # Refresh the operator-side timer display and the screen.
        Visu.effacer_text()
        Visu.write_time(int(Tp), int(Tr), int(Tv), int(txp / 100) / 10,
                        Nbrec, int(TinterTrial), int(tvtot), int(trtot),
                        dist, int(oldtmaxcons))
        Visu.VisuShow()
        self.disp.fill(self.screen)
        self.disp.show()
    # Session ended: clean both displays.
    Visu.effacer_text()
    Visu.VisuShow()
    self.screen.clear()
    self.disp.fill(self.screen)
    self.disp.show()
    # Summary row; string fields are placeholders filled in by the
    # caller.  NOTE(review): `txp` is unbound if the loop never ran.
    return ([
        'ind', 'date', 'jour', int(tvtot), int(trtot), int(Tp), int(txp),
        int(oldtmaxcons), 'Condidtion', Nbrec, int(TinterTrial), Nbrec,
        'Remarque'
    ], tablepos)
eyetracker.calibrate() # display surface disp.fill(screen=blankscreen) disp.show() # # # # # # game # run several rounds for trialnr in range(0,TRIALS): # start eye tracking eyetracker.start_recording() eyetracker.log("start_trial %d" % trialnr) trialstart = libtime.get_time() # run game points = 0 stimpos = STIMPOS t0 = libtime.get_time() tstim = libtime.get_time() while libtime.get_time() - t0 < GAMEDURATION: # get gaze position gazepos = eyetracker.sample() # get keypress key, presstime = keyboard.get_key() # handle input if key: if key == 'escape': break
pw=5, diameter=30)
# Trial-setup fragment (the call above is continued from code outside
# this view).  Builds the per-trial screens, then runs the baseline /
# fixation / number phases with fixed pauses while recording.
number_screen = Screen()
# Random digit 1..9 shown as a brief attention task.
number_screen.draw_text(text=str(np.random.randint(1, 10)),
                        pos=center_of_screen,
                        colour=(255, 255, 255),
                        fontsize=40)
face_pair_screen = Screen()
disengagement_screen = Screen()
# start with blank screen for 500 ms and start recording
disp.fill()
disp.show()
tracker.start_recording()
tracker.log("start_trial %d" % trialnr)
trialstart = libtime.get_time()
libtime.pause(500)
# fixation cross screen
disp.fill(fixation_cross_screen)
disp.show()
libtime.pause(500)
fixation_cross_screen.clear()
# number screen
disp.fill(number_screen)
disp.show()
libtime.pause(1000)
number_screen.clear()
# draws image pair
from pygaze.display import Display
from pygaze.screen import Screen
from pygaze.eyetracker import EyeTracker
import pygaze.libtime as timer

# Splash message while the hardware is being prepared.
disp = Display()
scr = Screen()
scr.draw_text("Preparing experiment...", fontsize=20)
disp.fill(scr)
disp.show()

# Set up and calibrate the eye tracker, then begin sampling.
tracker = EyeTracker(disp)
tracker.calibrate()
tracker.start_recording()

# For five seconds, keep redrawing a fixation dot at the latest gaze sample.
deadline = timer.get_time() + 5000
while timer.get_time() < deadline:
    gaze = tracker.sample()
    scr.clear()
    scr.draw_fixation(fixtype='dot', pos=gaze)
    disp.fill(scr)
    disp.show()

# Tear down: stop sampling, release the tracker and the display.
tracker.stop_recording()
tracker.close()
disp.close()
def run(self):
    '''
    Main game session: builds the level (walls, ATM, monsters), runs the
    pygame loop with eye-tracking side logging until the player wins,
    loses, runs out of time, or quits, then writes all logs and shows
    the end screen.  Relies on module-level state defined elsewhere:
    `obstacles`, `cards_list`, `clock`, `go_sound`, `card_sound`,
    `cash_sound`, `dist`, `MOUSEBUTTONDOWN`, `objects`, `BackGround`,
    `GameEnd`.
    '''
    dt = datetime.now()
    # Eye tracker configure
    eyetracker = EyeTracker(self.disp)
    eyetracker.calibrate()
    self.disp.fill(self.canvas)
    self.disp.show()
    #self.disp.mousevis = True
    eyetracker.start_recording()
    # Sprite stand-in that follows the gaze position (for collision
    # tests between gaze and food items).
    etObject = objects.EyeTracker(0, 0, 20, 20)
    ##END
    # NOTE(review): pygame.mixer.music.load returns None; main_music is
    # never used.
    main_music = pygame.mixer.music.load("media/sounds/megalovania.wav")
    pygame.mixer.music.play()
    pygame.mixer.music.set_volume(0.6)
    bob = self.player
    # List to hold all the sprites
    all_sprite_list = pygame.sprite.Group()
    # Make the walls. (x_pos, y_pos, width, height)
    wall_list = pygame.sprite.Group()
    # List of Foods
    food_list = pygame.sprite.Group()
    # Outer border walls (left, top, right, bottom).
    wall = objects.Wall("", 0, 40, 10, 560, 1)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    wall = objects.Wall("", 10, 40, 980, 10, 1)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    wall = objects.Wall("", 990, 40, 10, 560, 1)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    wall = objects.Wall("", 10, 590, 980, 10, 1)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    ## Gondulas (inner platforms)
    wall = objects.Wall("", 100, 200, 226, 40, 8)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    wall = objects.Wall("", 100, 400, 226, 40, 8)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    wall = objects.Wall("", 620, 200, 226, 40, 8)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    wall = objects.Wall("", 620, 400, 226, 40, 8)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    wall = objects.Wall("", 450, 220, 40, 226, 9)
    wall_list.add(wall)
    all_sprite_list.add(wall)
    obstacles.append(wall)
    ## ATM
    atm_list = pygame.sprite.Group()
    atm = objects.ATM("", 940, 45, 13, 35, 1)
    atm_list.add(atm)
    all_sprite_list.add(atm)
    ## Monsters (fast-food enemies; obj_type 4 moves horizontally)
    monster_list = pygame.sprite.Group()
    fFood = objects.FastFood("", 850, 400, 30, 30, 3)
    monster_list.add(fFood)
    all_sprite_list.add(fFood)
    fFood2 = objects.FastFood("", 500, 130, 30, 30, 3)
    monster_list.add(fFood2)
    all_sprite_list.add(fFood2)
    fFood = objects.FastFood("", 270, 100, 30, 30, 3)
    monster_list.add(fFood)
    all_sprite_list.add(fFood)
    fFood = objects.FastFood("", 220, 450, 30, 30, 3)
    monster_list.add(fFood)
    all_sprite_list.add(fFood)
    fFood = objects.FastFood("", 450, 470, 30, 30, 4)
    monster_list.add(fFood)
    all_sprite_list.add(fFood)
    ##
    bob.walls = wall_list
    all_sprite_list.add(bob)
    bob.updateValues()
    #pygame.draw.rect(self.screen, (255,0,0) ,((bob.position),(bob.collisionWidth, bob.collisionHeight)),0)
    for wall in obstacles:
        pygame.draw.rect(self.screen, (0, 0, 0), wall.rect, 0)
    # Periodic user events driving the game clock, card spawning,
    # monster movement, food respawn and eye-tracking logging.
    time_decrement = pygame.USEREVENT + 1
    T1 = 1000  # second
    pygame.time.set_timer(time_decrement, T1)
    card_generator = pygame.USEREVENT + 2
    T2 = 4000  # 4 second
    pygame.time.set_timer(card_generator, T2)
    monster_move = pygame.USEREVENT + 3
    T3 = 100  # 0,1 second
    pygame.time.set_timer(monster_move, T3)
    food_time = pygame.USEREVENT + 4
    T4 = 8000  # 8 seconds
    pygame.time.set_timer(food_time, T4)
    eyeTracker_time = pygame.USEREVENT + 5
    T5 = 1000  # 1 seconds
    pygame.time.set_timer(eyeTracker_time, T5)
    logRecord_time = pygame.USEREVENT + 6
    T6 = 1000  # 1 seconds
    pygame.time.set_timer(logRecord_time, T6)
    # record the time a fixation starts
    logRecord_fixation = pygame.USEREVENT + 7
    T7 = 1000  # 2 seconds (value says 1000 ms — comment kept from original)
    pygame.time.set_timer(logRecord_fixation, T7)
    cards_hit_list = pygame.sprite.spritecollide(bob, cards_list, False)
    bground = BackGround(self.screen)
    cont_blinks = 0
    gameRunning = True
    staring = False
    position = 0
    blinkCount = 0
    lastBlinkPos = (0, 0)
    # Spawn the first credit-card pickup at a random position.
    x = random.randint(50, 800)
    y = random.randint(50, 400)
    cash = objects.Cash("", x, y, 20, 20, 2)
    cards_list.append(cash)
    pygame.draw.rect(self.screen, (0, 255, 0), cash.rect, 0)
    all_sprite_list.add(cash)
    # start of the game loop
    while (gameRunning):
        self.canvas.clear()
        self.screen.fill((255, 255, 255))
        self.screen.blit(self.image, (0, 40))
        clock.tick(60)
        # Collisions this frame: player vs pickups/monsters/ATM/food,
        # and gaze-object vs food (what the participant is looking at).
        cards_hit_list = pygame.sprite.spritecollide(
            bob, cards_list, False)
        monster_hit_list = pygame.sprite.spritecollide(
            bob, monster_list, False)
        atm_hit_list = pygame.sprite.spritecollide(bob, atm_list, False)
        food_hit_list = pygame.sprite.spritecollide(bob, food_list, False)
        etSawList = pygame.sprite.spritecollide(etObject, food_list, False)
        if (len(etSawList) == 0):
            staring = False
        # Block the player from walking through the ATM.
        for atm in atm_hit_list:
            if (bob.direction == "up"):
                bob.rect.top = atm.rect.bottom
            elif (bob.direction == "right"):
                bob.rect.right = atm.rect.left
            elif (bob.direction == "left"):
                bob.rect.left = atm.rect.right
        # Touching a monster ends the game.
        for monster in monster_hit_list:
            pygame.mixer.music.fadeout(1000)
            pygame.mixer.Sound.play(go_sound)
            pygame.time.delay(3500)
            gameRunning = False
        # Collect credit cards.
        for card in cards_hit_list:
            bob.c_card += 1
            pygame.mixer.Sound.play(card_sound)
            cards_list.remove(card)
            all_sprite_list.remove(card)
            #self.avalgame.storeCreditCollection(self.startTime)
        # Buy the food the player stands on with Return.
        # NOTE(review): `event` here is whatever was left from the LAST
        # pass of the event loop below (undefined on the first frame) —
        # looks like a bug; confirm intended behaviour.
        for food in food_hit_list:
            if (event.type == pygame.KEYDOWN
                    and event.key == pygame.K_RETURN
                    and bob.cash >= food.value):
                bob.buyFood(food)
                food_list.empty()
                self.player.total_produtos += 1
        if (bob.score >= 10):
            gameRunning = False
        ##### FRAME EVENTS
        ## Time Decrementer
        for event in pygame.event.get():
            if (event.type == time_decrement):
                bob.time -= 1
            if (event.type == eyeTracker_time):
                # check whether a fixation occurred
                # NOTE(review): local `time` shadows the time module.
                time = libtime.get_time()
                getX, getY = eyetracker.sample()
                self.dataStore.get_quadrant((getX, getY))
                self.dataStore.start_fixation((getX, getY))
            if (event.type == logRecord_time):
                # Move the gaze sprite and log stared-at foods.
                etObject.setPosition(eyetracker.sample())
                for food in etSawList:
                    self.dataStore.start_staring(food.food_type)
            if (event.type == MOUSEBUTTONDOWN):
                # Blink bookkeeping: wait_for_event(3)/(4) are blink
                # start/end events — TODO confirm the event codes.
                start_time = eyetracker.wait_for_event(3)
                time_end = eyetracker.wait_for_event(4)
                # cont_blinks += 1
                self.dataStore.start_blinking(str(cont_blinks),
                                              start_time, time_end)
                tracker_pos = eyetracker.sample()
                if (tracker_pos != lastBlinkPos):
                    self.dataStore.start_blinkingTest(
                        lastBlinkPos, blinkCount)
                    lastBlinkPos = tracker_pos
                    blinkCount = 1
                else:
                    blinkCount += 1
            # Respawn one food of each type every 8 seconds.
            if (event.type == food_time):
                food_list.empty()
                newFood1 = objects.Food(5, "vegetal")
                food_list.add(newFood1)
                newFood2 = objects.Food(5, "carbohidrato")
                food_list.add(newFood2)
                newFood3 = objects.Food(5, "doce")
                food_list.add(newFood3)
                newFood4 = objects.Food(5, "proteina")
                food_list.add(newFood4)
            ## Credit Card Generator (25% chance, max 2 on screen)
            if (event.type == card_generator and len(cards_list) < 2):
                cashGenerator = random.randint(0, 100)
                if (cashGenerator <= 25):
                    x = random.randint(50, 800)
                    y = random.randint(50, 400)
                    cash = objects.Cash("", x, y, 20, 20, 2)
                    cards_list.append(cash)
                    pygame.draw.rect(self.screen, (0, 255, 0),
                                     cash.rect, 0)
                    all_sprite_list.add(cash)
            ## Monster Movement: bounce between walls; obj_type 4 moves
            ## horizontally, the rest vertically.
            if (event.type == monster_move):
                for monster in monster_list:
                    monsterCollision = pygame.sprite.spritecollide(
                        monster, wall_list, False)
                    if (monster.movingPositive):
                        if (len(monsterCollision) == 0):
                            if monster.obj_type == 4:
                                monster.rect.left -= 15
                            else:
                                monster.rect.top -= 15
                        else:
                            if monster.obj_type == 4:
                                monster.rect.right += 15
                                monster.movingPositive = False
                            else:
                                monster.rect.bottom += 15
                                monster.movingPositive = False
                    else:
                        if (len(monsterCollision) == 0):
                            if monster.obj_type == 4:
                                monster.rect.right += 15
                            else:
                                monster.rect.bottom += 15
                        else:
                            if monster.obj_type == 4:
                                monster.rect.left -= 15
                                monster.movingPositive = True
                            else:
                                monster.rect.top -= 15
                                monster.movingPositive = True
            ## Player Input
            if (event.type == pygame.KEYDOWN):
                pygame.event.set_blocked(pygame.KEYDOWN)
                if (event.key == pygame.K_ESCAPE):
                    gameRunning = False
                elif (event.key == pygame.K_UP):
                    bob.acceleration = 5
                    bob.direction = "up"
                elif (event.key == pygame.K_DOWN):
                    bob.acceleration = 5
                    bob.direction = "down"
                elif (event.key == pygame.K_LEFT):
                    bob.acceleration = 5
                    bob.direction = "left"
                elif (event.key == pygame.K_RIGHT):
                    bob.acceleration = 5
                    bob.direction = "right"
                elif (event.key == pygame.K_RETURN):
                    # Withdraw cash when close enough to the ATM with a
                    # card.  NOTE(review): `atm` is whatever the last
                    # ATM-collision loop left bound — verify.
                    if (dist(bob.rect.x, bob.rect.y, atm.rect.x,
                             atm.rect.y) <= 65 and bob.c_card >= 1):
                        pygame.mixer.Sound.play(cash_sound)
                        bob.c_card -= 1
                        bob.cash += 15
                        self.player.cashTotal += 15
            if (event.type == pygame.KEYUP):
                bob.acceleration = 0
                pygame.event.set_allowed(pygame.KEYDOWN)
        # Apply movement in the current direction.
        bob.updateValues()
        if (bob.direction == "up"):
            bob.moveUp()
        elif (bob.direction == "down"):
            bob.moveDown()
        elif (bob.direction == "left"):
            bob.moveLeft()
        elif (bob.direction == "right"):
            bob.moveRight()
        if bob.time <= 0:
            gameRunning = False
        # Draw the frame.
        all_sprite_list.update()
        all_sprite_list.draw(self.screen)
        food_list.draw(self.screen)
        self.screen.blit(bob.timeLabel, (450, 0))
        # Player Interface Draw
        cash_x = 0
        cash_y = 20
        self.screen.blit(bob.cashLabel, (cash_x, cash_y))
        cCard_x = 0
        cCard_y = 0
        self.screen.blit(bob.c_cardLabel, (cCard_x, cCard_y))
        self.screen.blit(bob.scoreLabel,
                         (1000 - bob.scoreLabel.get_rect().width - 50, 0))
        ## BARS
        self.screen.blit(bob.carboLabel, (55, 610))
        self.screen.blit(bob.vegLabel, (30, 640))
        self.screen.blit(bob.protLabel, (565, 610))
        self.screen.blit(bob.doceLabel, (500, 640))
        self.progressBars(bob)
        ## Display
        #pygame.display.flip()
        self.disp.fill(self.canvas)
        self.disp.show()
        pygame.event.set_allowed(pygame.KEYDOWN)
    # Game over: switch music and compute the evaluation scores.
    pygame.mixer.music.fadeout(1000)
    pygame.mixer.music.load("media/sounds/crimson.wav")
    pygame.mixer.music.play()
    # 2.5 points per food-pyramid category completed exactly.
    pyramidCompletion = 0.0
    if (self.player.doce == 10):
        pyramidCompletion += 2.5
    if (self.player.proteina == 20):
        pyramidCompletion += 2.5
    if (self.player.vegetal == 30):
        pyramidCompletion += 2.5
    if (self.player.carbohidrato == 40):
        pyramidCompletion += 2.5
    self.avalgame.storePyramidCompletion(self.startTime,
                                         valor_AEEJ=pyramidCompletion)
    # Bucketed count of products bought.
    foodTotal = 0
    if (0 < self.player.total_produtos <= 5):
        foodTotal = 1
    elif (5 < self.player.total_produtos <= 10):
        foodTotal = 2
    elif (self.player.total_produtos > 10):
        foodTotal = 3
    self.avalgame.storeFoodQuantity(self.startTime, valor_AEEJ=foodTotal)
    # Products bought per unit of cash earned, as a percentage.
    if self.player.cashTotal == 0:
        averageScore = 0
    else:
        averageScore = float(
            float(self.player.total_produtos)
            / float(self.player.cashTotal)) * 100
    self.avalgame.storeAverageScore(self.startTime,
                                    valor_AEEJ=averageScore)
    # Flush the last blink entry and write out all collected logs.
    self.dataStore.start_blinkingTest(lastBlinkPos, blinkCount)
    self.dataStore.log_gen.recordBlinkLog(self.dataStore.blink_log,
                                          'blink-', 4,
                                          self.avalgame._playerCode)
    self.dataStore.log_gen.recordLog(self.dataStore.blink_log2, 'blink2-',
                                     4, self.avalgame._playerCode)
    self.dataStore.log_gen.recordLog(self.dataStore.staring_log,
                                     'products-', 3,
                                     self.avalgame._playerCode)
    self.dataStore.log_gen.recordLog(self.dataStore.quadrant_log,
                                     'quadrants', 2,
                                     self.avalgame._playerCode)
    self.dataStore.log_gen.recordLog(self.dataStore.position_log,
                                     'fixation-', 1,
                                     self.avalgame._playerCode)
    self.avalgame.recordBestScore(self.player.time, self.player.score)
    # End screen.
    ge = GameEnd(self.canvas, self.disp)
    ge.defScore(bob.score)
    ge.defResult(bob.score)
    #ge.storeData(etObject.log)
    #ge.storeData2(etObject.log2)
    #ge.storeDataBlink(etObject.log_blink)
    #ge.storeDataFixation(etObject.log_fixation)
    ge.run()
# loop through runs for i, currRun in enumerate(trials): disp.fill(inter_run) # fill display t = disp.show()# show display event_log.write([t, "run %d onset" % (i)]) ### CONTINUE WHEN BUTTON PRESSED ### if MRI: # if MEG repeatedly loop until button state changes button, t1 = trigbox.wait_for_button_press(allowed=[MAIN_BUT], timeout=None) t1 = timer.get_time() # btn_pressed = False # set flag to false # while btn_pressed != True: # btn_list, state = trigbox.get_button_state(button_list = [MAIN_BUT]) # # State turns to False, the button was pressed. # if state[0] == False: # btn_pressed = True else: mousebutton, clickpos, t1 = mouse.get_clicked() event_log.write([t1, "buttonpress"]) # loop through blocks for ii, currBlock in enumerate(currRun): # Inter-block break screen. #inter block screen inter_block = Screen()
def start_staring(self, food_type):
    """Append a timestamped entry recording that *food_type* is being stared at."""
    label = str(food_type) + " "
    entry = LogData(label, libtime.get_time())
    self.staring_log.append(entry)
# Script tail: run one trial per image in IMAGES, recording gaze while
# the participant views the stimulus through a forced retinal location
# (frl) window until any key is pressed, then close everything.
# `IMAGES`, `disp`, `scr`, `tracker`, `frl`, `kb`, `log` come from
# earlier in the script (outside this view).
for trialnr in range(0, len(IMAGES)):
    # blank display
    disp.fill()
    disp.show()
    libtime.pause(1000)
    # prepare stimulus
    scr.clear()
    scr.draw_image(IMAGES[trialnr])
    # start recording eye movements
    tracker.drift_correction()
    tracker.start_recording()
    tracker.status_msg("trial %d" % trialnr)
    tracker.log("start trial %d" % trialnr)
    # present stimulus
    response = None
    trialstart = libtime.get_time()
    # Redraw the FRL window at the gaze position until a key is pressed.
    while not response:
        gazepos = tracker.sample()
        frl.update(disp, scr, gazepos)
        response, presstime = kb.get_key(timeout=1)
    # stop tracking and process input
    tracker.stop_recording()
    tracker.log("stop trial %d" % trialnr)
    # One row per trial: index, onset, keypress time, RT, image name.
    log.write([
        trialnr, trialstart, presstime, presstime - trialstart,
        IMAGES[trialnr]
    ])
# close experiment
log.close()
tracker.close()
disp.close()
def start_blinkingTest(self, position, blinkCount):
    """Append a timestamped entry pairing a gaze *position* with its blink count."""
    # Earlier formatting variant, kept from the original for reference:
    # data = LogData( "(" + position[0] + "," + position[1] + ") " + str(blinkCount), (libtime.get_time()))
    message = str(position) + " " + str(blinkCount) + " "
    entry = LogData(message, libtime.get_time())
    self.blink_log.append(entry)
# # # # # # PYGAZE INSTANCES # visual disp = Display() scr = Screen() # input js = Joystick() # # # # # # RUN # run until a minute has passed t0 = timer.get_time() t1 = timer.get_time() text = "Test the joystick!" while t1 - t0 < 60000: # get joystick input event, value, t1 = js.get_joyinput(timeout=10) # update text if event != None: text = text="%s: %s" % (event, value) if event == 'joyaxismotion' and RUMBLE: set_vibration(0, max(0, value[2]), max(0, -value[2])) # display on screen scr.clear() scr.draw_text(text="%s\n\n(%.2f)" % (text, float(t1-t0)/1000.0), fontsize=24) # show text disp.fill(scr)
class GenerateInfo:
    """Collects timestamped eye-tracking log entries (staring, blinks,
    screen quadrants, fixations) and holds the generator used to write
    them out at the end of a session."""

    def __init__(self):
        self.log_gen = LogGenerator()   # writer used by run() at session end
        self.staring_log = []           # foods the gaze rested on
        self.blink_log = []             # (position, blink count) entries
        self.blink_log2 = []            # blink timestamps
        self.quadrant_log = []          # screen-quadrant labels
        self.position_log = []          # fixation positions
        self.fixation_log = []          # reserved; not filled here

    def start_staring(self, food_type):
        """Append a timestamped entry recording that *food_type* is being
        stared at."""
        data = LogData(str(food_type) + " ", (libtime.get_time()))
        self.staring_log.append(data)

    def start_blinkingTest(self, position, blinkCount):
        """Append a timestamped entry pairing a gaze *position* with its
        blink count."""
        data = LogData(
            str(position) + " " + str(blinkCount) + " ",
            (libtime.get_time()))
        self.blink_log.append(data)

    def start_blinking(self, cont_blink, start_time, time_end):
        """Append blink number *cont_blink* stamped with the blink's end
        time; prints the blink duration (end - start) as a side effect.
        *start_time*/*time_end* are (timestamp, ...) sequences — presumably
        tracker blink events; confirm against the caller."""
        print(str(time_end[0] - start_time[0]))
        ts = float(time_end[0])  # renamed from `time` to avoid shadowing the module
        data = LogData(str(cont_blink), ts)
        self.blink_log2.append(data)

    @staticmethod
    def _quadrant_name(x, y):
        """Map a screen coordinate to its quadrant label ("A1".."D4") on a
        1000x600 grid of 250x150 cells, or "FORA" (outside) when the point
        is off-grid.  Boundary coordinates belong to the first matching
        cell, preserving the original chain's first-match behaviour."""
        ## QUADRANTE A
        if (0 <= x <= 250) and (0 <= y <= 150):
            return "A1"
        elif (250 <= x <= 500) and (0 <= y <= 150):
            return "A2"
        elif (0 <= x <= 250) and (150 <= y <= 300):
            return "A3"
        elif (250 <= x <= 500) and (150 <= y <= 300):
            return "A4"
        ## QUADRANTE B
        elif (500 <= x <= 750) and (0 <= y <= 150):
            return "B1"
        elif (750 <= x <= 1000) and (0 <= y <= 150):
            return "B2"
        elif (500 <= x <= 750) and (150 <= y <= 300):
            return "B3"
        elif (750 <= x <= 1000) and (150 <= y <= 300):
            return "B4"
        ## QUADRANTE C
        elif (0 <= x <= 250) and (300 <= y <= 450):
            return "C1"
        elif (250 <= x <= 500) and (300 <= y <= 450):
            return "C2"
        elif (0 <= x <= 250) and (450 <= y <= 600):
            return "C3"
        elif (250 <= x <= 500) and (450 <= y <= 600):
            return "C4"
        ## QUADRANTE D
        elif (500 <= x <= 750) and (300 <= y <= 450):
            return "D1"
        elif (750 <= x <= 1000) and (300 <= y <= 450):
            return "D2"
        elif (500 <= x <= 750) and (450 <= y <= 600):
            return "D3"
        elif (750 <= x <= 1000) and (450 <= y <= 600):
            return "D4"
        return "FORA"

    def get_quadrant(self, pos):
        """Append a timestamped quadrant-label entry for gaze position
        *pos*, an (x, y) tuple.

        Fix: the original signature used Python 2 tuple parameter
        unpacking (`def get_quadrant(self, (x, y))`), which is a
        SyntaxError in Python 3.  Call sites pass a single (x, y) tuple
        (e.g. `get_quadrant((getX, getY))`), so accepting one tuple
        argument keeps every caller working unchanged."""
        x, y = pos
        quadrant = self._quadrant_name(x, y)
        data = LogData(quadrant + " ", libtime.get_time())
        self.quadrant_log.append(data)