def classify_Spindles(self):
    """Measure frequency and amplitude for every detected spindle.

    Reads the ranges stored in self.Spindleranges (tuples of
    (collapsedchannel, (start, end)) in points) from the filtered
    BrainVision file and fills self.Spindles with tuples
    (start_s, duration_ms, Spindle_info), where Spindle_info maps a
    channel name to [frequency_Hz, amplitude].
    """
    f = avg_q.avg_q_file(self.filteredfile, 'BrainVision')
    self.Spindles = []  # Tuples (start_s,duration_ms,Spindle_info dict)
    for collapsedchannel, (start, end) in self.Spindleranges:
        duration_ms = (end - start) / self.sfreq * 1000
        # Count the zero crossings
        detector = avg_q.Detector.Detector(self.avg_q_instance)
        epochsource = avg_q.Epochsource(
            f, '100ms', '%gms' % (duration_ms + 200))  # Add 100ms on both ends
        epochsource.set_trigpoints('%gs' % (start / self.sfreq))
        detector.add_Epochsource(epochsource)
        detector.add_transform('''
write_crossings ALL 0 stdout
''')
        crossings = detector.detect()
        trigger_dict = avg_q.Detector.get_trigger_dict(crossings)
        Spindle_info = {}  # Will contain lists [freq_Hz,amplitude]
        for channel, cr in trigger_dict.items():
            # Mean period derived from the span between first and last zero
            # crossing: each crossing-to-crossing interval is half a period.
            period_s = 2 * (cr[-1][0] - cr[0][0]) / (len(cr) - 1) / detector.sfreq
            Spindle_info[channel] = [1.0 / period_s]
        # Second pass over the same epoch: rectify and average to get the
        # mean absolute amplitude per channel.
        script = avg_q.Script(self.avg_q_instance)
        script.add_Epochsource(epochsource)
        script.add_transform('''
calc abs
trim -a 0 0
write_generic -P -N stdout string
''')
        for line in script.runrdr():
            channel, amp = line.split('\t')
            Spindle_info[channel].append(float(amp))
        self.Spindles.append(
            (start / self.sfreq, duration_ms, Spindle_info))
def transferSection(self, sectionstart, sectionbefore, sectionafter, addmethods=None):
    """Read one section of the input file, apply optional processing, and
    append the result to the corrected output HDF file.

    sectionstart is the trigger point, sectionbefore/sectionafter delimit
    the epoch around it; addmethods, if given, is an avg_q transform
    script applied before writing.
    """
    doscript = addmethods if addmethods else ''
    # NOTE(review): dead string below — looks like disabled alternative
    # output methods kept for reference (the 'sensitivity' dict entry
    # further down is only used by these). Confirm it is intentional.
    '''
    fftfilter 40Hz 45Hz 1 1
    sliding_average 1 10ms
    write_generic %(append_arg)s %(correctedfile)s.eeg int16
    writeasc %(append_arg)s %(correctedfile)s.asc
    write_synamps -c %(append_arg)s %(correctedfile)s.cnt %(sensitivity)g
    '''
    writefile = '''
sliding_average %(sliding_size)sms %(sliding_step)sms
write_hdf -c %(append_arg)s %(correctedfile)s.hdf
''' % {
        'sliding_size': 2000.0 / self.correctedfile_sfreq,
        'sliding_step': 1000.0 / self.correctedfile_sfreq,
        'append_arg': '-a' if self.append else '',
        'correctedfile': self.correctedfile,
        'sensitivity': 10,
    }
    script = avg_q.Script(self.avg_q_instance)
    epochsource = avg_q.Epochsource(self.infile, sectionbefore, sectionafter)
    epochsource.set_trigpoints(sectionstart)
    script.add_Epochsource(epochsource)
    script.add_transform(doscript)
    script.add_transform(writefile)
    script.run()
def average_ECG(self):
    '''Average ECG events with rejection.
    Returns the number of accepted ECGs.'''
    # If an average file already exists, reuse it and just report its
    # stored number of averages.
    if os.path.exists(self.avgECGfile):
        ascfile = avg_q.avg_q_file(self.avgECGfile)
        return self.avg_q_instance.get_description(ascfile, 'nrofaverages')
    self.detect_ECG()
    epochsource = avg_q.Epochsource(self.Epochsource_list[0].infile,
                                    self.ECG_beforetrig, self.ECG_aftertrig)
    epochsource.set_trigpoints(self.ECGtriggers)
    script = avg_q.Script(self.avg_q_instance)
    script.add_Epochsource(epochsource)
    # Zero out protected channels so they cannot cause rejection.
    if self.protect_channels is not None and len(self.protect_channels) > 0:
        script.add_transform('''
scale_by -n ?%(protect_channels)s 0
''' % {
            'protect_channels': channel_list2arg(self.protect_channels),
        })
    script.add_transform('''
baseline_subtract
reject_bandwidth %(reject_bandwidth)f
''' % {
        'reject_bandwidth': self.ECG_reject_bandwidth,
    })
    script.set_collect('average')
    script.add_postprocess('''
query nrofaverages stdout
writeasc -b %(avgECGfile)s
''' % {'avgECGfile': self.avgECGfile})
    rdr = script.runrdr()
    # The only output line is the nrofaverages query result.
    nrofaverages = 0
    for line in rdr:
        nrofaverages = int(line)
    return nrofaverages
def collect_artifacts(self, remove_channels=None, preprocess=''):
    '''Analyze the given section of the continuous file for artifacts.
    remove_channels is a list of channel names to exclude for detection.'''
    epochsource = avg_q.Epochsource(self.infile, 0,
                                    self.end_point - self.start_point)
    epochsource.set_trigpoints(self.start_point)
    script = avg_q.Script(self.avg_q_instance)
    script.add_Epochsource(epochsource)
    script.set_collect('append')
    import tempfile
    # Scratch file holding per-channel scale values (trim -M output),
    # shared between the detection branches below.
    tempscalefile = tempfile.NamedTemporaryFile()
    # Three detection passes over the appended data:
    # 1) derivative amplitude -> jump (step) detection,
    # 2) recoded zero runs -> detection of flat/blocked stretches,
    # 3) low-pass filtered amplitude -> general artifact detection.
    script.add_postprocess(
        '''
%(remove_channels)s
%(preprocess)s
push
differentiate
calc abs
push
trim -M 0 0
writeasc -b -c %(tempscalefile)s
pop
subtract -d -P -c %(tempscalefile)s
push
collapse_channels -h
write_crossings -E collapsed %(JumpDetectionThreshold)g stdout
pop
collapse_channels -l
recode 0 0 1 1 0 Inf 0 0
write_crossings collapsed 0.5 stdout
pop
fftfilter 0 0 30Hz 35Hz
subtract -d -P -c %(tempscalefile)s
calc abs
collapse_channels -h
write_crossings -E collapsed %(ArtifactDetectionThreshold)g stdout
''' % {
            'remove_channels': 'remove_channel -n ?' + channel_list2arg(remove_channels) if remove_channels else '',
            'preprocess': preprocess,
            'tempscalefile': tempscalefile.name,
            'JumpDetectionThreshold': self.JumpDetectionThreshold,
            'ArtifactDetectionThreshold': self.ArtifactDetectionThreshold
        })
    crossings = trgfile.trgfile(script.runrdr())
    self.collected = collect_crossings(self.min_blocking_points)
    self.collected.add_crossings(crossings, self.start_point, self.end_point)
    tempscalefile.close()  # The temp file will be deleted at this point.
def show_artifacts(self, epochlength=0):
    '''Read the full continuous file and add triggers showing the breakpoints
    (and -regions).
    Note that this currently works for avg_q_vogl only, not for avg_q_ui.'''
    source = avg_q.Epochsource(self.infile,
                               aftertrig=epochlength,
                               continuous=True,
                               trigtransfer=True)
    source.set_trigpoints(self.collected.get_tuples())
    viewer_script = avg_q.Script(self.avg_q_instance)
    viewer_script.add_Epochsource(source)
    viewer_script.set_collect('append -l')
    viewer_script.add_postprocess('posplot')
    viewer_script.run()
def get_threshold(self, start_s):
    '''Derive a workable EPI threshold from the first 5s of data with EPI artefact.'''
    thrscript = avg_q.Script(self.avg_q_instance)
    source = avg_q.Epochsource(self.infile, '0s', '5s')
    source.set_trigpoints('%gs' % start_s)
    thrscript.add_Epochsource(source)
    thrscript.add_transform(self.collapseit)
    thrscript.add_transform('''trim -h 0 0''')
    thrscript.set_collect('minmax')
    thrscript.add_postprocess('write_generic stdout string')
    # Output is "min max"; half of the observed maximum becomes the threshold.
    for outline in thrscript.runrdr():
        _minval, maxval = outline.split()
        self.threshold = float(maxval) * 0.5
    print("Automatically set threshold to %g" % self.threshold)
def add_regressor_avg_q_file(self, regressor_avg_q_file):
    '''Export every channel of the given avg_q file to a .dat file and
    register each as a regressor.'''
    channelnames = self.avg_q_instance.get_description(
        regressor_avg_q_file, 'channelnames')
    for channelname in channelnames:
        export_script = avg_q.Script(self.avg_q_instance)
        export_script.add_Epochsource(avg_q.Epochsource(regressor_avg_q_file))
        export_script.add_transform('remove_channel -k %s' %
                                    escape_channelname(channelname))
        # An offset produces irreparable problems with waver's padding
        export_script.add_transform('detrend')
        export_script.add_transform('write_generic %s.dat string' %
                                    escape_filename(channelname))
        export_script.run()
        self.add_regressor_file(channelname, '%s.dat' % channelname)
def average_EOG(self):
    '''Average EOG events with strict checks for duration and surroundings.
    Returns the number of accepted EOGs.'''
    # If an average file already exists, reuse it and just report its
    # stored number of averages.
    if os.path.exists(self.avgEOGfile):
        ascfile = avg_q.avg_q_file(self.avgEOGfile)
        return self.avg_q_instance.get_description(ascfile, 'nrofaverages')
    self.detect_VEOG()
    epochsource = avg_q.Epochsource(self.Epochsource_list[0].infile,
                                    self.VEOG_beforetrig,
                                    self.VEOG_aftertrig,
                                    offset=self.VEOG_offset)
    epochsource.set_trigpoints(self.VEOGtriggers)
    script = avg_q.Script(self.avg_q_instance)
    script.add_Epochsource(epochsource)
    # Zero out protected channels so they cannot cause rejection.
    if self.protect_channels is not None and len(
            self.protect_channels) > 0:
        script.add_transform('''
scale_by -n ?%(protect_channels)s 0
''' % {
            'protect_channels': channel_list2arg(self.protect_channels),
        })
    if self.preprocess:
        script.add_transform(self.preprocess)
    # Note that we require the VEOG signal to be "close to baseline" before and after the maximum
    script.add_transform(
        '''
baseline_subtract
push
%(get_VEOG_script)s
trim -x 0 100 400 500
calc abs
reject_bandwidth -m %(max_VEOG_amp_outside_window)f
pop
''' % {
            'get_VEOG_script': self.get_VEOG_script,
            'max_VEOG_amp_outside_window': self.VEOG_minamp
        })
    script.set_collect('average')
    script.add_postprocess('''
query nrofaverages stdout
writeasc -b %(avgEOGfile)s
''' % {'avgEOGfile': self.avgEOGfile})
    rdr = script.runrdr()
    # The only output line is the nrofaverages query result.
    nrofaverages = 0
    for line in rdr:
        nrofaverages = int(line)
    return nrofaverages
def write_DC_Corrected(self, outfilename):
    """Write the DC-corrected continuous data to outfilename.

    For each epoch of self.epochlen_s seconds, the fitted DC course is
    evaluated from self.interpolators, written to a temp file, and
    subtracted from the raw data before appending to the output file
    (.cnt -> write_synamps, otherwise BrainVision IEEE_FLOAT_32).
    """
    # Write the output file
    import numpy
    import tempfile
    tempepochfile = tempfile.NamedTemporaryFile()
    self.save_state()
    # Replace the epoch source by our own one
    infile = self.Epochsource_list[0].infile
    epochsource = avg_q.Epochsource(infile)
    self.Epochsource_list[0] = epochsource
    sfreq, points_in_file = self.avg_q_instance.get_description(
        infile, ('sfreq', 'points_in_file'))
    # Prepare our epoch source for continuous reading
    epochsource.beforetrig = 0
    epochsource.aftertrig = self.epochlen_s * sfreq
    epochsource.continuous = True
    epochsource.epochs = 1
    epochsource.trigtransfer = True
    # Subtract the interpolated DC epoch, then append to the output file.
    self.add_transform('subtract %s' % tempepochfile.name)
    if outfilename.endswith('.cnt'):
        self.add_transform('write_synamps -a -c %s 1' % outfilename)
    else:
        self.add_transform('write_brainvision -a %s IEEE_FLOAT_32' % outfilename)
    epoch = self.epochs[0]
    import os
    # Remove any previous output since we write in append mode.
    if os.path.exists(outfilename):
        os.unlink(outfilename)
    for epochno in range(int(points_in_file / sfreq / self.epochlen_s)):
        # x is computed in seconds
        x = epochno * self.epochlen_s + numpy.arange(
            self.epochlen_s * sfreq) / sfreq
        data = numpy.empty((self.epochlen_s * sfreq, epoch.nr_of_channels))
        # Evaluate one interpolator per channel at the epoch's time points.
        for channel, s in enumerate(self.interpolators):
            data[:, channel] = s(x)
        newepoch = numpy_Script.numpy_epoch(data)
        newepoch.channelnames = epoch.channelnames
        newepoch.channelpos = epoch.channelpos
        newepoch.sfreq = sfreq
        # Write the interpolated DC epoch to the temp file, then run the
        # main script on the corresponding raw epoch.
        tmpepochsource = numpy_Script.numpy_Epochsource([newepoch])
        tmpscript = avg_q.Script(self.avg_q_instance)
        tmpscript.add_Epochsource(tmpepochsource)
        tmpscript.add_transform('writeasc -b %s' % tempepochfile.name)
        tmpscript.run()
        epochsource.fromepoch = epochno + 1
        self.run()
    self.restore_state()
    tempepochfile.close()  # The temp file will be deleted at this point.
def getTemplate(self, trigpoint):
    """Extract an EPI template epoch around trigpoint into
    self.templatefilename (no-op if a template was already extracted)."""
    if not self.haveTemplate:
        # The template is normalized so that 1 should be obtained when convolved with the original trace
        get_template_script = '''
scale_by invpointsquarenorm
scale_by nr_of_points
writeasc -b %(templatefilename)s
''' % {
            'templatefilename': self.templatefilename
        }
        script = avg_q.Script(self.avg_q_instance)
        # Read template_points/2 points on either side of the trigger.
        epochsource = avg_q.Epochsource(self.infile,
                                        self.template_points / 2,
                                        self.template_points / 2)
        epochsource.set_trigpoints(trigpoint)
        script.add_Epochsource(epochsource)
        script.add_transform(self.collapseit)
        script.add_transform(self.upsampleit)
        script.add_transform(get_template_script)
        script.run()
        self.haveTemplate = True
def get_measures_using_epochfilter(self, cntfile, epochfilter, bands=defaultbands):
    '''Average epochs from cnt and directly measure the result.
    If bands is None, don't collapse bands at all but average frequency bins as-is.

    Generator: yields one list per output channel/line, consisting of
    [nrofaverages] + the measured band values (floats).
    '''
    if isinstance(cntfile, cntspectsource):
        c = cntfile
    else:
        c = cntspectsource(cntfile)
    if c.filename is None:
        return
    c.set_epochfilter(epochfilter)
    if len(c.trigpoints) == 0:
        return
    script = avg_q.Script(self)
    script.add_Epochsource(c)
    # Drop epochs with Inf bandwidth (e.g. incoming EEG zeroed out)
    script.add_transform('reject_bandwidth Inf')
    script.set_collect('average')
    # With bands: exponentiate, collapse to bands, take the log again
    # (i.e. band-average in the linear domain); without bands: keep
    # frequency bins 0+1..48 as-is.
    script.add_postprocess('''
swap_fc
''' + ('''
calc exp
''' + self.get_spect_trim(bands) + '''
calc log
''' if bands else '''
trim -x 0+1 48
''') + '''
query -N nrofaverages
write_generic -P stdout string
''')
    outline = []
    for line in script.runrdr():
        if '=' in line:
            # Query output of the form name=value precedes the data lines.
            varname, value = line.split('=')
            if varname == 'nrofaverages':
                outline.append(int(value))
        else:
            yield outline + [float(x) for x in line.split('\t')]
            outline = []
def average_HEOG(self):
    '''Average HEOG sections.
    Returns the total number of averages over both directions
    (HEOGp, HEOGm), or None if the average file already exists.'''
    if os.path.exists(self.avgHEOGfile):
        return
    self.detect_HEOG()
    nrofaverages = 0
    ranges = self.get_ranges(self.HEOGtriggers)
    # One averaging pass per saccade direction; both results are appended
    # to the same output file.
    for direction in ['HEOGp', 'HEOGm']:
        script = avg_q.Script(self.avg_q_instance)
        for d, (start, end) in ranges:
            if d != direction:
                continue
            epochsource = avg_q.Epochsource(
                self.Epochsource_list[0].infile, 0, end - start)
            epochsource.set_trigpoints(start)
            script.add_Epochsource(epochsource)
        # Zero out protected channels so they cannot cause rejection.
        if self.protect_channels is not None and len(
                self.protect_channels) > 0:
            script.add_transform('''
scale_by -n ?%(protect_channels)s 0
''' % {
                'protect_channels': channel_list2arg(self.protect_channels),
            })
        if self.preprocess:
            script.add_transform(self.preprocess)
        # Reduce each section to its mean value.
        script.add_transform('''
trim -a 0 0
''')
        script.set_collect('average')
        script.add_postprocess('''
query nrofaverages stdout
writeasc -a -b %(avgHEOGfile)s
''' % {'avgHEOGfile': self.avgHEOGfile})
        rdr = script.runrdr()
        for line in rdr:
            nrofaverages += int(line)
    return nrofaverages
def avgEPI(self, crsfile, runindex):
    """Average all EPI artifact epochs of one run into self.avgEPIfile.

    Uses a single EPI epoch as amplitude template and rejects epochs
    deviating too much from it; if too many epochs are rejected, the
    next epoch is tried as template. The best-fitting average is kept
    as fallback when no template satisfies the strict criterion.
    """
    self.set_crsfile(crsfile)
    self.avgEPIfile = self.base + '_AvgEPI%02d.asc' % runindex
    if os.path.exists(self.avgEPIfile) and self.CheckIfAvgEPICompatible():
        print("Reusing average file %s" % self.avgEPIfile)
        return
    print("Averaging EPI... -> %s" % self.avgEPIfile)
    singleEPIepoch = 1  # Use first EPI as template by default
    singleEPIfile = self.base + '_SingleEPI%02d' % runindex
    residualsfile = self.base + '_residuals%02d' % runindex
    if not self.checkmode and os.path.exists(residualsfile + '.hdf'):
        os.unlink(residualsfile + '.hdf')
    # Store "best" template in case finding one with the strict criterion fails
    min_rejection_fraction = 1
    base_avgEPIfile, ext_avgEPIfile = os.path.splitext(self.avgEPIfile)
    # Repeat averaging if too many EPIs are rejected (specimen itself is bad):
    while True:
        print("Using EPI template epoch %d" % singleEPIepoch)
        # Get the single epoch specimen and its amplitude
        script = avg_q.Script(self.avg_q_instance)
        script.add_Epochsource(self.EPI_Epochsource(self.EPIs[singleEPIepoch - 1]))
        script.add_transform('''
writeasc -b -c %(singleEPIfile)s.asc
calc abs
trim -h 0 0
writeasc -b %(singleEPIfile)s_Amplitude.asc
''' % {
            'singleEPIfile': singleEPIfile,
        })
        script.run()
        # NOTE(review): dead string, disabled debug visualization kept for reference.
        '''
        #add_channels -l %(singleEPIfile)s.asc
        #posplot
        #link_order 2
        #pop
        '''
        # Rejection: compare each epoch's amplitude against the template
        # amplitude and reject if it deviates by more than the allowed
        # fraction; residuals are logged to the HDF file.
        rejection_script = '''
push
subtract %(singleEPIfile)s.asc
calc abs
trim -h 0 0
subtract -d %(singleEPIfile)s_Amplitude.asc
# Let each run through all EPIs correspond to exactly "1s"
set sfreq %(nr_of_EPIs)d
write_hdf -a -c %(residualsfile)s.hdf
# Don't consider non-EEG channels to judge the fit
collapse_channels -h !?%(remove_channels)s:collapsed
reject_bandwidth -m %(avgEPI_Amplitude_Reject_fraction)g
pop
''' % {
            'singleEPIfile': singleEPIfile,
            'nr_of_EPIs': len(self.EPIs),
            'residualsfile': residualsfile,
            'remove_channels': channel_list2arg(self.remove_channels),
            'avgEPI_Amplitude_Reject_fraction': self.avgEPI_Amplitude_Reject_fraction,
        }
        script = avg_q.Script(self.avg_q_instance)
        for EPI in self.EPIs:
            script.add_Epochsource(self.EPI_Epochsource(EPI))
        if self.checkmode:
            # NOTE(review): the sub-script lines after 'set xdata 1' appear
            # to duplicate the collect/postprocess set below — confirm this
            # embedded sub-script is intentional.
            script.add_transform('''
set beforetrig 0
set xdata 1
append -l
Post:
posplot
-
''')
            script.set_collect('append -l')
            script.add_postprocess('posplot')
        else:
            script.add_transform(rejection_script)
            script.set_collect('average')
            script.add_postprocess('''
writeasc -b %(avgEPIfile)s
query accepted_epochs stdout
query rejected_epochs stdout
''' % {
                'avgEPIfile': self.avgEPIfile,
            })
        lines = script.runrdr()
        accepted_epochs = int(next(lines))
        rejected_epochs = int(next(lines))
        # Empty the line buffer just in case
        for line in lines:
            print(line)
        os.unlink(singleEPIfile + '.asc')
        os.unlink(singleEPIfile + '_Amplitude.asc')
        rejection_fraction = float(rejected_epochs) / (accepted_epochs + rejected_epochs)
        #print accepted_epochs,rejected_epochs,rejection_fraction
        if rejection_fraction > self.avgEPI_max_rejection_fraction:
            # Keep the best average so far under '_Best.asc'.
            if rejection_fraction < min_rejection_fraction:
                os.rename(self.avgEPIfile, base_avgEPIfile + '_Best.asc')
                min_rejection_fraction = rejection_fraction
            else:
                os.unlink(self.avgEPIfile)
            # Try the next EPI template...
            singleEPIepoch += 1
            if singleEPIepoch >= len(self.EPIs):
                if os.path.exists(base_avgEPIfile + '_Best.asc'):
                    # Okay, we pragmatically use the one with the least rejection
                    print("No EPI templates would fit, using best rejection fraction (%g)" % rejection_fraction)
                    os.rename(base_avgEPIfile + '_Best.asc', self.avgEPIfile)
                    break
                else:
                    print("Oops, not even a best rejection_fraction fit?")
                    raise Exception("No EPI templates would fit, giving up.")
            print("EPI Rejection fraction too large (%g), retrying..." % rejection_fraction)
        else:
            break
    if os.path.exists(base_avgEPIfile + '_Best.asc'):
        os.unlink(base_avgEPIfile + '_Best.asc')
def get_epitrigs(self, fromto):
    """Detect EPI (fMRI gradient) artifact triggers for each run.

    fromto is a sequence of (start_s, end_s) pairs delimiting the runs in
    seconds (end_s may be falsy for open-ended detection). For each run,
    a template is extracted around the first detected peak and then
    matched by convolution in steps of TR; the resulting high-resolution
    trigger list is written to a per-run .crs file (unless in checkmode).
    """
    runindex = 0
    for start_s, end_s in fromto:
        self.haveTemplate = False
        runindex += 1
        if not self.checkmode:
            crsfilename = self.base + '%02d.crs' % runindex
            if os.path.exists(crsfilename):
                print("%s exists!" % crsfilename)
                continue
        if not self.threshold:
            self.get_threshold(start_s)
            if not self.threshold:
                raise Exception("threshold is undefined!")
        outtuples = trgfile.HighresTriggers(self.upsample)
        # First, only look for the first EPI peak and extract a short template
        script = avg_q.Script(self.avg_q_instance)
        epochsource = avg_q.Epochsource(self.infile, '0s', '%gms' % (5 * self.TR))
        epochsource.set_trigpoints('%gs' % start_s)
        detect_first_EPI_script = '''
write_crossings -E -R %(refractory_time)gms collapsed %(threshold)g triggers
%(posplot)s
query triggers_for_trigfile stdout
assert -S nr_of_triggers == 0
null_sink
-
''' % {
            'refractory_time': self.TR,
            'threshold': self.threshold,
            'posplot': 'set_comment Finding first EPI peak...\nposplot' if self.checkmode else ''
        }
        script.add_Epochsource(epochsource)
        script.add_transform(self.collapseit)
        script.add_transform(detect_first_EPI_script)
        trgfile_crs = trgfile.trgfile(script.runrdr())
        trgpoints = trgfile_crs.gettuples()
        if len(trgpoints) == 0:
            print("Can't find a single trigger in %s, continuing..." % self.base)
            continue
        trigpoint = int(start_s * self.sfreq) + trgpoints[0][0]
        self.getTemplate(trigpoint)
        # Read the whole fMRI run in steps of TR
        # Empirical correction for idempotency
        correct_correct = None
        while not end_s or trigpoint < end_s * self.sfreq:
            readpoint, trimcorrection = trgfile.get_ints(trigpoint, self.upsample)
            # Note that this still needs to be finalized for the '%s' template in write_crossings!
            epi_detection_script = '''
convolve %(templatefilename)s 1
trim %(trimstart)f %(trimlength)f
write_crossings %%s collapsed %(convolvethreshold)g triggers
%(posplot)s
query triggers_for_trigfile stdout
''' % {
                'templatefilename': self.templatefilename,
                'trimstart': self.upsample * (self.template_points / 2) + trimcorrection,
                'trimlength': self.upsample * self.refine_points,
                'convolvethreshold': self.convolvethreshold,
                'posplot': 'set_comment Detecting EPI...\nposplot' if self.checkmode else ''
            }
            before = int((self.template_points + self.refine_points) / 2)
            script = avg_q.Script(self.avg_q_instance)
            epochsource = avg_q.Epochsource(self.infile, before, before + 1)
            epochsource.set_trigpoints(readpoint)
            script.add_Epochsource(epochsource)
            script.add_transform(self.collapseit)
            script.add_transform(self.upsampleit)
            script.add_transform(epi_detection_script % '-E')  # Detect extrema. May fail if no extremum is assumed.
            trgfile_crs = trgfile.trgfile(script.runrdr())
            trgpoints = trgfile_crs.gettuples()
            if len(trgpoints) == 0:
                # No extremum found. Check whether there is a signal of EPI-like amplitude at all.
                script = avg_q.Script(self.avg_q_instance)
                script.add_Epochsource(epochsource)
                script.add_transform(self.collapseit)
                script.add_transform(self.upsampleit)
                script.add_transform(epi_detection_script % '')  # Detect crossings to check whether a signal is present at all
                trgfile_crs = trgfile.trgfile(script.runrdr())
                trgpoints = trgfile_crs.gettuples()
                if len(trgpoints) == 0:
                    break  # No EPI signal.
                print("EPI peak detection failed - trying to continue without correction...")
                correction = correct_correct if correct_correct is not None else 0
            else:
                correction = float(trgpoints[0][0]) / self.upsample - self.refine_points / 2
                # The first measured correction becomes the baseline; later
                # ones are taken relative to it.
                if correct_correct is None:
                    correct_correct = correction
                else:
                    correction -= correct_correct
            trigpoint += correction
            print("Correction %g Position %d" % (correction, trigpoint))
            outtuples.append(trigpoint)
            # Comment this out to prove idempotency:
            trigpoint += self.TR_points
        # Check the end of the scan - does it fit with TR?
        epi_end_detection_script = '''
scale_by invpointmax
write_crossings -E collapsed 0.75 triggers
%(posplot)s
query triggers_for_trigfile stdout
''' % {
            'posplot': 'set_comment Detecting end of scan...\nposplot' if self.checkmode else ''
        }
        read_TS_before_trigpoint = 2
        read_TS_after_trigpoint = 1
        # NOTE(review): the readpoint/print use TR_points while deviation
        # uses TS_points — looks inconsistent with the read_TS_* naming;
        # confirm which time base is intended.
        readpoint = trigpoint - read_TS_before_trigpoint * self.TR_points
        points_to_read = (read_TS_before_trigpoint + read_TS_after_trigpoint) * self.TR_points
        # Sigh... Detect whether the EEG was stopped *immediately* after the scan
        if readpoint + points_to_read > self.points_in_file:
            points_to_read = self.points_in_file - readpoint
        script = avg_q.Script(self.avg_q_instance)
        epochsource = avg_q.Epochsource(self.infile, 0, points_to_read)
        epochsource.set_trigpoints(readpoint)
        script.add_Epochsource(epochsource)
        script.add_transform(self.collapseit)
        script.add_transform(epi_end_detection_script)
        trgfile_crs = trgfile.trgfile(script.runrdr())
        trgpoints = trgfile_crs.gettuples()
        point, code, description = trgpoints[-1]
        deviation = (point - read_TS_before_trigpoint * self.TS_points) / self.TS_points
        print("Last peak %gs after last EPI, deviation %g*TS" % ((point - read_TS_before_trigpoint * self.TR_points) / self.sfreq, deviation))
        if deviation < -0.05:
            print("Deviation too large, dropping last EPI - Check TS=%gms!"
                  % self.TS)
            outtuples.pop(-1)
        elif deviation >= 0:
            print("Positive deviation, check end_s=" + str(end_s) + "!")
        self.print_EPIstats(outtuples)
        if not self.checkmode:
            trgfile_crs.preamble['TS'] = str(self.TS)
            trgfile_crs.preamble['TR'] = str(self.TR)
            trgfile_crs.preamble['upsample'] = str(self.upsample)
            crsfile = open(crsfilename, mode='w')
            trgfile_crs.writetuples(outtuples.as_triglist(), crsfile)
            crsfile.close()
        if self.haveTemplate:
            os.unlink(self.templatefilename)
            self.haveTemplate = False
def cnt2trg(self, sl, cntfile):
    '''sl must be an slfile.slfile instance yielding the sleep staging for the given night

    Writes a .trg file next to the .cnt file combining the staging with
    per-band artifact check marks derived from median-filtered band power.
    '''
    c = cntspectsource(cntfile)
    if c.filename is None:
        return
    tfile = c.filename.replace('.cnt', '.trg')
    if os.path.exists(tfile):
        # trg file exists; leave 0-length files untouched if sl is still None
        if sl is None:
            if os.path.getsize(tfile) > 0:
                print("%s exists and >0 but sl file not found???" % tfile)
            return
        sl_mtime = os.path.getmtime(sl.filename)
        cnt_mtime = os.path.getmtime(c.filename)
        trg_mtime = os.path.getmtime(tfile)
        # Keep the existing trg file if it is newer than both sl and cnt files,
        # otherwise re-generate it
        if sl_mtime < trg_mtime and cnt_mtime < trg_mtime:
            return
    elif sl is None:
        # Create 0-length file
        with open(tfile, 'w') as f:
            pass
        return
    c.aftertrig = 0  # Read the whole cnt file as one epoch
    script = avg_q.Script(self)
    self.add_cntbandssource(script, c, bands=self.rejection_bands)
    # Emit raw band values, a marker line, then median-filtered band values.
    script.add_transform('''
write_generic stdout string
echo -F stdout End of bands\\n
sliding_average -M %(median_length)d 1
write_generic stdout string
echo -F stdout End of medbands\\n
''' % {
        'median_length': self.rejection_median_length,
    })
    rdr = script.runrdr()
    nr_of_bands = len(self.rejection_bands)
    bands = [[] for x in range(nr_of_bands)]
    for r in rdr:
        if r == 'End of bands':
            break
        r = r.split('\t')
        for i in range(nr_of_bands):
            bands[i].append(float(r[i]))
    medbands = [[] for x in range(nr_of_bands)]
    for r in rdr:
        if r == 'End of medbands':
            break
        r = r.split('\t')
        for i in range(nr_of_bands):
            medbands[i].append(float(r[i]))
    # Drain any remaining output.
    for r in rdr:
        pass
    #print(bands)
    #print(medbands)
    # Per band: threshold = median - lower quartile of the median-filtered
    # band values.
    medians_of_medianfiltered = []
    quartiles_of_medianfiltered = []
    diff_threshold = []
    for i in range(nr_of_bands):
        # Remove -inf values, which are caused by disconnected times
        sorted_medbands = sorted(
            [x for x in medbands[i] if not math.isinf(x)])
        # If this is empty, avoid producing an error and set diff_threshold to zero
        if sorted_medbands:
            medians_of_medianfiltered.append(sorted_medbands[int(
                len(sorted_medbands) / 2)])
            quartiles_of_medianfiltered.append(sorted_medbands[int(
                len(sorted_medbands) / 4)])
            diff_threshold.append(medians_of_medianfiltered[i] -
                                  quartiles_of_medianfiltered[i])
        else:
            medians_of_medianfiltered.append(None)
            quartiles_of_medianfiltered.append(None)
            diff_threshold.append(0)
    print("medians_of_medianfiltered=" + str(medians_of_medianfiltered))
    print("quartiles_of_medianfiltered=" + str(quartiles_of_medianfiltered))
    sl_pos = 0
    point = 0
    nr_of_points = len(bands[0])
    tuples = []
    time, stage, checks, arousals, myos, eyemovements, remcycle, nremcycle = -1, 0, 0, 0, 0, 0, -1, -1
    print(len(sl.tuples))
    while point < nr_of_points:
        checkmark = 0
        checkmarks = [0 for i in range(nr_of_bands)]
        # NOTE(review): point starts at 0 and only increments, so the
        # point<0 branch looks unreachable — confirm.
        if point < 0:
            time, stage, checks, arousals, myos, eyemovements, remcycle, nremcycle = -1, 0, 0, 0, 0, 0, -1, -1
        elif sl_pos >= len(sl.tuples):
            # Let the REM cycle remain on the last value it had, otherwise we'd always
            # be averaging wake phases at the beginning and end of the night...
            time, stage, checks, arousals, myos, eyemovements = -1, 0, 0, 0, 0, 0
        else:
            time, stage, checks, arousals, myos, eyemovements = sl.tuples[
                sl_pos][:6]
            remcycle, nremcycle = sl.remcycles[sl_pos]
        for i in range(nr_of_bands):
            # One checkmark bit per band, starting at bit 3.
            if abs(bands[i][point] - medbands[i][point]) > diff_threshold[i]:
                checkmark += 1 << i + 3
                checkmarks[i] += 1
        sl_pos += 1
        check = arousals + 2 * checks + checkmark
        # Encode REM cycle and stage into the trigger code; negative code
        # marks any check/artifact condition.
        code = (0 if remcycle < 0 else remcycle) * 8 + stage + 1
        if check != 0:
            code = -code
        tuples.append([
            point, code, stage, remcycle, nremcycle, arousals, myos,
            eyemovements, checks
        ] + checkmarks)
        point += 1
    # Finally write the result...
    from .. import trgfile
    t = trgfile.trgfile()
    t.preamble['fields'] = "\t".join([
        'point', 'code', 'stage', 'remcycle', 'nremcycle', 'arousals',
        'myos', 'emts', 'checks'
    ] + ['checkmark_' + band[2] for band in self.rejection_bands])
    f = open(tfile, "w")
    t.writetuples(tuples, f)
    f.close()
    sl.close()
# Example script: plot a 2-s epoch from the start of every EDF file.
import sys  # fix: sys was used below without being imported
sys.path.append("R:/avg_q/python")
import avg_q

# The avg_q_ui.exe binary will be found automatically in the directory
# above python/
a = avg_q.avg_q(avg_q='avg_q_ui')

import glob
# Iterate over all files with extension .edf
for infile in glob.glob('N:/*.edf'):
    # avg_q.Epochsource normally takes an avg_q_file() descriptor as first
    # argument but for convenience also a file name with type inferred
    # from the extension
    # Read a single 2-s epoch from the start of the file
    epochsource = avg_q.Epochsource(infile, '1s', '1s', continuous=True, epochs=1)
    script = avg_q.Script(a)
    script.add_Epochsource(epochsource)
    script.add_transform('''
query -N sfreq stdout
query -N nr_of_points stdout
posplot''')
    # avg_q.Script() sets the collect method to null_sink automatically
    script.run()
a.close()