Example #1
 def EPI_Epochsource(self,EPI):
  if self.upsample==1:
   trimcorrection=0
   beforetrig,aftertrig=self.beforetrig_points,self.aftertrig_points
   trimit=''
  else:
   trimcorrection=trgfile.HighresTrigger(EPI)[1]
   # One extra data point is read so that trim doesn't add zeros (negative offset)
   trimcorrection+=self.upsample
   trimit='''
>trim %(trimstart)f %(trimlength)f
''' % {
   'trimstart': trimcorrection,
   'trimlength': (self.beforetrig_points+self.aftertrig_points+2)*self.upsample,
   }
   # We need 2 points on each side as 'trim space': one for averaging, a second before subtraction
   beforetrig,aftertrig=self.beforetrig_points+2,self.aftertrig_points+2
  branch='''
>detrend -0
%(upsampleit)s
%(trimit)s
''' % {
  'upsampleit': self.upsampleit,
  'trimit': trimit,
  }
  epochsource=avg_q.Epochsource(self.infile,beforetrig,aftertrig)
  epochsource.set_trigpoints(EPI[0])
  epochsource.add_branchtransform(branch)
  return epochsource
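
 # A minimal usage sketch (hypothetical, not taken from the original class): it
 # assumes the object also exposes avg_q_instance and that EPIs is an iterable
 # of EPI trigger tuples; the output file name is made up for illustration.
 def average_EPIs(self,EPIs):
  script=avg_q.Script(self.avg_q_instance)
  for EPI in EPIs:
   # EPI_Epochsource() already attaches the detrend/upsample/trim branch
   script.add_Epochsource(self.EPI_Epochsource(EPI))
  script.set_collect('average')
  script.add_postprocess('writeasc -b EPI_average.asc')
  script.run()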
Example #2
    def classify_Spindles(self):
        f = avg_q.avg_q_file(self.filteredfile, 'BrainVision')
        self.Spindles = []  # Tuples (start_s,duration_ms,Spindle_info dict)
        for collapsedchannel, (start, end) in self.Spindleranges:
            duration_ms = (end - start) / self.sfreq * 1000
            # Count the zero crossings
            detector = avg_q.Detector.Detector(self.avg_q_instance)
            epochsource = avg_q.Epochsource(
                f, '100ms',
                '%gms' % (duration_ms + 200))  # 100ms margin before the range, 200ms after its end
            epochsource.set_trigpoints('%gs' % (start / self.sfreq))
            detector.add_Epochsource(epochsource)
            detector.add_transform('''
write_crossings ALL 0 stdout
''')
            crossings = detector.detect()
            trigger_dict = avg_q.Detector.get_trigger_dict(crossings)
            Spindle_info = {}  # Will contain lists [freq_Hz,amplitude]
            for channel, cr in trigger_dict.items():
                period_s = 2 * (cr[-1][0] - cr[0][0]) / (len(cr) -
                                                         1) / detector.sfreq
                Spindle_info[channel] = [1.0 / period_s]

            script = avg_q.Script(self.avg_q_instance)
            script.add_Epochsource(epochsource)
            script.add_transform('''
calc abs
trim -a 0 0
write_generic -P -N stdout string
''')
            for line in script.runrdr():
                channel, amp = line.split('\t')
                Spindle_info[channel].append(float(amp))
            self.Spindles.append(
                (start / self.sfreq, duration_ms, Spindle_info))
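
    # A minimal reporting sketch (hypothetical helper, assuming classify_Spindles()
    # has already been run so that self.Spindles is populated as described above):
    def print_Spindles(self):
        for start_s, duration_ms, Spindle_info in self.Spindles:
            for channel, info in Spindle_info.items():
                # info is [freq_Hz] plus the amplitude appended above
                amplitude = info[1] if len(info) > 1 else float('nan')
                print('%gs\t%gms\t%s\t%gHz\t%g' %
                      (start_s, duration_ms, channel, info[0], amplitude))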
Example #3
 def sessionAverage(self,infiles):
  ecgfiles=[]
  for infile in infiles:
   self.set_Epochsource(avg_q.Epochsource(infile,continuous=True))
   self.average_ECG()
   ecgfiles.append(self.avgECGfile)
  return self.sessionaverage_ECG(ecgfiles)
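
 # A minimal driver sketch (hypothetical: the glob pattern is an assumption;
 # infiles may be any iterable of file names):
 def sessionAverage_glob(self,pattern):
  import glob
  return self.sessionAverage(sorted(glob.glob(pattern)))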
Example #4
 def transferSection(self,sectionstart, sectionbefore, sectionafter, addmethods=None):
  doscript=addmethods if addmethods else ''
  # Alternative output stages, kept here as an unused reference string:
  '''
fftfilter 40Hz 45Hz 1 1
sliding_average 1 10ms
write_generic %(append_arg)s %(correctedfile)s.eeg int16
writeasc %(append_arg)s %(correctedfile)s.asc
write_synamps -c %(append_arg)s %(correctedfile)s.cnt %(sensitivity)g
'''
  writefile='''
sliding_average %(sliding_size)sms %(sliding_step)sms
write_hdf -c %(append_arg)s %(correctedfile)s.hdf
  ''' % {
  'sliding_size': 2000.0/self.correctedfile_sfreq,
  'sliding_step': 1000.0/self.correctedfile_sfreq,
  'append_arg': '-a' if self.append else '',
  'correctedfile': self.correctedfile,
  'sensitivity': 10,
  }

  script=avg_q.Script(self.avg_q_instance)
  epochsource=avg_q.Epochsource(self.infile,sectionbefore,sectionafter)
  epochsource.set_trigpoints(sectionstart)
  script.add_Epochsource(epochsource)
  script.add_transform(doscript)
  script.add_transform(writefile)
  script.run()
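
 # A minimal driver sketch (hypothetical: it assumes self.append may be toggled
 # between calls, as the write_hdf -a handling above suggests, and that sections
 # is a list of (sectionstart, sectionbefore, sectionafter) tuples):
 def transferSections(self,sections):
  self.append=False
  for sectionstart,sectionbefore,sectionafter in sections:
   self.transferSection(sectionstart,sectionbefore,sectionafter)
   self.append=True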
Example #5
 def average_ECG(self):
  '''Average ECG events with rejection.
     Returns the number of accepted ECGs.'''
  if os.path.exists(self.avgECGfile):
   ascfile=avg_q.avg_q_file(self.avgECGfile)
   return self.avg_q_instance.get_description(ascfile,'nrofaverages')
  self.detect_ECG()
  epochsource=avg_q.Epochsource(self.Epochsource_list[0].infile,self.ECG_beforetrig, self.ECG_aftertrig)
  epochsource.set_trigpoints(self.ECGtriggers)
  script=avg_q.Script(self.avg_q_instance)
  script.add_Epochsource(epochsource)
  if self.protect_channels is not None and len(self.protect_channels)>0:
   script.add_transform('''
scale_by -n ?%(protect_channels)s 0
''' % {
   'protect_channels': channel_list2arg(self.protect_channels),
   })
  script.add_transform('''
baseline_subtract
reject_bandwidth %(reject_bandwidth)f
''' % {
  'reject_bandwidth': self.ECG_reject_bandwidth,
  })
  script.set_collect('average')
  script.add_postprocess('''
query nrofaverages stdout
writeasc -b %(avgECGfile)s
''' % {
  'avgECGfile': self.avgECGfile})
  rdr=script.runrdr()
  nrofaverages=0
  for line in rdr:
   nrofaverages=int(line)
  return nrofaverages
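
 # A minimal follow-up sketch (hypothetical helper): average the ECG events as
 # above, then read the resulting file back and display it.
 def show_average_ECG(self):
  nrofaverages=self.average_ECG()
  print("Averaged %d ECG events" % nrofaverages)
  script=avg_q.Script(self.avg_q_instance)
  script.add_Epochsource(avg_q.Epochsource(avg_q.avg_q_file(self.avgECGfile)))
  script.add_transform('posplot')
  script.run()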
Example #6
 def sessionAverageHEOG(self, infiles):
     heogfiles = []
     for infile in infiles:
         self.set_Epochsource(avg_q.Epochsource(infile, continuous=True))
         self.average_HEOG()
         heogfiles.append(self.avgHEOGfile)
     return self.sessionaverage_HEOG(heogfiles)
Example #7
 def sessionGratton(self, infiles):
     self.sessionAverage(infiles)
     for infile in infiles:
         self.set_Epochsource(avg_q.Epochsource(infile, continuous=True))
         self.Gratton()
     if self.mapfile:
         os.unlink(self.mapfile)
         self.mapfile = None
     if self.sessionaverage_EOGfile:
         os.unlink(self.sessionaverage_EOGfile)
         self.sessionaverage_EOGfile = None
Example #8
    def collect_artifacts(self, remove_channels=None, preprocess=''):
        '''Analyze the given section of the continuous file for artifacts.
        remove_channels is a list of channel names to exclude from detection.'''
        epochsource = avg_q.Epochsource(self.infile, 0,
                                        self.end_point - self.start_point)
        epochsource.set_trigpoints(self.start_point)
        script = avg_q.Script(self.avg_q_instance)
        script.add_Epochsource(epochsource)
        script.set_collect('append')
        import tempfile
        tempscalefile = tempfile.NamedTemporaryFile()
        script.add_postprocess(
            '''
%(remove_channels)s
%(preprocess)s
push
differentiate
calc abs
push
trim -M 0 0
writeasc -b -c %(tempscalefile)s
pop
subtract -d -P -c %(tempscalefile)s
push
collapse_channels -h
write_crossings -E collapsed %(JumpDetectionThreshold)g stdout
pop
collapse_channels -l
recode 0 0 1 1  0 Inf 0 0
write_crossings collapsed 0.5 stdout
pop
fftfilter 0 0 30Hz 35Hz
subtract -d -P -c %(tempscalefile)s
calc abs
collapse_channels -h
write_crossings -E collapsed %(ArtifactDetectionThreshold)g stdout
''' % {
                'remove_channels':
                'remove_channel -n ?' +
                channel_list2arg(remove_channels) if remove_channels else '',
                'preprocess':
                preprocess,
                'tempscalefile':
                tempscalefile.name,
                'JumpDetectionThreshold':
                self.JumpDetectionThreshold,
                'ArtifactDetectionThreshold':
                self.ArtifactDetectionThreshold
            })
        crossings = trgfile.trgfile(script.runrdr())
        self.collected = collect_crossings(self.min_blocking_points)
        self.collected.add_crossings(crossings, self.start_point,
                                     self.end_point)
        tempscalefile.close()  # The temp file will be deleted at this point.
Example #9
 def show_artifacts(self, epochlength=0):
     '''Read the full continuous file and add triggers showing the breakpoints (and break regions).
  Note that this currently works for avg_q_vogl only, not for avg_q_ui.'''
     epochsource = avg_q.Epochsource(self.infile,
                                     aftertrig=epochlength,
                                     continuous=True,
                                     trigtransfer=True)
     epochsource.set_trigpoints(self.collected.get_tuples())
     script = avg_q.Script(self.avg_q_instance)
     script.add_Epochsource(epochsource)
     script.set_collect('append -l')
     script.add_postprocess('posplot')
     script.run()
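
 # A minimal driver sketch (hypothetical: the excluded channel name and the 1s
 # display epoch length are made up): run the detection pass, then inspect the
 # marked regions on top of the raw data.
 def review_artifacts(self):
     self.collect_artifacts(remove_channels=['EKG'])
     self.show_artifacts(epochlength='1s')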
Example #10
 def get_threshold(self,start_s):
   '''Derive a workable EPI threshold from the first 5s of data with EPI artefact.'''
   script=avg_q.Script(self.avg_q_instance)
   epochsource=avg_q.Epochsource(self.infile,'0s','5s')
   epochsource.set_trigpoints('%gs' % start_s)
   script.add_Epochsource(epochsource)
   script.add_transform(self.collapseit)
   script.add_transform('''trim -h 0 0''')
   script.set_collect('minmax')
   script.add_postprocess('write_generic stdout string')
   for line in script.runrdr():
    minval,maxval=line.split()
    self.threshold=float(maxval)*0.5
    print("Automatically set threshold to %g" % self.threshold)
Example #11
 def add_regressor_avg_q_file(self, regressor_avg_q_file):
     channelnames = self.avg_q_instance.get_description(
         regressor_avg_q_file, 'channelnames')
     for channelname in channelnames:
         script = avg_q.Script(self.avg_q_instance)
         script.add_Epochsource(avg_q.Epochsource(regressor_avg_q_file))
         script.add_transform('remove_channel -k %s' %
                              escape_channelname(channelname))
         script.add_transform(
             'detrend'
         )  # An offset produces irreparable problems with waver's padding
         script.add_transform('write_generic %s.dat string' %
                              escape_filename(channelname))
         script.run()
         self.add_regressor_file(channelname, '%s.dat' % channelname)
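
 # A minimal usage sketch (hypothetical file name): wrap an existing ASCII
 # average file in an avg_q_file descriptor and export each of its channels
 # as a regressor.
 def add_regressors_from_asc(self, ascname):
     self.add_regressor_avg_q_file(avg_q.avg_q_file(ascname))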
Example #12
    def average_EOG(self):
        '''Average EOG events with strict checks for duration and surroundings.
     Returns the number of accepted EOGs.'''
        if os.path.exists(self.avgEOGfile):
            ascfile = avg_q.avg_q_file(self.avgEOGfile)
            return self.avg_q_instance.get_description(ascfile, 'nrofaverages')
        self.detect_VEOG()
        epochsource = avg_q.Epochsource(self.Epochsource_list[0].infile,
                                        self.VEOG_beforetrig,
                                        self.VEOG_aftertrig,
                                        offset=self.VEOG_offset)
        epochsource.set_trigpoints(self.VEOGtriggers)
        script = avg_q.Script(self.avg_q_instance)
        script.add_Epochsource(epochsource)
        if self.protect_channels is not None and len(
                self.protect_channels) > 0:
            script.add_transform('''
scale_by -n ?%(protect_channels)s 0
''' % {
                'protect_channels':
                channel_list2arg(self.protect_channels),
            })
        if self.preprocess:
            script.add_transform(self.preprocess)
        # Note that we require the VEOG signal to be "close to baseline" before and after the maximum
        script.add_transform(
            '''
baseline_subtract
push
%(get_VEOG_script)s
trim -x 0 100 400 500
calc abs
reject_bandwidth -m %(max_VEOG_amp_outside_window)f
pop
''' % {
                'get_VEOG_script': self.get_VEOG_script,
                'max_VEOG_amp_outside_window': self.VEOG_minamp
            })
        script.set_collect('average')
        script.add_postprocess('''
query nrofaverages stdout
writeasc -b %(avgEOGfile)s
''' % {'avgEOGfile': self.avgEOGfile})
        rdr = script.runrdr()
        nrofaverages = 0
        for line in rdr:
            nrofaverages = int(line)
        return nrofaverages
Example #13
 def write_DC_Corrected(self, outfilename):
     # Write the output file
     import numpy
     import tempfile
     tempepochfile = tempfile.NamedTemporaryFile()
     self.save_state()
     # Replace the epoch source by our own one
     infile = self.Epochsource_list[0].infile
     epochsource = avg_q.Epochsource(infile)
     self.Epochsource_list[0] = epochsource
     sfreq, points_in_file = self.avg_q_instance.get_description(
         infile, ('sfreq', 'points_in_file'))
     # Prepare our epoch source for continuous reading
     epochsource.beforetrig = 0
     epochsource.aftertrig = self.epochlen_s * sfreq
     epochsource.continuous = True
     epochsource.epochs = 1
     epochsource.trigtransfer = True
     self.add_transform('subtract %s' % tempepochfile.name)
     if outfilename.endswith('.cnt'):
         self.add_transform('write_synamps -a -c %s 1' % outfilename)
     else:
         self.add_transform('write_brainvision -a %s IEEE_FLOAT_32' %
                            outfilename)
     epoch = self.epochs[0]
     import os
     if os.path.exists(outfilename): os.unlink(outfilename)
     for epochno in range(int(points_in_file / sfreq / self.epochlen_s)):
         # sfreq may be returned as a float; use an integer number of points
         points_per_epoch = int(self.epochlen_s * sfreq)
         # x is computed in seconds
         x = epochno * self.epochlen_s + numpy.arange(points_per_epoch) / sfreq
         data = numpy.empty((points_per_epoch, epoch.nr_of_channels))
         for channel, s in enumerate(self.interpolators):
             data[:, channel] = s(x)
         newepoch = numpy_Script.numpy_epoch(data)
         newepoch.channelnames = epoch.channelnames
         newepoch.channelpos = epoch.channelpos
         newepoch.sfreq = sfreq
         tmpepochsource = numpy_Script.numpy_Epochsource([newepoch])
         tmpscript = avg_q.Script(self.avg_q_instance)
         tmpscript.add_Epochsource(tmpepochsource)
         tmpscript.add_transform('writeasc -b %s' % tempepochfile.name)
         tmpscript.run()
         epochsource.fromepoch = epochno + 1
         self.run()
     self.restore_state()
     tempepochfile.close()  # The temp file will be deleted at this point.
Example #14
 def getTemplate(self,trigpoint):
  if not self.haveTemplate:
   # The template is normalized so that 1 should be obtained when convolved with the original trace
   get_template_script='''
scale_by invpointsquarenorm
scale_by nr_of_points
writeasc -b %(templatefilename)s
''' % { 'templatefilename': self.templatefilename }
   script=avg_q.Script(self.avg_q_instance)
   epochsource=avg_q.Epochsource(self.infile,self.template_points/2,self.template_points/2)
   epochsource.set_trigpoints(trigpoint)
   script.add_Epochsource(epochsource)
   script.add_transform(self.collapseit)
   script.add_transform(self.upsampleit)
   script.add_transform(get_template_script)
   script.run()
   self.haveTemplate=True
Example #15
    def average_HEOG(self):
        '''Average HEOG sections.'''
        if os.path.exists(self.avgHEOGfile):
            return
        self.detect_HEOG()
        nrofaverages = 0
        ranges = self.get_ranges(self.HEOGtriggers)
        for direction in ['HEOGp', 'HEOGm']:
            script = avg_q.Script(self.avg_q_instance)
            for d, (start, end) in ranges:
                if d != direction: continue
                epochsource = avg_q.Epochsource(
                    self.Epochsource_list[0].infile, 0, end - start)
                epochsource.set_trigpoints(start)
                script.add_Epochsource(epochsource)
            if self.protect_channels is not None and len(
                    self.protect_channels) > 0:
                script.add_transform('''
scale_by -n ?%(protect_channels)s 0
''' % {
                    'protect_channels':
                    channel_list2arg(self.protect_channels),
                })
            if self.preprocess:
                script.add_transform(self.preprocess)
            script.add_transform('''
trim -a 0 0
''')
            script.set_collect('average')
            script.add_postprocess('''
query nrofaverages stdout
writeasc -a -b %(avgHEOGfile)s
''' % {'avgHEOGfile': self.avgHEOGfile})
            rdr = script.runrdr()
            for line in rdr:
                nrofaverages += int(line)
        return nrofaverages
Example #16
    def add_Paradigm_Epochsource(self,
                                 infile,
                                 paradigm_instance,
                                 condition,
                                 event0index,
                                 eventindex,
                                 beforetrig='0.2s',
                                 aftertrig='1s',
                                 preprocess='baseline_subtract'):
        '''
  This encapsulates reading and shifting all single epochs of a given condition and eventindex, ready for averaging.
  event0index is the 'reference' event around which the epochs are actually read, baseline subtracted
  and then recentered around event eventindex.
  If event0index==eventindex, obviously, epochs are read normally without shift.
  shiftwidth_points is returned, giving the amount by which the re-centered event was shifted.
  '''
        self.sfreq = self.avg_q_instance.get_description(infile, 'sfreq')
        beforetrig_points = self.avg_q_instance.time_to_points(
            beforetrig, self.sfreq)
        aftertrig_points = self.avg_q_instance.time_to_points(
            aftertrig, self.sfreq)

        def get_event(thistrial, i):
            ''' Since we keep ignored codes (codes that are neither stimuli nor
   responses in the paradigm) in the trial, simple indexing does not suffice;
   we need to clean the trial first.
   '''
            cleantrial = [
                x for x in thistrial
                if not paradigm_instance.is_ignored_code(x[1])
            ]
            return cleantrial[i]

        if eventindex == event0index:
            point_list = [
                get_event(trial, eventindex)
                for trial in paradigm_instance.trials[condition]
            ]
            epochsource = avg_q.Epochsource(infile, beforetrig, aftertrig)
            epochsource.set_trigpoints(point_list)
            epochsource.add_branchtransform(preprocess)
            self.add_Epochsource(epochsource)
            shiftwidth_points = 0
        else:
            latency_points = [
                get_event(trial, eventindex)[0] -
                get_event(trial, event0index)[0]
                for trial in paradigm_instance.trials[condition]
            ]
            shiftwidth_points = round(self.calc_shiftwidth(latency_points))
            # Warn if our desired shift point is not within the averaged epoch
            if aftertrig_points <= shiftwidth_points:
                print(
                    "Warning: shiftwidth_points=%d but aftertrig_points=%d!" %
                    (shiftwidth_points, aftertrig_points))
            # This is the total target epoch length:
            trimlength = beforetrig_points + aftertrig_points
            win_before_trig_points = shiftwidth_points + beforetrig_points
            win_after_trig_points = trimlength - win_before_trig_points
            for trial in paradigm_instance.trials[condition]:
                go_point = get_event(trial, event0index)[0]
                secondpoint = get_event(trial, eventindex)[0]
                pointdiff = secondpoint - go_point

                winstart_points = beforetrig_points + pointdiff - win_before_trig_points
                winend_points = beforetrig_points + pointdiff + win_after_trig_points
                # Numerically, if the response is sufficiently earlier than the shift width, we get a negative epoch
                # length for reading. Don't do this.
                if winend_points <= 0:
                    print(
                        "Warning: Not reading epoch completely outside of target window!"
                    )
                    continue

                branch_text = preprocess + '''
trim %(winstart_points)d %(trimlength)d
set beforetrig %(beforetrig)s
set xdata 1
''' % {
                    'winstart_points': winstart_points,
                    'trimlength': trimlength,
                    'beforetrig': beforetrig
                }
                epochsource = avg_q.Epochsource(
                    infile, beforetrig_points,
                    winend_points - beforetrig_points)
                epochsource.set_trigpoints(go_point)
                epochsource.add_branchtransform(branch_text)
                self.add_Epochsource(epochsource)
        return shiftwidth_points
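
    # A minimal usage sketch (hypothetical: condition and event indices are made
    # up, and it assumes, as the self.add_Epochsource() calls above suggest, that
    # this class derives from avg_q.Script):
    def average_condition(self, infile, paradigm_instance, condition):
        shiftwidth_points = self.add_Paradigm_Epochsource(
            infile, paradigm_instance, condition, event0index=0, eventindex=1)
        self.set_collect('average')
        self.add_postprocess('writeasc -b %s.asc' % condition)
        self.run()
        return shiftwidth_points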
Example #17
 def get_epitrigs(self,fromto):
  runindex=0
  for start_s,end_s in fromto:
   self.haveTemplate=False
   runindex+=1

   if not self.checkmode:
    crsfilename=self.base + '%02d.crs' % runindex
    if os.path.exists(crsfilename):
     print("%s exists!" % crsfilename)
     continue

   if not self.threshold:
    self.get_threshold(start_s)
    if not self.threshold:
     raise Exception("threshold is undefined!")

   outtuples=trgfile.HighresTriggers(self.upsample)

   # First, only look for the first EPI peak and extract a short template
   script=avg_q.Script(self.avg_q_instance)
   epochsource=avg_q.Epochsource(self.infile,'0s','%gms' % (5*self.TR))
   epochsource.set_trigpoints('%gs' % start_s)
   detect_first_EPI_script='''
write_crossings -E -R %(refractory_time)gms collapsed %(threshold)g triggers
%(posplot)s
query triggers_for_trigfile stdout
assert -S nr_of_triggers == 0
null_sink
-
''' % {
   'refractory_time': self.TR,
   'threshold': self.threshold,
   'posplot': 'set_comment Finding first EPI peak...\nposplot' if self.checkmode else ''
   }
   script.add_Epochsource(epochsource)
   script.add_transform(self.collapseit)
   script.add_transform(detect_first_EPI_script)
   trgfile_crs=trgfile.trgfile(script.runrdr())
   trgpoints=trgfile_crs.gettuples()
   if len(trgpoints)==0:
    print("Can't find a single trigger in %s, continuing..." % self.base)
    continue
   trigpoint=int(start_s*self.sfreq)+trgpoints[0][0]
   self.getTemplate(trigpoint)
   
   # Read the whole fMRI run in steps of TR
   # Empirical correction for idempotency
   correct_correct=None
   while not end_s or trigpoint<end_s*self.sfreq:
    readpoint,trimcorrection=trgfile.get_ints(trigpoint,self.upsample)
     # Note that the '%s' placeholder for write_crossings in this script is only filled in below!
    epi_detection_script='''
convolve %(templatefilename)s 1
trim %(trimstart)f %(trimlength)f
write_crossings %%s collapsed %(convolvethreshold)g triggers
%(posplot)s
query triggers_for_trigfile stdout
''' % {
    'templatefilename': self.templatefilename,
    'trimstart': self.upsample*(self.template_points/2)+trimcorrection,
    'trimlength': self.upsample*self.refine_points,
    'convolvethreshold': self.convolvethreshold,
    'posplot': 'set_comment Detecting EPI...\nposplot' if self.checkmode else ''
    }
    before=int((self.template_points+self.refine_points)/2)
    script=avg_q.Script(self.avg_q_instance)
    epochsource=avg_q.Epochsource(self.infile,before,before+1)
    epochsource.set_trigpoints(readpoint)
    script.add_Epochsource(epochsource)
    script.add_transform(self.collapseit)
    script.add_transform(self.upsampleit)
     script.add_transform(epi_detection_script % '-E') # Detect extrema. May fail if no extremum is present.
    trgfile_crs=trgfile.trgfile(script.runrdr())
    trgpoints=trgfile_crs.gettuples()
    if len(trgpoints)==0:
     # No extremum found. Check whether there is a signal of EPI-like amplitude at all.
     script=avg_q.Script(self.avg_q_instance)
     script.add_Epochsource(epochsource)
     script.add_transform(self.collapseit)
     script.add_transform(self.upsampleit)
     script.add_transform(epi_detection_script % '') # Detect crossings to check whether a signal is present at all
     trgfile_crs=trgfile.trgfile(script.runrdr())
     trgpoints=trgfile_crs.gettuples()
     if len(trgpoints)==0: break # No EPI signal.
     print("EPI peak detection failed - trying to continue without correction...")
     correction=correct_correct if correct_correct is not None else 0
    else:
     correction=float(trgpoints[0][0])/self.upsample-self.refine_points/2
    if correct_correct is None:
     correct_correct=correction
    else:
     correction-=correct_correct
     trigpoint+=correction
    print("Correction %g Position %d" % (correction, trigpoint))
    outtuples.append(trigpoint)
    # Comment this out to prove idempotency:
    trigpoint+=self.TR_points

   # Check the end of the scan - does it fit with TR?
   epi_end_detection_script='''
scale_by invpointmax
write_crossings -E collapsed 0.75 triggers
%(posplot)s
query triggers_for_trigfile stdout
''' % {
   'posplot': 'set_comment Detecting end of scan...\nposplot' if self.checkmode else ''
   }
   read_TS_before_trigpoint=2
   read_TS_after_trigpoint=1
   readpoint=trigpoint-read_TS_before_trigpoint*self.TR_points
   points_to_read=(read_TS_before_trigpoint+read_TS_after_trigpoint)*self.TR_points
   # Sigh... Detect whether the EEG was stopped *immediately* after the scan
   if readpoint+points_to_read>self.points_in_file:
    points_to_read=self.points_in_file-readpoint
   script=avg_q.Script(self.avg_q_instance)
   epochsource=avg_q.Epochsource(self.infile,0,points_to_read)
   epochsource.set_trigpoints(readpoint)
   script.add_Epochsource(epochsource)
   script.add_transform(self.collapseit)
   script.add_transform(epi_end_detection_script)
   trgfile_crs=trgfile.trgfile(script.runrdr())
   trgpoints=trgfile_crs.gettuples()
   point,code,description=trgpoints[-1]
   deviation=(point-read_TS_before_trigpoint*self.TS_points)/self.TS_points
   print("Last peak %gs after last EPI, deviation %g*TS" % ((point-read_TS_before_trigpoint*self.TR_points)/self.sfreq,deviation))
   if deviation< -0.05:
    print("Deviation too large, dropping last EPI - Check TS=%gms!" % self.TS)
    outtuples.pop(-1)
   elif deviation>=0:
    print("Positive deviation, check end_s=" + str(end_s) + "!")

   self.print_EPIstats(outtuples)
   if not self.checkmode:
    trgfile_crs.preamble['TS']=str(self.TS)
    trgfile_crs.preamble['TR']=str(self.TR)
    trgfile_crs.preamble['upsample']=str(self.upsample)
    crsfile=open(crsfilename,mode='w')
    trgfile_crs.writetuples(outtuples.as_triglist(),crsfile)
    crsfile.close()

   if self.haveTemplate:
    os.unlink(self.templatefilename)
    self.haveTemplate=False
Example #18
import sys
sys.path.append("R:/avg_q/python")

import avg_q

# The avg_q_ui.exe binary will be found automatically in the directory
# above python/
a = avg_q.avg_q(avg_q='avg_q_ui')

import glob
# Iterate over all files with extension .edf
for infile in glob.glob('N:/*.edf'):
    # avg_q.Epochsource normally takes an avg_q_file() descriptor as its first
    # argument but, for convenience, also accepts a plain file name with the
    # type inferred from the extension
    # Read a single 2-s epoch from the start of the file
    epochsource = avg_q.Epochsource(infile,
                                    '1s',
                                    '1s',
                                    continuous=True,
                                    epochs=1)
    script = avg_q.Script(a)
    script.add_Epochsource(epochsource)
    script.add_transform('''
query -N sfreq stdout
query -N nr_of_points stdout
posplot''')
    # avg_q.Script() sets the collect method to null_sink automatically
    script.run()

a.close()