def make_labels(self, ilines):
    """parse a list of the form
          LABEL : VALUES ...
       and return a LABEL list (with no trailing separator (':'))

       initialize maxcounts, subjcounts here
    """

    llist = []
    for lind, lstr in enumerate(ilines):
        # get label and value list
        rv, label, vals = self.get_label_vals(lstr)
        if rv < 1:
            continue

        nvals = len(vals)

        # label = self.find_parent_label(label)

        if self.verb > 2:
            print('++ label: %s, %d val(s)' % (label, nvals))

        llist.append(label)
        self.maxcounts[label] = nvals
        self.subjcounts[label] = 0

    if not UTIL.vals_are_unique(llist):
        print('** warning: labels are not unique, will use only last values')
        llist = UTIL.get_unique_sublist(llist)

    return 0, llist
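# Minimal, self-contained sketch of the "LABEL : VALUES ..." line format that
# make_labels() expects.  split_label_line is a hypothetical stand-in for the
# project's get_label_vals and only illustrates the parsing idea.
def split_label_line(line):
    """return (status, label, value list) for a 'LABEL : VALUES ...' line"""
    if ':' not in line:
        return 0, '', []                    # not a label line
    label, vstr = line.split(':', 1)
    return 1, label.strip(), vstr.split()

if __name__ == '__main__':
    print(split_label_line('subj : s001 s002 s003'))
    # -> (1, 'subj', ['s001', 's002', 's003'])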
def partition(self, part_file, prefix):
    """partition timing based on part_file labels, write each part to
       prefix_label.1D"""

    if not self.ready:
        print('** Timing element not ready for partitioning')
        return 1

    labels = read_value_file(part_file)
    if labels is None:
        print("** failed to read partition label file '%s'" % part_file)
        return 1

    nlabr = len(labels)

    if self.verb > 3:
        print('-- partition: %d labels: %s' % (nlabr, labels))

    # first test nrows, then test lengths per row
    if self.nrows != nlabr:
        print('** Timing nrows differs from partition nrows (%d, %d)' %
              (self.nrows, nlabr))
        return 1
    for ind in range(self.nrows):
        if len(self.data[ind]) != len(labels[ind]):
            print("** timing and label row lengths differ at line %d" % (ind + 1))
            print("   (%d != %d)" % (len(self.data[ind]), len(labels[ind])))
            return 1

    # make unique label list
    ulabs = []
    for line in labels:
        ulabs.extend(line)
    ulabs = UTIL.get_unique_sublist(ulabs)
    if self.verb > 2:
        print('-- partition: unique label list: %s' % ulabs)
    if ulabs.count('0'):
        ulabs.remove('0')

    if self.verb > 1:
        print('++ Timing: partitioning with %s' % part_file)

    # ------------------------------------------------------------
    # do the work, starting with copy:
    # for each label, extract those times and write out as timing file
    dupe = self.copy()          # keep results in new class instance
    for lab in ulabs:
        # extract timing for this label 'lab'
        mdata = []
        for r in range(nlabr):
            drow = []           # make one row of times for this label
            for c in range(len(labels[r])):
                if labels[r][c] == lab:
                    drow.append(self.mdata[r][c])
            mdata.append(drow)  # and append the new row
        del(dupe.mdata)         # out with the old,
        dupe.mdata = mdata      # and in with the new
        dupe.write_times('%s_%s.1D' % (prefix, lab))    # and write, yay
    del(dupe)                   # nuke the temporary instance

    return 0
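# Illustrative sketch of the per-row selection done inside partition(), using
# plain lists instead of the timing class.  The names and data here are
# hypothetical and not part of the library.
def select_by_label(times, labels, lab):
    """return times whose matching label equals lab, row by row"""
    return [[t for t, l in zip(trow, lrow) if l == lab]
            for trow, lrow in zip(times, labels)]

if __name__ == '__main__':
    times  = [[10.0, 22.5, 35.0], [12.5, 40.0]]
    labels = [['A',  'B',  'A' ], ['B',  'A' ]]
    print(select_by_label(times, labels, 'A'))   # [[10.0, 35.0], [40.0]]
    print(select_by_label(times, labels, 'B'))   # [[22.5], [12.5]]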
def show_data_info(self, header=1):
    """checks that are specific to data
          - class data existence and tree root
             - assume $HOME if not found
          - disk space under data root
             - maybe check for mounted file system
          - atlases (maybe just @Find TT_N27+tlrc?)
    """

    if header:
        print(UTIL.section_divider('data checks', hchar='-'))

    # locate various data trees, and possibly show recent history
    rv = 0
    rv += self.show_data_dir_info('AFNI_data6', 'history.txt')
    rv += self.show_data_dir_info('AFNI_demos', 'history.txt')
    rv += self.show_data_dir_info('suma_demo', 'README.archive_creation')
    rv += self.show_data_dir_info('afni_handouts')

    if rv:
        self.comments.append('insufficient data for AFNI bootcamp')

    evar = 'AFNI_ATLAS_DIR'
    tryenv = 0                          # might suggest setting evar
    haveenv = evar in os.environ
    if haveenv:
        edir = os.environ[evar]
    else:
        edir = ''

    # look for atlases in multiple directories
    atlas = 'TT_N27+tlrc'
    if os.path.isfile('%s/%s.HEAD' % (edir, atlas)):
        glist = [edir]
    else:
        glist = []

    cmd = '@FindAfniDsetPath %s' % atlas
    s, so, se = UTIL.limited_shell_exec(cmd, nlines=1)
    if s:
        tryenv = 1                      # failed
    elif len(so) > 0:
        glist.append(so[0])

    for ddir in ['/usr/share/afni/atlases', '/usr/local/afni/atlases']:
        if os.path.isfile('%s/%s.HEAD' % (ddir, atlas)):
            glist.append(ddir)
            if tryenv:
                self.comments.append('consider setting %s to %s' % (evar, ddir))

    # fix to work with found after the fact
    glist = UTIL.get_unique_sublist(glist)

    if len(glist) == 0:
        print('atlas : did not find %s' % atlas)
        self.comments.append('possibly missing atlases')
    else:
        for ddir in glist:
            print('atlas : found %-12s under %s' % (atlas, ddir))

    if haveenv:
        print("\natlas var: %s = %s" % (evar, edir))

    print('')
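# Rough, standalone sketch of the atlas search idea used above: check a few
# candidate directories for the atlas .HEAD file and keep the unique hits.
# The candidate directory list is an assumption for illustration only.
import os

def find_atlas_dirs(atlas='TT_N27+tlrc',
                    candidates=('/usr/share/afni/atlases',
                                '/usr/local/afni/atlases')):
    """return candidate directories that contain atlas.HEAD"""
    hits = []
    for ddir in candidates:
        if os.path.isfile('%s/%s.HEAD' % (ddir, atlas)) and ddir not in hits:
            hits.append(ddir)
    return hits

if __name__ == '__main__':
    print(find_atlas_dirs())    # [] on systems without AFNI atlases installed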
def show_data_info(self, header=1):
    """checks that are specific to data
          - class data existence and tree root
             - assume $HOME if not found
          - disk space under data root
             - maybe check for mounted file system
          - atlases (maybe just @Find TT_N27+tlrc?)
    """

    if header:
        print(UTIL.section_divider('data checks', hchar='-'))

    # locate various data trees, and possibly show recent history
    rv = 0
    rv += self.show_data_dir_info('AFNI_data6', 'history.txt')
    rv += self.show_data_dir_info('suma_demo', 'README.archive_creation')
    rv += self.show_data_dir_info('FATCAT_DEMO', 'README.timestamp')
    rv += self.show_data_dir_info('afni_handouts')

    if rv:
        self.comments.append('insufficient data for AFNI bootcamp')

    evar = 'AFNI_ATLAS_DIR'
    tryenv = 0                          # might suggest setting evar
    haveenv = evar in os.environ
    if haveenv:
        edir = os.environ[evar]
    else:
        edir = ''

    # look for atlases in multiple directories
    atlas = 'TT_N27+tlrc'
    if os.path.isfile('%s/%s.HEAD' % (edir, atlas)):
        glist = [edir]
    else:
        glist = []

    cmd = '@FindAfniDsetPath %s' % atlas
    s, so, se = UTIL.limited_shell_exec(cmd, nlines=1)
    if s:
        tryenv = 1                      # failed
    elif len(so) > 0:
        glist.append(so[0])

    for ddir in ['/usr/share/afni/atlases', '/usr/local/afni/atlases']:
        if os.path.isfile('%s/%s.HEAD' % (ddir, atlas)):
            glist.append(ddir)
            if tryenv:
                self.comments.append('consider setting %s to %s' % (evar, ddir))

    # fix to work with found after the fact
    glist = UTIL.get_unique_sublist(glist)

    if len(glist) == 0:
        print('atlas : did not find %s' % atlas)
        self.comments.append('possibly missing atlases')
    else:
        for ddir in glist:
            print('atlas : found %-12s under %s' % (atlas, ddir))

    if haveenv:
        print("\natlas var: %s = %s" % (evar, edir))

    print('')
def find_data_dir(self, ddir, gdirs=[], depth=2):
    """search under a list of glob directories for the given ddir"""

    if ddir == '' or len(gdirs) == 0:
        return None

    dlist = []
    for pdir in gdirs:
        droot = pdir
        for d in range(depth + 1):
            dlist.extend(glob.glob('%s/%s' % (droot, ddir)))
            droot += '/*'

    if self.verb > 3:
        print('-- found %s dirs %s' % (ddir, dlist))

    dlist = UTIL.get_unique_sublist(dlist)

    if self.verb > 2:
        print('-- found trimmed %s dirs %s' % (ddir, dlist))

    if len(dlist) == 0:
        return None

    dlen = len(ddir) + 1
    return dlist[0][0:-dlen]
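# Standalone sketch of the depth-limited glob search performed by
# find_data_dir(): widen the pattern by one '/*' per level and collect any
# matches.  The target name and root path in the example are hypothetical.
import glob

def glob_for_dir(target, roots, depth=2):
    """return unique paths matching target under each root, up to depth levels down"""
    found = []
    for root in roots:
        pattern = root
        for _ in range(depth + 1):
            for hit in glob.glob('%s/%s' % (pattern, target)):
                if hit not in found:
                    found.append(hit)
            pattern += '/*'
    return found

if __name__ == '__main__':
    print(glob_for_dir('AFNI_data6', ['/data']))   # e.g. ['/data/bootcamp/AFNI_data6']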