def pearson_correlation(datadir, outdir, usemask=True, excludef='', exclude_idx=-1):
    slidir = datadir + os.path.sep + au.slices_str()

    subjsfile  = datadir + os.path.sep + au.subjects_str()
    labelsfile = datadir + os.path.sep + au.labels_str()

    lst = os.listdir(slidir)
    n   = au.count_match(lst, au.data_str() + '_' + au.slice_regex())

    exclude_log = ''
    if exclude_idx > -1:
        exclude_log = ' excluding subject ' + str(exclude_idx)

    au.log.info('Calculating correlation of ' + slidir + os.path.sep + au.data_str() + '_' + au.slice_regex() + exclude_log)

    #measure each data slice against the labels file
    for i in range(n):
        slino = au.zeropad(i)

        dataf = slidir + os.path.sep + au.data_str()    + '_' + au.slice_str() + '_' + slino + au.ext_str()
        maskf = slidir + os.path.sep + au.mask_str()    + '_' + au.slice_str() + '_' + slino + au.ext_str()
        outf  = outdir + os.path.sep + au.pearson_str() + '_' + au.slice_str() + '_' + slino

        if exclude_idx > -1:
            outf += '_' + au.excluded_str() + str(exclude_idx) + au.ext_str()
        else:
            outf += au.ext_str()

        if not os.path.isfile(dataf):
            au.log.error('Could not find ' + dataf)
            continue

        if not usemask:
            maskf = ''

        try:
            measure_pearson(dataf, labelsfile, outf, maskf, excludef, exclude_idx)
        except:
            au.log.error('pearson_correlation: Error measuring correlation on ' + dataf)
            au.log.error('Unexpected error: ' + str(sys.exc_info()[0]))
            exit(1)
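#Minimal usage sketch (an assumption, not part of the original module): the default
#paths below are hypothetical placeholders; 'datadir' must contain the slices
#subdirectory and the subjects/labels files referenced above.
def _example_pearson_run(datadir='/data/study/features', outdir='/data/study/pearson'):
    #correlate every data slice against the labels, using the slice masks, with no subject excluded
    pearson_correlation(datadir, outdir, usemask=True, excludef='', exclude_idx=-1)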
def group_stats(datadir, groupname, groupsize, outdir=''):
    lst = os.listdir(datadir)
    n   = au.count_match(lst, groupname + 's_' + au.slice_regex() + au.ext_str())

    if not outdir:
        outdir = datadir

    au.log.info('Calculating stats from group ' + groupname + ' in ' + outdir)

    for i in range(n):
        slino = au.zeropad(i)
        dataf = datadir + os.path.sep + groupname + 's_' + au.slice_str() + '_' + slino + au.ext_str()
        volstats(dataf, groupname, groupsize, outdir)
def merge_stats_slices(datadir, group):
    slice_str = au.slice_str()

    groupfregex = group + 's_' + slice_str + '_????' + '.'
    #This is a 4D volume with all subjects, it can be a big file, so I'm not creating it
    #merge_slices (datadir, groupfregex, group + 's')
    au.imrm(datadir + os.path.sep + groupfregex)

    [statfnames, mnames] = get_stats_fnames(group, outdir='')
    statfnames = statfnames[0]

    out = []
    for i in np.arange(len(statfnames)):
        fname = statfnames[i]
        m     = mnames[i]

        regex = group + 's_' + slice_str + '_????' + '_' + m
        o = merge_slices(datadir, regex, fname, datadir, cleanup=False)

        au.imrm(datadir + os.path.sep + regex)

        out.append(o)

    return out
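#Minimal usage sketch tying the two helpers above together (an assumption, not
#original code): 'statsdir' and the group name are hypothetical placeholders, and the
#group files are expected to follow the '<group>s_<slice>_NNNN' naming used above.
def _example_group_stats(statsdir='/data/study/pearson', group='control', groupsize=20):
    group_stats(statsdir, group, groupsize)        #per-slice statistics for the group
    return merge_stats_slices(statsdir, group)     #merge the slice-wise stat volumes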
#saving exclude files in mdir
outf_exclude = ''
if (excluf):
    outf_exclude = au.exclude_str()
    if expname:
        outf_exclude += '_' + expname
    if foldnumber:
        outf_exclude += '_' + foldnumber

    np.savetxt(outdir + os.path.sep + outf_exclude,     excluded, fmt='%i')
    np.savetxt(mdir   + os.path.sep + au.exclude_str(), excluded, fmt='%i')
    excluf = mdir + os.path.sep + au.exclude_str()

step = au.maskmerging_str() + ' ' + measure_fname + ' ' + mdir
if usemask and not au.is_done(chkf, step):
    maskregex = au.mask_str() + '_' + au.slice_str() + '*'
    post.merge_slices(slidir, maskregex, au.mask_str(), mdir, False)
    au.checklist_add(chkf, step)

#CORRELATION
#read the measure argument and start processing
if measure == 'pea':
    #measure pearson correlation for each population slice
    step = au.measureperslice_str() + step_params
    if not au.is_done(chkf, step):
        pear.pearson_correlation(outdir, mdir, usemask, excluf, leave)
        au.checklist_add(chkf, step)

    #merge all correlation slice measures
    step = au.postmerging_str() + step_params
    if not au.is_done(chkf, step):