def modality_progress(outDir, modality, num_cases, verbose):
    """Print a per-stage progress report of the TBSS pipeline for one modality.

    For each pipeline stage (origdata, preproc, warped, skeleton, roi) the
    matching output files under outDir/modality are globbed, a count is
    printed, and the caseids derived from the file names are collected into
    a DataFrame. With verbose=True the full dashboard DataFrame is printed.
    """
    modDir = pjoin(outDir, modality)

    # (DataFrame column, glob pattern, progress message) for each stage,
    # in pipeline order
    stages = [
        ('origdata',
         pjoin(modDir, 'origdata', '*.nii.gz'),
         'origdata obtained: '),
        ('preprocessed',
         pjoin(modDir, 'preproc', f'*{modality}.nii.gz'),
         'pre-processed: '),
        ('warped',
         pjoin(modDir, 'warped', f'*{modality}_to_target.nii.gz'),
         'registered to template space: '),
        ('skeletonized',
         pjoin(modDir, 'skeleton', f'*{modality}_to_target_skel.nii.gz'),
         'skeletonized: '),
        ('roi',
         pjoin(modDir, 'roi', f'*{modality}_roi.csv'),
         'roi-based stat calculated: '),
    ]

    # organize progress in a dataframe according to caseid
    print(f'\nProgress of {modality} TBSS:\n')
    df = pd.DataFrame(columns=[column for column, _, _ in stages])

    for column, pattern, message in stages:
        found, padded = glob_dir(num_cases, pattern)
        print(message, len(found))
        df[column] = [basename(p).split('.')[0] for p in padded]

    if verbose:
        with pd.option_context('display.max_rows', None,
                               'display.max_columns', None):
            print(
                '\nNOTE: Enlarge your terminal to have a better view of the dashboard\n'
            )
            print(df)
def checkDuplicity(imgs, cases):
    """Verify that each caseid identifies exactly one input image.

    A caseid is matched by plain substring search against every image path,
    so e.g. caseid '001' would match both '001.nii.gz' and '1001.nii.gz'.

    Parameters
    ----------
    imgs : iterable of str
        Paths of the input images.
    cases : iterable of str
        Caseids expected to appear in exactly one image path each.

    Raises
    ------
    ValueError
        If any caseid occurs as a substring in more than one image path.
    """
    print('\nChecking for duplicity of caseids in input images')
    for c in cases:
        # every image whose path contains this caseid as a substring
        dupPath = [basename(imgPath) for imgPath in imgs if c in imgPath]
        if len(dupPath) > 1:
            print(f'One (or some) of the caseids don\'t uniquely represent input images. '
                  f'For example, caseid {c} exists as a substring in multiple images: ', dupPath)
            raise ValueError('Either remove conflicting imgs/cases or provide --input IMAGELIST.csv')
def write_caselist(logDir, List=None, Dir=None):
    """Derive caseids from image file names and write them to logDir/caselist.txt.

    Exactly one of List or Dir should be given. A caseid is the image file
    name up to its first '.' (so 'sub1.nii.gz' -> 'sub1').

    Parameters
    ----------
    logDir : str
        Directory where caselist.txt is written.
    List : sequence or 2-D array-like, optional
        Image paths, or an array-like whose first column holds the paths
        (e.g. an IMAGELIST.csv loaded into a numpy array).
    Dir : str, optional
        Directory whose '*.nii.gz' files are used as the images.

    Returns
    -------
    (str, list of str)
        Path of the written caselist file and the list of caseids, in the
        order the images were given/globbed.

    Raises
    ------
    ValueError
        If neither List nor Dir is provided.
    """
    if Dir is not None:
        imgs = glob(pjoin(Dir, '*.nii.gz'))
    elif List is not None:
        try:
            # 2-D array-like: image paths live in the first column
            imgs = List[:, 0]
        except (TypeError, IndexError, KeyError):
            # plain 1-D sequence of image paths
            imgs = List
    else:
        raise ValueError('Either List or Dir must be provided')

    caselist = pjoin(logDir, 'caselist.txt')
    cases = []
    with open(caselist, 'w') as f:
        for img in imgs:
            caseid = basename(img).split('.')[0]
            cases.append(caseid)
            f.write(caseid + '\n')

    return (caselist, cases)
def process(args):
    """Run the full TBSS pipeline for one diffusion modality (FA/MD/AD/RD).

    Stages: hole-filling of the input images, preprocessing (FSL
    tbss_1_preproc for FA, FA-mask multiplication for other modalities),
    template creation/registration with ANTs, optional projection to a
    standard space, skeletonization, and optional ROI-based analysis.

    Parameters
    ----------
    args : argparse.Namespace-like object
        Carries pipeline settings (outDir, modality, caselist, ncpu,
        template, space, labelMap, ...). NOTE: this function MUTATES args
        (template, xfrmDir) and returns the updated object.

    Returns
    -------
    The args object as modified here and by skeletonize().

    Side effects: creates/overwrites directories under args.outDir, changes
    and restores the working directory, runs external tools (FSL, ANTs) via
    shell, and spawns multiprocessing pools of size args.ncpu.
    """
    cases = read_cases(args.caselist)
    cases.sort()

    # organize images into different directories ===========================================================
    # outDir
    #    |
    # ------------------------------------------------------------------------------------------------------
    #    |           |             |                |        |       |            |         |
    #    |           |             |                |        |       |            |         |
    # transform   template         FA               MD       AD      RD          log      stats
    #                              |       (same inner file structure as that of FA)
    #                              |
    #             ----------------------------------------
    #             |       |       |       |              |
    #          preproc  origdata warped  skeleton      roi
    #
    # copy all FA into FA directory
    # put all preprocessed data into preproc directory
    # keep all warp/affine in transform directory
    # output all warped images in warped directory
    # output all skeletons in skel directory
    # output ROI based analysis files in roi directory
    # save all ROI statistics, mean, and combined images

    # define directories
    modDir = pjoin(args.outDir, f'{args.modality}')
    # args.xfrmDir = pjoin(args.outDir, 'transform')
    # args.statsDir = pjoin(args.outDir, 'stats')
    templateDir = pjoin(args.outDir, 'template/')  # trailing slash is important for antsMultivariate*.sh
    preprocDir= pjoin(modDir, 'preproc')
    warpDir= pjoin(modDir, 'warped')
    skelDir= pjoin(modDir, 'skeleton')
    roiDir= pjoin(modDir, 'roi')

    # force creation of inner directories
    makeDirectory(warpDir, True)
    makeDirectory(skelDir, True)
    makeDirectory(roiDir, True)

    # modality can be one of [FA,MD,AD,RD]
    # we could use just listdir(), but the following would be stricter and safer
    # since cases are sorted and we named images as modDir/{c}.nii.gz
    # the following sort puts modImgs in the same order as that of cases
    modImgs = glob(pjoin(modDir, '*.nii.gz'))
    modImgs.sort()

    if not args.noFillHole:
        print('\nFilling holes inside the brain region in diffusion measure images')
        # fill holes in all modality images, in parallel
        # caveat: origdata no longer remain origdata, become hole filled origdata
        pool= Pool(args.ncpu)
        pool.map_async(fillHoles, modImgs, error_callback= RAISE)
        pool.close()
        pool.join()

    # preprocessing ========================================================================================
    if args.modality=='FA':
        print('Preprocessing FA images: eroding them and zeroing the end slices ...')
        modDir= pjoin(args.outDir, args.modality)

        # tbss_1_preproc operates on the current directory, so chdir in and
        # back out again
        CURRDIR= getcwd()
        chdir(modDir)
        check_call('tbss_1_preproc *.nii.gz', shell= True)  # creates 'FA' and 'origdata' folders
        chdir(CURRDIR)
        print('Index file location has changed, see ', pjoin(preprocDir, 'slicesdir', 'index.html'))

        # rename args.modality/FA to args.modality/preproc
        move(pjoin(modDir, 'FA'), preprocDir)

    else:
        # non-FA modalities reuse the masks that the FA pass produced, so the
        # FA branch is presumably expected to have run first — TODO confirm
        print(f'Preprocessing {args.modality} images using FA mask (eroding them and zeroing the end slices) ...')
        modDir = pjoin(args.outDir, args.modality)

        # force creation of inner directories
        makeDirectory(pjoin(modDir, 'origdata'), True)
        makeDirectory(pjoin(modDir, 'preproc'), True)

        pool= Pool(args.ncpu)
        for c, imgPath in zip(cases, modImgs):
            FAmask= pjoin(args.outDir, 'FA', 'preproc', f'{c}_FA_mask.nii.gz')
            preprocMod= pjoin(preprocDir, f'{c}_{args.modality}.nii.gz')

            pool.apply_async(_fslmask, (imgPath, FAmask, preprocMod), error_callback= RAISE)

        pool.close()
        pool.join()

        # move the (hole-filled) originals into origdata
        check_call((' ').join(['mv', pjoin(modDir, '*.nii.gz'), pjoin(modDir, 'origdata')]), shell= True)

    # from here on work with the preprocessed images
    modImgs = glob(pjoin(preprocDir, f'*{args.modality}.nii.gz'))
    modImgs.sort()

    # create template ======================================================================================
    if not args.template and args.modality=='FA':
        print('Creating study specific template ...')
        # we could pass modImgs directly to antsMult(), instead saving them to a .txt file for logging
        # modImgs = glob(pjoin(preprocDir, f'*{args.modality}*.nii.gz'))

        makeDirectory(templateDir, args.force)

        antsMultCaselist = pjoin(args.logDir, 'antsMultCaselist.txt')
        with open(antsMultCaselist, 'w') as f:
            for imgPath in modImgs:
                f.write(imgPath+'\n')

        # ATTN: antsMultivariateTemplateConstruction2.sh requires '/' at the end of templateDir
        antsMult(antsMultCaselist, templateDir, args.logDir, args.ncpu, args.verbose)
        # TODO: rename the template
        args.template= pjoin(templateDir, 'template0.nii.gz')

        # NOTE(review): shell-built 'ln -s' fails if the link already exists — TODO confirm intended
        check_call(f'ln -s {args.template} {args.statsDir}', shell= True)

        # warp and affine to template0.nii.gz have been created for each case during template construction
        # so template directory should be the transform directory
        args.xfrmDir= templateDir

    # register each image to the template ==================================================================
    elif args.template:
        # find warp and affine of FA image to args.template for each case
        if args.modality=='FA':
            print(f'Registering FA images to {args.template} space ..')
            makeDirectory(args.xfrmDir, True)
            pool= Pool(args.ncpu)
            for c, imgPath in zip(cases, modImgs):
                pool.apply_async(antsReg, (args.template, imgPath, pjoin(args.xfrmDir, f'{c}_FA'), args.logDir, args.verbose), error_callback= RAISE)
            pool.close()
            pool.join()

    # register template to a standard space ================================================================
    # useful when you would like to do ROI based analysis using an atlas
    # project the created/specified template to the space of atlas
    if args.space:
        outPrefix = pjoin(args.xfrmDir, 'tmp2space')
        warp2space = outPrefix + '1Warp.nii.gz'
        trans2space = outPrefix + '0GenericAffine.mat'
        # skip the registration if a previous run already produced the warp
        if not isfile(warp2space):
            print(f'Registering {args.template} to the space of {args.space} ...')
            antsReg(args.space, args.template, outPrefix, args.logDir, args.verbose)

        # TODO: rename the template
        args.template = outPrefix + 'Warped.nii.gz'
        if basename(args.template) not in listdir(args.statsDir):
            check_call(f'ln -s {args.template} {args.statsDir}', shell= True)

    # warp every preprocessed image into the (standard or template) target space
    pool= Pool(args.ncpu)
    for c, imgPath in zip(cases, modImgs):
        # generalize warp and affine: per-case FA transforms are reused for
        # every modality
        warp2tmp= glob(pjoin(args.xfrmDir, f'{c}_FA*1Warp.nii.gz'))[0]
        trans2tmp= glob(pjoin(args.xfrmDir, f'{c}_FA*0GenericAffine.mat'))[0]

        output= pjoin(warpDir, f'{c}_{args.modality}_to_target.nii.gz')

        if not args.space:
            # print(f'Warping {imgPath} to template space ...')
            pool.apply_async(_antsApplyTransforms, (imgPath, output, args.template, warp2tmp, trans2tmp), error_callback= RAISE)
        else:
            # print(f'Warping {imgPath} to template-->standard space ...')
            pool.apply_async(_antsApplyTransforms, (imgPath, output, args.space, warp2tmp, trans2tmp, warp2space, trans2space), error_callback= RAISE)

    pool.close()
    pool.join()

    # create skeleton for each subject
    modImgsInTarget= glob(pjoin(warpDir, f'*_{args.modality}_to_target.nii.gz'))
    modImgsInTarget.sort()

    miFile= None
    if args.modality=='FA':
        # mutual-information log between warped images and the target, FA only
        print(f'Logging MI between warped images {warpDir}/*.nii.gz and target {args.template} ...')
        miFile= measureSimilarity(modImgsInTarget, cases, args.template, args.logDir, args.ncpu)

    # obtain modified args from skeletonize() which will be used for other modalities than FA
    args= skeletonize(modImgsInTarget, cases, args, skelDir, miFile)

    skelImgsInSub= glob(pjoin(skelDir, f'*_{args.modality}_to_target_skel.nii.gz'))
    skelImgsInSub.sort()

    # roi based analysis
    if args.labelMap:
        roi_analysis(skelImgsInSub, cases, args, roiDir, args.ncpu)

    return args