Exemplo n.º 1
0
def generate_ss(modDir, ssDir, cases, ncpu, cut_coords):
    """Render one PNG per case: the skeleton overlaid on the warped image.

    Screenshots are written to ssDir as <caseid>.png; plotting jobs are
    fanned out over a pool of ncpu workers.
    """
    # sorted listings line up with the (sorted) caseids
    warpedImgs = sorted(glob(pjoin(modDir, 'warped', '*_to_target.nii.gz')))
    skelImgs = sorted(glob(pjoin(modDir, 'skeleton', '*_to_target_skel.nii.gz')))

    makeDirectory(ssDir)

    workers = Pool(ncpu)
    for skel_img, warped_img, caseid in zip(image.iter_img(skelImgs),
                                            image.iter_img(warpedImgs),
                                            cases):
        print('Taking screen shot of ', caseid)
        plot_kwargs = {
            'bg_img': warped_img,
            'dim': False,
            'annotate': False,
            'draw_cross': False,
            'cut_coords': cut_coords,
            'resampling_interpolation': 'nearest',
            'output_file': pjoin(ssDir, f'{caseid}.png')
        }
        # error_callback surfaces exceptions raised inside the workers
        workers.apply_async(func=plotting.plot_stat_map,
                            args=(skel_img, ),
                            kwds=plot_kwargs,
                            error_callback=RAISE)

    workers.close()
    workers.join()
    '''
Exemplo n.º 2
0
def measureSimilarity(imgs, cases, target, logDir, ncpu):
    """Compute mutual information (MI) between each warped image and target.

    computeMI workers write one logDir/<caseid>_MI.txt per case; the values
    are then combined into logDir/similarity.csv in ascending order of MI
    (lower MI = better registration), so the worst registrations are easy
    to spot. Returns the path of the summary csv.
    """
    pool = Pool(ncpu)
    for img, c in zip(imgs, cases):
        print(f'MI between {c} and target')
        miFile = pjoin(logDir, f'{c}_MI.txt')
        pool.apply_async(func=computeMI, args=(target, img, miFile), error_callback=RAISE)

    pool.close()
    pool.join()

    summaryCsv = pjoin(logDir, 'similarity.csv')

    # collect the per-case MI values written by the workers
    mis = []
    for c in cases:
        with open(pjoin(logDir, f'{c}_MI.txt')) as f:
            mis.append(float(f.read().strip()))

    # write cases sorted by ascending MI, as the sibling implementation does
    with open(summaryCsv, 'w') as fw:
        for i in np.argsort(mis):
            fw.write(cases[i] + ',' + str(mis[i]) + '\n')

    return summaryCsv
Exemplo n.º 3
0
def project_skeleton(c, imgPath, args, skelDir):
    '''
    Project one subject's warped image onto the mean FA skeleton via FSL's
    tbss_skeleton, writing skelDir/{c}_{modality}_to_target_skel.nii.gz.

    For FA the warped FA image itself drives the projection; for any other
    modality the subject's warped FA image drives the projection and the
    modality image is supplied via -a (alternative 4D data).

    Part of FSL (ID: 5.0.11)
    tbss_skeleton (Version 1.03)
    Copyright(c) 2005-2007, University of Oxford (Stephen Smith)

    Usage:
    tbss_skeleton -i <inputimage> -o <skeleton>
    tbss_skeleton -i <inputimage> -p <skel_thresh> <distancemap> <search_rule_mask> <4Ddata> <projected_4Ddata> [-a <alt_4D>] [-s <alt_skeleton>]}

    Compulsory arguments (You MUST set one or more of):
        -i,--in	    input image

    Optional arguments (You may optionally specify one or more of):
        -o,--out	output image
        -p          <skel_thresh> <distancemap> <search_rule_mask> <4Ddata> <projected_4Ddata>
        -a	        alternative 4Ddata (e.g. L1)
        -s	        alternative skeleton
        -h,--help	display this message
        -d,--debug	switch on debugging image outputs
        -D,--debug2 <skelpoints>	de-project <skelpoints> points on skeleton back to all_FA space
    '''

    # FIXME: what to use with -i when ANTS/ENIGMA, look into tbss_skeleton.cc code

    print(f'projecting {imgPath} on skeleton ...')
    modImgSkel = pjoin(skelDir, f'{c}_{args.modality}_to_target_skel.nii.gz')

    if args.modality == 'FA':
        # FA drives its own projection; no alternative 4D data
        srcImg = imgPath
        altArgs = []
    else:
        # the subject's warped FA image drives the projection, the
        # modality image rides along through -a
        srcImg = pjoin(args.outDir, 'FA', 'warped', f'{c}_FA_to_target.nii.gz')
        altArgs = ['-a', imgPath]

    cmd = ' '.join(['tbss_skeleton', '-i', srcImg, '-p', args.SKEL_THRESH,
                    args.skeletonMaskDst, args.SEARCH_RULE_MASK, srcImg,
                    modImgSkel] + altArgs + ['-s', args.skeletonMask])

    # use Popen() so we can wait()
    p = Popen(cmd, shell=True)
    p.wait()
Exemplo n.º 4
0
def generate_ss(modDir, ssDir, cases, ncpu, cut_coords):
    # Pair up skeleton/warped images with caseids and compute per-case
    # screenshot paths.
    # NOTE(review): this copy appears truncated by the scrape — the loop
    # computes output_file but never submits a plotting job, and the pool is
    # never closed/joined; compare with the complete generate_ss
    # implementation earlier in this file.

    # reorder both skeleton/* and warped/* according to caseId
    warpedImgs= glob(pjoin(modDir, 'warped', '*_to_target.nii.gz'))
    warpedImgs.sort()
    skelImgs= glob(pjoin(modDir, 'skeleton', '*_to_target_skel.nii.gz'))
    skelImgs.sort()

    makeDirectory(ssDir)

    pool= Pool(ncpu)
    for fg,bg,c in zip(image.iter_img(skelImgs), image.iter_img(warpedImgs), cases):
        print('Taking screen shot of ', c)
        output_file = pjoin(ssDir, f'{c}.png')
Exemplo n.º 5
0
def generate_diffusion_measures(dwImgPath, maskPath, caseId, outDir):
    """Fit the diffusion tensor for one case and file the derived measures.

    The DWI and mask are converted from NRRD to NIFTI first; the FA/MD/AD/RD
    maps produced by dti() are moved to outDir/<measure>/<caseId>.nii.gz.
    """
    # work on NIFTI copies of the inputs
    dwImgPath = nrrd2nifti(dwImgPath)
    maskPath = nrrd2nifti(maskPath)

    inPrefix = dwImgPath.split('.')[0]
    outPrefix = pjoin('/tmp', caseId)
    dti(dwImgPath, maskPath, inPrefix, outPrefix)

    # organize diffusion measures into separate directories
    for measure in ('FA', 'MD', 'AD', 'RD'):
        move(f'{outPrefix}_{measure}.nii.gz',
             pjoin(outDir, measure, caseId + '.nii.gz'))
Exemplo n.º 6
0
def generate_ss(modDir, ssDir, cases, ncpu):
    """Render one PNG per case (skeleton over warped image) into ssDir.

    Images are matched to caseids via orderCases(); one screenshot per case
    is rendered by a pool of ncpu workers.
    """
    # reorder both skeleton/* and warped/* according to caseId
    warpedImgs= glob(pjoin(modDir, 'warped', '*_to_target.nii.gz'))
    skelImgs= glob(pjoin(modDir, 'skeleton', '*_to_target_skel.nii.gz'))
    warpedImgs= orderCases(warpedImgs, cases)
    skelImgs= orderCases(skelImgs, cases)

    makeDirectory(ssDir)

    pool= Pool(ncpu)
    for fg,bg,c in zip(image.iter_img(skelImgs), image.iter_img(warpedImgs), cases):
        print('Taking screen shot of ', c)
        output_file = pjoin(ssDir, f'{c}.png')
        # error_callback surfaces worker exceptions that apply_async would
        # otherwise swallow silently (consistent with the other pool users)
        pool.apply_async(func= plotting.plot_stat_map, args= (fg, ),
                         kwds= {'bg_img':bg, 'dim':False, 'annotate':False, 'draw_cross':False, 'output_file':output_file, },
                         error_callback= RAISE)

    pool.close()
    pool.join()
Exemplo n.º 7
0
def main():
    """Parse CLI arguments, render per-case screenshots, and write the
    summary QC HTML for one TBSS modality."""

    parser = argparse.ArgumentParser(description='Generates an HTML file with skeleton overlaid upon the diffusivity measure '
                                                 'i.e. FA,MD,AD,RD etc', formatter_class= argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument('-d','--dir', type=str, default=argparse.SUPPRESS,
                        help='TBSS output directory where results are stored in --modality sudirectory; '
                             'you should have write permission into the directories')

    parser.add_argument('-m','--modality', type=str, default='FA', help='Modality={FA,MD,AD,RD,...} of images')
    parser.add_argument('-c','--caselist', type=str, default=argparse.SUPPRESS,
                        help='caseIds from the caselist are used to label screenshots, default: outDir/log/caselist.txt')

    parser.add_argument('-n','--ncpu', type= int, default=4, help='number of threads to use, if other processes in your computer '
                        'becomes sluggish/you run into memory error, reduce --nproc')

    parser.add_argument('-i', '--cut_coords', type=int, nargs=3,
                        help='The MNI coordinates of the point where cut is '
                             'performed. eg) --cut_coords 1 -19 14')

    args = parser.parse_args()

    # -d/--dir uses argparse.SUPPRESS, so the attribute is entirely absent
    # (not None) when the flag is omitted; fail with a usage error instead
    # of an AttributeError below
    if not hasattr(args, 'dir'):
        parser.error('-d/--dir is required')

    args.outDir= abspath(args.dir)
    modDir= pjoin(args.outDir, args.modality)
    ssDir= pjoin(modDir, 'slicesdir')


    # --caselist also uses SUPPRESS: test for the attribute's presence,
    # since `if args.caselist:` would raise AttributeError when omitted
    if hasattr(args, 'caselist'):
        args.caselist= abspath(args.caselist)
    else:
        # use default
        args.caselist= pjoin(args.outDir, 'log', 'caselist.txt')


    cases= read_cases(args.caselist)
    cases.sort()

    # generate screenshots
    generate_ss(modDir, ssDir, cases, args.ncpu, cut_coords=args.cut_coords)

    # write summary HTML file
    write_html(ssDir, cases)
Exemplo n.º 8
0
def write_html(ssDir, cases):
    """Write ssDir/summary.html embedding each case's <caseid>.png screenshot.

    Uses print(..., file=f) instead of the previous global sys.stdout
    redirection, which left stdout pointing at the (then-closed) file if any
    exception occurred mid-write; the with-block also guarantees the file is
    closed. Two HTML defects are fixed: the broken attribute quoting in
    background-color:"white"; and the missing space before the img width
    attribute.
    """
    summaryFile = pjoin(ssDir, 'summary.html')
    print('Writing html file, view output at ', summaryFile)

    # summary html is inspired by MATLAB_QC/QC_FA_SKEL/enigmaDTI_FA_Skel_QC.html
    with open(summaryFile, 'w') as f:

        # beginning of html
        print('''<html>
<head>
<style type="text/css">
*
{
margin: 0px;
padding: 0px;
}
html,body
{
height: 100%;
}
</style>
</head>
<body>
<pre> This HTML file was generated by  <b>https://github.com/pnlbwh/tbss</b>  pipeline

</pre>''', file=f)

        for c in cases:
            # repeat for each case
            print(f'''<table cellspacing="1" style="width:100%;background-color:white;">
<tr>
<td> <FONT COLOR=BLUE FACE="Geneva, Arial" SIZE=4> {c} </FONT> </td>
</tr>
<tr>
<td><a href="file:{c}.png"><img src="{c}.png" width="100%" ></a></td>
<br>
</tr>
</table>''', file=f)

        # ending of html
        print('''</body>
</html>''', file=f)
Exemplo n.º 9
0
def write_caselist(logDir, List=None, Dir=None):
    """Derive caseids and write them to logDir/caselist.txt.

    Caseids come either from the *.nii.gz files found in Dir, or from List —
    a 2D array whose first column holds image paths, or a plain sequence of
    image paths. A caseid is the image filename up to the first '.'.

    Returns (caselist_path, cases).

    Raises ValueError when neither List nor Dir is given.
    """
    if Dir is not None:
        imgs = glob(pjoin(Dir, '*.nii.gz'))

    elif List is not None:
        try:
            # 2D input: first column carries the image paths
            imgs = List[:, 0]
        except (TypeError, IndexError):
            # plain 1D sequence of image paths
            imgs = List

    else:
        # previously this fell through to a NameError on `imgs`; fail loudly
        raise ValueError('Provide either List or Dir to write_caselist()')

    caselist = pjoin(logDir, 'caselist.txt')
    cases = []
    with open(caselist, 'w') as f:
        for img in imgs:
            caseid = basename(img).split('.')[0]
            cases.append(caseid)
            f.write(caseid + '\n')

    return (caselist, cases)
Exemplo n.º 10
0
def show_progress(verbose=False):
    """Print a progress dashboard for every modality of a TBSS study.

    Reads outDir and the modality list from config.ini next to this file,
    reports per-modality stage counts, then the elapsed wall time.
    """
    cfg = ConfigParser()
    cfg.read(pjoin(FILEDIR, 'config.ini'))
    outDir = cfg['DEFAULT']['outDir']
    modalities = cfg['DEFAULT']['modalities'].split(',')

    # the caselist fixes how many subjects each stage should eventually hold
    num_cases = len(read_cases(pjoin(outDir, 'log', 'caselist.txt')))

    # the start time is always logged
    start_time = read_time(pjoin(outDir, 'log', 'start_time.txt'))

    # the final time exists only once the pipeline finished; otherwise
    # measure against "now"
    final_time_file = pjoin(outDir, 'log', 'final_time.txt')
    if isfile(final_time_file):
        final_time = read_time(final_time_file)
    else:
        final_time = datetime.now()

    print('Output directory:              ', outDir)
    print('Number of cases to process:    ', num_cases)
    for modality in modalities:
        modality_progress(outDir, modality, num_cases, verbose)

    # show duration, decomposed into days/hours/minutes/seconds
    remainder = (final_time - start_time).total_seconds()
    days, remainder = divmod(remainder, 86400)
    hours, remainder = divmod(remainder, 3600)
    minutes, seconds = divmod(remainder, 60)
    print(
        "\nTime taken so far: %d days, %d hours, %d minutes and %d seconds\n" %
        (days, hours, minutes, seconds))
Exemplo n.º 11
0
def measureSimilarity(imgs, cases, target, logDir, ncpu):
    """Compute MI between each warped image and target, summarize to csv.

    Each computeMI worker writes logDir/<caseid>_MI.txt; the values are then
    combined into logDir/similarity.csv in ascending order of MI. Returns
    the path of the summary csv.
    """
    workers = Pool(ncpu)
    for img, c in zip(imgs, cases):
        print(f'MI between {c} and target')
        miFile = pjoin(logDir, f'{c}_MI.txt')
        workers.apply_async(func=computeMI,
                            args=(target, img, miFile),
                            error_callback=RAISE)

    workers.close()
    workers.join()

    summaryCsv = pjoin(logDir, 'similarity.csv')

    print('The lower the MI, the better is the quality of registration. '
          f'Hence {summaryCsv} notes cases in ascending order of MI.')

    # gather the per-case values written by the workers
    mis = []
    for c in cases:
        with open(pjoin(logDir, f'{c}_MI.txt')) as f:
            mis.append(float(f.read().strip()))

    # emit cases sorted by ascending MI
    with open(summaryCsv, 'w') as fw:
        for i in np.argsort(mis):
            fw.write(cases[i] + ',' + str(mis[i]) + '\n')

    return summaryCsv
Exemplo n.º 12
0
def roi_analysis(imgs, cases, args, statsDir, roiDir, N_CPU):
    """Run per-subject ROI statistics in parallel, then combine the per-case
    csvs into statsDir/{modality}_combined_roi[_avg].csv.

    The previously duplicated combine logic for the plain and the averaged
    csvs is factored into _combine_roi_csvs().
    """
    intLabels = load(args.labelMap).get_data()
    label2name = parse_labels(np.unique(intLabels)[1:], args.lut)
    commonLabels = average_labels(label2name.values())

    pool = Pool(N_CPU)
    for c, imgPath in zip(cases, imgs):
        pool.apply_async(func=subject_stat,
                         args=(imgPath, c, args.modality, label2name,
                               commonLabels, intLabels, roiDir, args.avg),
                         error_callback=RAISE)

    pool.close()
    pool.join()

    # combine the per-case csvs written by subject_stat()
    _combine_roi_csvs(cases, roiDir, args.modality, '_roi',
                      pjoin(statsDir, f'{args.modality}_combined_roi.csv'))

    if args.avg:
        _combine_roi_csvs(
            cases, roiDir, args.modality, '_roi_avg',
            pjoin(statsDir, f'{args.modality}_combined_roi_avg.csv'))


def _combine_roi_csvs(cases, roiDir, modality, suffix, combined_csv):
    """Stack roiDir/<case>_<modality><suffix>.csv files into one csv with a
    'Cases' index column and one column per tract."""
    # read one stat file to obtain the tract headers
    header_df = pd.read_csv(pjoin(roiDir, f'{cases[0]}_{modality}{suffix}.csv'))
    df_comb = pd.DataFrame(columns=np.append('Cases', header_df['Tract'].values))

    for i, c in enumerate(cases):
        df = pd.read_csv(pjoin(roiDir, f'{c}_{modality}{suffix}.csv'))
        # num2str() text formatting is for precision control
        df_comb.loc[i] = np.append(
            c, np.array([num2str(x) for x in df['Average'].values]))

    df_comb.sort_index(axis=1).set_index('Cases').to_csv(combined_csv)
    print('Made ', combined_csv)
Exemplo n.º 13
0
def modality_progress(outDir, modality, num_cases, verbose):
    """Print a per-stage progress dashboard for one TBSS modality.

    Each pipeline stage is counted by globbing its output directory; the
    previously copy-pasted five stage blocks are table-driven here.
    """
    modDir = pjoin(outDir, modality)

    # (dataframe column, console label, glob pattern) per pipeline stage
    stages = [
        ('origdata', 'origdata obtained:             ',
         pjoin(modDir, 'origdata', '*.nii.gz')),
        ('preprocessed', 'pre-processed:                 ',
         pjoin(modDir, 'preproc', f'*{modality}.nii.gz')),
        ('warped', 'registered to template space:  ',
         pjoin(modDir, 'warped', f'*{modality}_to_target.nii.gz')),
        ('skeletonized', 'skeletonized:                  ',
         pjoin(modDir, 'skeleton', f'*{modality}_to_target_skel.nii.gz')),
        ('roi', 'roi-based stat calculated:     ',
         pjoin(modDir, 'roi', f'*{modality}_roi.csv')),
    ]

    # organize progress in a dataframe according to caseid
    print(f'\nProgress of {modality} TBSS:\n')
    df = pd.DataFrame(columns=[col for col, _, _ in stages])

    for col, label, pattern in stages:
        List_actual, List = glob_dir(num_cases, pattern)
        print(label, len(List_actual))
        df[col] = [basename(imgPath).split('.')[0] for imgPath in List]

    if verbose:
        with pd.option_context('display.max_rows', None, 'display.max_columns',
                               None):
            print(
                '\nNOTE: Enlarge your terminal to have a better view of the dashboard\n'
            )
            print(df)
Exemplo n.º 14
0
def process(args):
    '''
    Run one modality (args.modality: FA/MD/AD/RD/...) of the TBSS pipeline
    end to end: optional hole filling, preprocessing, template creation or
    registration, warping into template (or standard) space,
    skeletonization, and optional ROI-based analysis.

    Returns args as augmented by skeletonize() (template/skeleton paths),
    so subsequent modalities can reuse them.
    '''

    cases= read_cases(args.caselist)
    cases.sort()

    # organize images into different directories ===========================================================

    # outDir
    #    |
    # ------------------------------------------------------------------------------------------------------
    #    |           |             |                |        |       |                   |           |
    #    |           |             |                |        |       |                   |           |
    # transform   template        FA                MD       AD      RD                 log        stats
    #                              |       (same inner file structure as that of FA)
    #                              |
    #                 ----------------------------------------
    #                  |         |         |       |        |
    #                 preproc  origdata  warped  skeleton  roi
    #
    # copy all FA into FA directory
    # put all preprocessed data into preproc directory
    # keep all warp/affine in transform directory
    # output all warped images in warped directory
    # output all skeletons in skel directory
    # output ROI based analysis files in roi directory
    # save all ROI statistics, mean, and combined images


    # define directories
    modDir = pjoin(args.outDir, f'{args.modality}')
    # args.xfrmDir = pjoin(args.outDir, 'transform')
    # args.statsDir = pjoin(args.outDir, 'stats')
    templateDir = pjoin(args.outDir, 'template/')  # trailing slash is important for antsMultivariate*.sh
    preprocDir= pjoin(modDir, 'preproc')
    warpDir= pjoin(modDir, 'warped')
    skelDir= pjoin(modDir, 'skeleton')
    roiDir= pjoin(modDir, 'roi')

    # force creation of inner directories
    makeDirectory(warpDir, True)
    makeDirectory(skelDir, True)
    makeDirectory(roiDir, True)


    # modality can be one of [FA,MD,AD,RD]
    # we could use just listdir(), but the following would be stricter and safer
    # since cases are sorted and we named images as modDir/{c}.nii.gz
    # the following sort puts modImgs in the same order as that of cases
    modImgs = glob(pjoin(modDir, '*.nii.gz'))
    modImgs.sort()


    if not args.noFillHole:
        print('\nFilling holes inside the brain region in diffusion measure images')
        # fill holes in all modality images
        # caveat: origdata no longer remain origdata, become hole filled origdata
        pool= Pool(args.ncpu)
        pool.map_async(fillHoles, modImgs, error_callback= RAISE)
        pool.close()
        pool.join()


    # preprocessing ========================================================================================
    if args.modality=='FA':
        print('Preprocessing FA images: eroding them and zeroing the end slices ...')
        modDir= pjoin(args.outDir, args.modality)
        CURRDIR= getcwd()
        # tbss_1_preproc works in the current directory, so cd into modDir
        chdir(modDir)
        check_call('tbss_1_preproc *.nii.gz', shell= True) # creates 'FA' and 'origdata' folders
        chdir(CURRDIR)
        print('Index file location has changed, see ', pjoin(preprocDir, 'slicesdir', 'index.html'))

        # rename args.modality/FA to args.modality/preproc
        move(pjoin(modDir, 'FA'), preprocDir)
    else:
        print(f'Preprocessing {args.modality} images using FA mask (eroding them and zeroing the end slices) ...')
        modDir = pjoin(args.outDir, args.modality)

        # force creation of inner directories
        makeDirectory(pjoin(modDir, 'origdata'), True)
        makeDirectory(pjoin(modDir, 'preproc'), True)

        # mask each non-FA image with the FA mask computed during FA preprocessing
        pool= Pool(args.ncpu)
        for c, imgPath in zip(cases, modImgs):
            FAmask= pjoin(args.outDir, 'FA', 'preproc', f'{c}_FA_mask.nii.gz')
            preprocMod= pjoin(preprocDir, f'{c}_{args.modality}.nii.gz')

            pool.apply_async(_fslmask, (imgPath, FAmask, preprocMod), error_callback= RAISE)


        pool.close()
        pool.join()

        check_call((' ').join(['mv', pjoin(modDir, '*.nii.gz'), pjoin(modDir, 'origdata')]), shell= True)

    modImgs = glob(pjoin(preprocDir, f'*{args.modality}.nii.gz'))
    modImgs.sort()

    # create template ======================================================================================
    if not args.template and args.modality=='FA':
        print('Creating study specific template ...')
        # we could pass modImgs directly to antsMult(), instead saving them to a .txt file for logging
        # modImgs = glob(pjoin(preprocDir, f'*{args.modality}*.nii.gz'))

        makeDirectory(templateDir, args.force)

        antsMultCaselist = pjoin(args.logDir, 'antsMultCaselist.txt')
        with open(antsMultCaselist, 'w') as f:
            for imgPath in modImgs:
                f.write(imgPath+'\n')

        # ATTN: antsMultivariateTemplateConstruction2.sh requires '/' at the end of templateDir
        antsMult(antsMultCaselist, templateDir, args.logDir, args.ncpu, args.verbose)
        # TODO: rename the template
        args.template= pjoin(templateDir, 'template0.nii.gz')
        check_call(f'ln -s {args.template} {args.statsDir}', shell= True)

        # warp and affine to template0.nii.gz have been created for each case during template construction
        # so template directory should be the transform directory
        args.xfrmDir= templateDir

    # register each image to the template ==================================================================
    elif args.template:
        # find warp and affine of FA image to args.template for each case
        if args.modality=='FA':
            print(f'Registering FA images to {args.template} space ..')
            makeDirectory(args.xfrmDir, True)
            pool= Pool(args.ncpu)
            for c, imgPath in zip(cases, modImgs):
                pool.apply_async(antsReg, (args.template, imgPath, pjoin(args.xfrmDir, f'{c}_FA'), args.logDir, args.verbose),
                                error_callback= RAISE)

            pool.close()
            pool.join()


    # register template to a standard space ================================================================
    # useful when you would like to do ROI based analysis using an atlas
    # project the created/specified template to the space of atlas
    if args.space:
        outPrefix = pjoin(args.xfrmDir, 'tmp2space')
        warp2space = outPrefix + '1Warp.nii.gz'
        trans2space = outPrefix + '0GenericAffine.mat'
        if not isfile(warp2space):
            print(f'Registering {args.template} to the space of {args.space} ...')
            antsReg(args.space, args.template, outPrefix, args.logDir, args.verbose)

        # TODO: rename the template
        args.template = outPrefix + 'Warped.nii.gz'
        if basename(args.template) not in listdir(args.statsDir):
            check_call(f'ln -s {args.template} {args.statsDir}', shell= True)

    # warp every preprocessed image into template (or template-->standard) space
    pool= Pool(args.ncpu)
    for c, imgPath in zip(cases, modImgs):
        # generalize warp and affine
        warp2tmp= glob(pjoin(args.xfrmDir, f'{c}_FA*1Warp.nii.gz'))[0]
        trans2tmp= glob(pjoin(args.xfrmDir, f'{c}_FA*0GenericAffine.mat'))[0]
        output= pjoin(warpDir, f'{c}_{args.modality}_to_target.nii.gz')

        if not args.space:
            # print(f'Warping {imgPath} to template space ...')
            pool.apply_async(_antsApplyTransforms, (imgPath, output, args.template, warp2tmp, trans2tmp),
                            error_callback= RAISE)


        else:
            # print(f'Warping {imgPath} to template-->standard space ...')
            pool.apply_async(_antsApplyTransforms, (imgPath, output, args.space, warp2tmp, trans2tmp, warp2space, trans2space),
                            error_callback= RAISE)


    pool.close()
    pool.join()


    # create skeleton for each subject
    modImgsInTarget= glob(pjoin(warpDir, f'*_{args.modality}_to_target.nii.gz'))
    modImgsInTarget.sort()

    miFile= None
    if args.modality=='FA':
        print(f'Logging MI between warped images {warpDir}/*.nii.gz and target {args.template} ...')
        miFile= measureSimilarity(modImgsInTarget, cases, args.template, args.logDir, args.ncpu)


    # obtain modified args from skeletonize() which will be used for other modalities than FA
    args= skeletonize(modImgsInTarget, cases, args, skelDir, miFile)

    skelImgsInSub= glob(pjoin(skelDir, f'*_{args.modality}_to_target_skel.nii.gz'))
    skelImgsInSub.sort()

    # roi based analysis
    if args.labelMap:
        # NOTE(review): roi_analysis() elsewhere in this file is declared as
        # (imgs, cases, args, statsDir, roiDir, N_CPU) — this call passes only
        # five arguments; verify the intended signature
        roi_analysis(skelImgsInSub, cases, args, roiDir, args.ncpu)

    return args
Exemplo n.º 15
0
def subject_stat(imgPath, c, modality, label2name, commonLabels, labelMap,
                 roiDir, avgFlag):
    """Compute ROI-based statistics for one skeletonized image.

    Writes roiDir/<c>_<modality>_roi.csv with the mean value and voxel count
    per labeled tract (plus a whole-skeleton average row). When avgFlag is
    set, also writes <c>_<modality>_roi_avg.csv where Right/Left homologues
    are additionally averaged, weighted by voxel count.

    Fix: the tract name is passed through re.escape() before being embedded
    in the word-boundary pattern, so names containing regex metacharacters
    cannot corrupt (or crash) the search.
    """
    print('Creating ROI based statistics for', imgPath)
    img = load(imgPath).get_data()
    _imgNonzero = img > 0

    df = pd.DataFrame(columns=['Tract', 'Average', 'nVoxels'])

    # row 0: average over the whole (nonzero) skeleton
    _img_roi = img[_imgNonzero]
    df.loc[0] = [f'Average{modality}'
                 ] + [num2str(x) for x in [_img_roi.mean(), _img_roi.size]]

    stat_file = pjoin(roiDir, f'{c}_{modality}_roi.csv')
    avg_stat_file = pjoin(roiDir, f'{c}_{modality}_roi_avg.csv')

    # one row per label: mean and voxel count within (label AND nonzero skeleton)
    for i, intLabel in enumerate(label2name.keys()):
        roi = labelMap == int(intLabel)
        _roi = np.logical_and(_imgNonzero, roi)
        _img_roi = img[_roi]

        if _img_roi.size:
            df.loc[i + 1] = [label2name[intLabel]] + [
                num2str(x) for x in [_img_roi.mean(), _img_roi.size]
            ]
        else:
            df.loc[i + 1] = [label2name[intLabel]] + ['0', '0']

    df.set_index('Tract').to_csv(stat_file)
    # FIXME: save unsorted df to match with that of ENIGMA?
    # df.sort_values(by='Tract').set_index('Tract').to_csv(stat_file)
    print('Made ', stat_file)

    if avgFlag:

        df_avg = pd.DataFrame(columns=['Tract', 'Average', 'nVoxels'])
        df_avg.loc[0] = df.loc[0].copy()

        row = 1
        for common in commonLabels:
            dm = []
            num = []

            for i, label in enumerate(label2name.values()):
                # word-boundary search keeps e.g. CP from matching inside ICP-R;
                # re.escape guards against regex metacharacters in tract names
                if re.search(r'\b' + re.escape(common) + r'\b', label):

                    df_avg.loc[row] = df.loc[i +
                                             1].copy()  # Right or Left value
                    row += 1
                    dm.append(float(df.loc[i + 1][1]))
                    num.append(int(df.loc[i + 1][2]))

                    # since we are averaging over R/L only, len(dm) <= 2
                    if len(dm) == 2:
                        # average of R/L, weighted by voxel count (uniform
                        # weights when both ROIs are empty)
                        df_avg.loc[row] = [
                            common,
                            num2str(
                                np.average(
                                    dm,
                                    weights=num if np.sum(num) else [1, 1])),
                            str(int(np.sum(num)))
                        ]
                        row = row + 1
                        break

        # FIXME: save unsorted df_avg so Tract, Right-Tract, and Left-Tract are together?
        df_avg.sort_values(by='Tract').set_index('Tract').to_csv(avg_stat_file)
        print('Made ', avg_stat_file)
Exemplo n.º 16
0
def skeletonize(imgs, cases, args, skelDir, miFile):
    '''
    Project every warped image of one modality onto the FA skeleton.

    For FA (when the corresponding files are not user-supplied), this also
    creates the template mask, mean FA, skeleton, skeleton mask, and
    skeleton-mask distance map, storing their paths on args so subsequent
    modalities reuse them. Returns the (possibly augmented) args.
    '''

    target = load(args.template)
    # NOTE(review): get_data() is deprecated in recent nibabel in favor of
    # get_fdata() — confirm the pinned nibabel version before changing
    targetData = target.get_data()
    X, Y, Z = targetData.shape[0], targetData.shape[1], targetData.shape[2]

    # provide the user with allFA sequence so he knows which volume he is looking at while scrolling through allFA
    seqFile = pjoin(args.statsDir, f'all_{args.modality}_sequence.txt')
    with open(seqFile, 'w') as f:
        f.write('index,caseid\n')
        for i, c in enumerate(cases):
            f.write(f'{i},{c}\n')

    print(f'Calculating mean {args.modality} over all the cases ...')
    allFAdata, cumsumFA = calc_mean(imgs, (X, Y, Z), args.qc)

    if args.qc:
        allFA = pjoin(args.statsDir, f'all_{args.modality}.nii.gz')
        save_nifti(allFA, np.moveaxis(allFAdata, 0, -1), target.affine,
                   target.header)

        print(
            f'''\n\nQC the warped {args.modality} images: {allFA}, view {seqFile} for index of volumes in all_FA.nii.gz. 
You may use fsleyes/fslview to load {allFA}.

MI metric b/w the warped images and target are stored in {miFile}

It might be helpful to re-run registration for warped images that are bad.

Moving images are   :   {args.outDir}/preproc/
Target is           :   {args.template}
Transform files are :   {args.xfrmDir}/
Warped images are   :   {args.outDir}/warped/

Save any re-registered images in {args.outDir}/warped/ with the same name as before

For re-registration of any subject, output the transform files to a temporary directory:
        
        mkdir /tmp/badRegistration/
        
        antsRegistrationSyNQuick.sh -d 3 \\
        -f TEMPLATE \\
        -m FA/preproc/caseid_FA.nii.gz \\
        -o /tmp/badRegistration/caseid_FA
        
        antsApplyTransforms -d 3 \\
        -i FA/preproc/caseid_FA.nii.gz \\
        -o FA/warped/caseid_[FA/MD/AD/RD]_to_target.nii.gz \\
        -r TEMPLATE \\
        -t /tmp/badRegistration/caseid_FA1Warp.nii.gz /tmp/badRegistration/caseid_FA0GenericAffine.mat
    
Finally, if wanted, you can copy the transform files to {args.xfrmDir}/ directory.

Note: Replace all the above directories with absolute paths.\n\n''')

        while input('Press Enter when you are done with QC/re-registration: '):
            pass

        # recompute the mean after the user may have replaced bad registrations
        allFAdata, cumsumFA = calc_mean(imgs, targetData.shape, args.qc)

    meanFAdata = cumsumFA / len(imgs)
    meanFA = pjoin(args.statsDir, 'mean_FA.nii.gz')

    # outDir should contain
    # all_{modality}.nii.gz
    # mean_FA.nii.gz
    # mean_FA_mask.nii.gz
    # mean_FA_skeleton.nii.gz
    # mean_FA_skeleton_mask.nii.gz
    # mean_FA_skeleton_mask_dst.nii.gz

    if args.modality == 'FA':

        if not args.templateMask:
            print('Creating template mask ...')
            args.templateMask = pjoin(args.statsDir, 'mean_FA_mask.nii.gz')
            meanFAmaskData = (meanFAdata > 0) * 1
            save_nifti(args.templateMask, meanFAmaskData.astype('uint8'),
                       target.affine, target.header)

        else:
            meanFAmaskData = load(args.templateMask).get_data()

        meanFAdata = meanFAdata * meanFAmaskData
        save_nifti(meanFA, meanFAdata, target.affine, target.header)

        # if skeleton is not given:
        #     create all three of skeleton, skeletonMask, and skeletonMaskDst

        # if skeleton is given and (neither skeletonMask nor skeletonMaskDst is given):
        #     create skeletonMask and skeletonMaskDst

        # if skeleton and skeletonMask is given and skeletonMaskDst is not given:
        #     create skeletonMaskDst

        if not args.skeleton:
            print(
                'Creating all three of skeleton, skeletonMask, and skeletonMaskDst ...'
            )
            args.skeleton = pjoin(args.statsDir, 'mean_FA_skeleton.nii.gz')
            args.skeletonMask = pjoin(args.statsDir,
                                      'mean_FA_skeleton_mask.nii.gz')
            args.skeletonMaskDst = pjoin(args.statsDir,
                                         'mean_FA_skeleton_mask_dst.nii.gz')

            _create_skeleton(meanFA, args.skeleton)
            _create_skeletonMask(args.skeleton, args.SKEL_THRESH,
                                 args.skeletonMask)
            _create_skeletonMaskDst(args.templateMask, args.skeletonMask,
                                    args.skeletonMaskDst)

        if args.skeleton and not (args.skeletonMask or args.skeletonMaskDst):
            print('Creating skeletonMask and skeletonMaskDst ...')
            args.skeletonMask = pjoin(args.statsDir,
                                      'mean_FA_skeleton_mask.nii.gz')
            args.skeletonMaskDst = pjoin(args.statsDir,
                                         'mean_FA_skeleton_mask_dst.nii.gz')

            _create_skeletonMask(args.skeleton, args.SKEL_THRESH,
                                 args.skeletonMask)
            _create_skeletonMaskDst(args.templateMask, args.skeletonMask,
                                    args.skeletonMaskDst)

        if args.skeleton and not args.skeletonMask and args.skeletonMaskDst:
            print('Creating skeletonMask ...')
            args.skeletonMask = pjoin(args.statsDir,
                                      'mean_FA_skeleton_mask.nii.gz')

            _create_skeletonMask(args.skeleton, args.SKEL_THRESH,
                                 args.skeletonMask)

        if (args.skeleton and args.skeletonMask) and not args.skeletonMaskDst:
            print('Creating skeletonMaskDst ...')
            args.skeletonMaskDst = pjoin(args.statsDir,
                                         'mean_FA_skeleton_mask_dst.nii.gz')

            _create_skeletonMaskDst(args.templateMask, args.skeletonMask,
                                    args.skeletonMaskDst)

    # mask allFA, this step does not seem to have any effect on the pipeline, it should help the user to visualize only
    if args.qc:
        check_call(
            (' ').join(['fslmaths', allFA, '-mas', args.templateMask, allFA]),
            shell=True)

    # projecting all {modality} data onto skeleton
    pool = Pool(args.ncpu)
    for c, imgPath in zip(cases, imgs):
        pool.apply_async(project_skeleton, (c, imgPath, args, skelDir),
                         error_callback=RAISE)

    pool.close()
    pool.join()

    if not args.noAllSkeleton:

        allFAskeletonized = pjoin(args.statsDir,
                                  f'all_{args.modality}_skeletonized.nii.gz')
        print('Creating ', allFAskeletonized)

        # this loop has been moved out of multiprocessing block to prevent memory error
        allFAskeletonizedData = np.zeros((len(imgs), X, Y, Z), dtype='float32')
        for i, c in enumerate(cases):
            allFAskeletonizedData[i, :] = load(
                pjoin(
                    skelDir,
                    f'{c}_{args.modality}_to_target_skel.nii.gz')).get_data()

        save_nifti(allFAskeletonized, np.moveaxis(allFAskeletonizedData, 0,
                                                  -1), target.affine,
                   target.header)
        print(
            f'Created {allFAskeletonized} and corresponding index file: {seqFile}'
        )

    return args
Exemplo n.º 17
0
# ===============================================================================

from tbssUtil import FILEDIR, pjoin, move, isfile, makeDirectory, check_call, chdir, getcwd, ConfigParser, Pool
from conversion import read_cases
from antsTemplate import antsReg
from orderCases import orderCases
from glob import glob
from plumbum.cmd import antsApplyTransforms, fslmaths
from measureSimilarity import measureSimilarity
from plumbum import FG
from skeletonize import skeletonize
from roi_analysis import roi_analysis
from antsTemplate import antsMult

# Load pipeline defaults from the config.ini shipped alongside the sources;
# N_CPU caps the multiprocessing pool size used throughout the pipeline.
_config_file = pjoin(FILEDIR, 'config.ini')
config = ConfigParser()
config.read(_config_file)
N_CPU = int(config['DEFAULT']['N_CPU'])


def process(args):

    cases = read_cases(args.caselist)

    # organize images into different directories ===========================================================

    # outDir
    #    |
    # ------------------------------------------------------------------------------------------------------
    #    |           |             |                |        |       |                   |           |
    #    |           |             |                |        |       |                   |           |
    # transform   template        FA                MD       AD      RD                 log        stats
Exemplo n.º 18
0
    return (caselist,cases)


# Attribute names of a datetime object persisted by write_time(), in order.
# NOTE: the original list contained 'day' twice and read_time() compensated
# by skipping index 3; the duplicate is removed here, and read_time() still
# accepts legacy 8-value files (see below).
properties = ['year', 'month', 'day', 'hour', 'minute', 'second', 'microsecond']


def write_time(filename, obj):
    """Persist the datetime-like *obj* to *filename* as space-separated integers.

    One integer is written per name in ``properties``, in that order.
    """
    # getattr() instead of eval() -- same result, no arbitrary-code-execution risk
    values = [getattr(obj, prop) for prop in properties]
    with open(filename, 'w') as f:
        f.write(' '.join(str(v) for v in values))


def read_time(filename):
    """Read a file written by write_time() and rebuild a datetime.

    Returns a datetime with second precision; the stored microsecond field
    is intentionally discarded, matching the original behavior.
    """
    from datetime import datetime

    with open(filename) as f:
        values = [int(x) for x in f.read().strip().split()]

    # Legacy files (written when 'day' appeared twice in `properties`)
    # contain 8 values with the duplicate at index 3 -- drop it.
    if len(values) == 8:
        del values[3]

    return datetime(values[0], values[1], values[2], values[3], values[4],
                    values[5])

if __name__=='__main__':
    from datetime import datetime

    # Ad-hoc smoke test: round-trip the current time through
    # write_time()/read_time() and print the result.
    # NOTE(review): the path below is a hardcoded developer location -- it
    # will fail on any other machine; parameterize before reuse.
    filename= pjoin('/home/tb571/Documents/TBSS/lib/tests/enigmaTemplateOutput/log/start_time.txt')
    write_time(filename, datetime.now())
    print(read_time(filename))

    pass
Exemplo n.º 19
0
def main():

    parser = argparse.ArgumentParser(
        description=
        'Generates an HTML file with skeleton overlaid upon the diffusivity measure '
        'i.e. FA,MD,AD,RD etc',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)

    parser.add_argument(
        '-d',
        '--dir',
        type=str,
        required=True,
        default=argparse.SUPPRESS,
        help=
        'TBSS output directory where results are stored in --modality sudirectory; '
        'you should have write permission into the directories')

    parser.add_argument('-m',
                        '--modality',
                        type=str,
                        default='FA',
                        help='Modality={FA,MD,AD,RD,...} of images')

    parser.add_argument(
        '-n',
        '--ncpu',
        type=int,
        default=4,
        help='number of threads to use, if other processes in your computer '
        'becomes sluggish/you run into memory error, reduce --nproc')

    parser.add_argument(
        '--cut_coords',
        type=str,
        help='The MNI coordinates of the point where cut is '
        'performed. Examples: --cut_coords enigma, --cut_coords fmrib, --cut_coords 1,-19,14 (comma separated, no spaces) '
        'See details in docs/TUTORIAL.md',
        default='auto')

    args = parser.parse_args()

    args.outDir = abspath(args.dir)
    modDir = pjoin(args.outDir, args.modality)
    ssDir = pjoin(modDir, 'slicesdir')

    cases = read_cases(pjoin(args.outDir, 'log', 'caselist.txt'))
    cases.sort()

    if args.cut_coords == 'enigma':
        cut_coords = (1, -19, 14)
    elif args.cut_coords == 'fmrib':
        cut_coords = (-17, -24, 14)
    elif args.cut_coords == 'auto':
        cut_coords = None
    else:
        cut_coords = tuple(int(i) for i in args.cut_coords.split(','))

    # generate screenshots
    generate_ss(modDir, ssDir, cases, args.ncpu, cut_coords)

    # write summary HTML file
    write_html(ssDir, cases)