def consistencyCheck(ref_csv,
                     outputBshellFile=None,
                     outPutResolutionFile=None):

    # ref_csv may contain (img,mask) pairs or a single column of images
    try:
        ref_imgs, _ = read_imgs_masks(ref_csv)
    except:
        ref_imgs = read_imgs(ref_csv)

    if isfile(outputBshellFile) and isfile(outPutResolutionFile):
        ref_bvals = read_bvals(outputBshellFile)
        ref_res = np.load(outPutResolutionFile)
    else:
        ref_bshell_img = ref_imgs[0]
        print(f'Using {ref_bshell_img} to determine b-shells')

        inPrefix = abspath(ref_bshell_img).split('.nii')[0]
        ref_bvals = findBShells(inPrefix + '.bval', outputBshellFile)

        ref_res = load(ref_bshell_img).header['pixdim'][1:4]
        np.save(outPutResolutionFile, ref_res)

    print('b-shells are', ref_bvals)

    print('\nSite', ref_csv, '\n')

    print('Checking consistency of b-shells among subjects')
    check_bshells(ref_imgs, ref_bvals)

    print('spatial resolution is', ref_res)
    print('Checking consistency of spatial resolution among subjects')
    check_resolution(ref_imgs, ref_res)
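
# check_bshells and check_resolution are project helpers not shown in this listing.
# Below is a minimal, hypothetical sketch of a resolution check, assuming it only
# compares each image's voxel size against the reference; the actual helper may differ.
import numpy as np
from nibabel import load

def _check_resolution_sketch(imgs, ref_res, tol=1e-3):
    '''Report any image whose voxel size deviates from the reference resolution.'''
    unmatched = []
    for imgPath in imgs:
        res = load(imgPath).header['pixdim'][1:4]
        if not np.allclose(res, ref_res, atol=tol):
            print(f'Resolution mismatch: {imgPath} has {res}, expected {ref_res}')
            unmatched.append(imgPath)

    if not unmatched:
        print('All subjects have consistent spatial resolution')

    return unmatched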
Example #2
def separateAllBshells(ref_csv, ref_bvals_file, ncpu=4, outPrefix=None):

    ref_bvals = read_bvals(ref_bvals_file)

    try:
        imgs, masks = read_imgs_masks(ref_csv)
    except:
        imgs = read_imgs(ref_csv)
        masks = None

    pool = Pool(int(ncpu))
    for imgPath in imgs:
        pool.apply_async(separateBshells,
                         kwds={
                             'imgPath': imgPath,
                             'ref_bvals': ref_bvals
                         },
                         error_callback=RAISE)

    pool.close()
    pool.join()

    if outPrefix:
        outPrefix = abspath(outPrefix)
    else:
        return

    for bval in ref_bvals:

        f = open(f'{outPrefix}_b{int(bval)}.csv', 'w')

        if masks:
            for imgPath, maskPath in zip(imgs, masks):
                inPrefix = abspath(imgPath).split('.nii')[0]
                bPrefix = inPrefix + f'_b{int(bval)}'
                f.write(f'{bPrefix}.nii.gz,{maskPath}\n')

        else:
            for imgPath in imgs:
                inPrefix = abspath(imgPath).split('.nii')[0]
                bPrefix = inPrefix + f'_b{int(bval)}'
                f.write(f'{bPrefix}.nii.gz\n')

        f.close()
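
# separateBshells (the worker dispatched above) is defined elsewhere in the repository.
# A minimal sketch, assuming it only extracts the volumes belonging to each reference
# shell and writes them as {inPrefix}_b{bval}.nii.gz, the naming convention the
# csv-writing loop above relies on; the shell-matching tolerance is an assumption.
import numpy as np
from os.path import abspath
from nibabel import load, Nifti1Image

def _separate_bshells_sketch(imgPath, ref_bvals, tol=50):
    inPrefix = abspath(imgPath).split('.nii')[0]
    bvals = np.loadtxt(inPrefix + '.bval').flatten()
    dwi = load(imgPath)
    data = dwi.get_fdata()

    for bval in ref_bvals:
        # volumes whose b-value lies within tol of this shell
        ind = np.where(abs(bvals - bval) <= tol)[0]
        if not len(ind):
            continue

        shell_data = data[..., ind]
        Nifti1Image(shell_data, dwi.affine, dwi.header).to_filename(f'{inPrefix}_b{int(bval)}.nii.gz')
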
def joinAllBshells(tar_csv, ref_bvals_file, separatedPrefix=None, ncpu=4):

    ref_bvals = read_bvals(ref_bvals_file)
    if tar_csv:

        try:
            imgs, _ = read_imgs_masks(tar_csv)
        except:
            imgs = read_imgs(tar_csv)

        pool = Pool(int(ncpu))
        for imgPath in imgs:
            pool.apply_async(joinBshells,
                             kwds={
                                 'imgPath': imgPath,
                                 'ref_bvals': ref_bvals,
                                 'sep_prefix': separatedPrefix
                             },
                             error_callback=RAISE)

        pool.close()
        pool.join()
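
# RAISE is the error_callback handed to apply_async in the functions above; its
# definition is not shown in this listing. Since apply_async otherwise drops worker
# exceptions silently, a callback of this shape (a plausible sketch, not necessarily
# the repository's exact definition) re-raises the error so the failure gets reported:
def RAISE(ERR):
    raise ERR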
Example #4
def main():

    parser = argparse.ArgumentParser(description='''Warps diffusion measures (FA, MD, GFA) to template space 
    and then to MNI space. Finally, calculates mean FA over IITmean_FA_skeleton.nii.gz''')
    parser.add_argument('-i', '--input', type=str, required=True, 
        help='a .txt/.csv file having one column for FA imgs, '
             'or two columns for (img,mask) pairs; the latter is the list you used in/obtained from harmonization.py. '
             'See pnlbwh/dMRIharmonization documentation for more details')
    parser.add_argument('-s', '--site', type=str, required=True,
                        help='site name for locating template FA and mask in template directory')
    parser.add_argument('-t', '--template', type=str, required=True,
                        help='template directory where Mean_{site}_FA.nii.gz and {site}_Mask.nii.gz are located')
    parser.add_argument('--bshell_b', required=True, help='bvalue of the bshell')
    parser.add_argument('--ncpu', help='number of cpus to use', default='4')

    args = parser.parse_args()
    imgList = abspath(args.input)
    siteName = args.site
    templatePath = abspath(args.template)
    bshell_b = int(args.bshell_b)
    N_proc = int(args.ncpu)

    # read FA image list
    # the csv may contain (img,mask) pairs or a single column of FA images
    try:
        imgs, _ = read_imgs_masks(imgList)
    except:
        imgs = None

    if imgs:
        print('imgs,masks list is provided. FA images are assumed to be directoryOfImg/dti/ImgPrefix_FA.nii.gz, make sure they are there')
        faImgs = []

        for imgPath in imgs:
            directory = dirname(imgPath)
            prefix = basename(imgPath).split('.nii')[0]
            faImg = pjoin(directory, 'dti', prefix + '_FA.nii.gz')
            if not isfile(faImg):
                # raise outside the bare except above so a missing FA map is reported,
                # not silently mistaken for a single-column FA image list
                raise FileNotFoundError(f'{faImg} not found. Did you run "--create --debug" and "--process --debug" before?')

            faImgs.append(faImg)

    else:
        faImgs = read_imgs(imgList)
        print('FA image list is provided.')
    

    # register and obtain *_InMNI_FA.nii.gz
    mniFAimgs = sub2tmp2mni(templatePath, siteName, faImgs, bshell_b, N_proc)
    
    # save statistics for future
    statFile = os.path.join(templatePath, 'meanFAstat.txt')
    f = open(statFile, 'a')
    stdout = sys.stdout
    sys.stdout = f

    print(datetime.now().strftime('%c'),'\n')

    print('b-shell', bshell_b, '\n')

    # pass *_InMNI_FA.nii.gz list to analyzeStat
    site_means = analyzeStat(mniFAimgs)
    print(f'{siteName} site: ')
    printStat(site_means, mniFAimgs)

    f.close()
    sys.stdout= stdout

    # print statistics on console
    print('')
    with open(statFile) as f:
        print(f.read())

    print('\nThe statistics are also saved in', statFile)
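
# printStat and analyzeStat are project helpers not shown here. A minimal, hypothetical
# sketch of printStat, assuming it simply lists each subject's mean FA (as computed by
# analyzeStat over the skeleton) followed by a site-level summary; the real helper may differ:
import numpy as np

def _print_stat_sketch(site_means, imgs):
    for img, mean in zip(imgs, site_means):
        print(f'{img},{mean}')

    print('mean of meanFA:', np.mean(site_means))
    print('std of meanFA:', np.std(site_means))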
Example #5
def main():

    parser = argparse.ArgumentParser(
        description='''Warps diffusion measures (FA, MD, GFA) to template space 
    and then to MNI space. Finally, calculates mean FA over IITmean_FA_skeleton.nii.gz'''
    )
    parser.add_argument(
        '-i',
        '--input',
        type=str,
        required=True,
        help=
        'a .txt/.csv file that you used in/obtained from harmonization.py, having two columns for (img,mask) pairs. '
        'See documentation for more details')
    parser.add_argument(
        '-s',
        '--site',
        type=str,
        required=True,
        help='site name for locating template FA and mask in template directory'
    )
    parser.add_argument(
        '-t',
        '--template',
        type=str,
        required=True,
        help=
        'template directory where Mean_{site}_FA.nii.gz and {site}_Mask.nii.gz are located'
    )
    parser.add_argument('--ncpu', help='number of cpus to use', default='4')

    args = parser.parse_args()
    imgList = abspath(args.input)
    siteName = args.site
    templatePath = abspath(args.template)
    N_proc = int(args.ncpu)

    # read FA image list
    # the csv may contain (img,mask) pairs or a single column of FA images
    try:
        imgs, _ = read_imgs_masks(imgList)
    except:
        imgs = None

    if imgs:
        print(
            '(Img,Mask) list is provided. FA images are assumed to be directoryOfImg/dti/ImgPrefix_FA.nii.gz, make sure they are there\n'
        )
        faImgs = []

        for imgPath in imgs:
            directory = dirname(imgPath)
            prefix = basename(imgPath).split('.nii')[0]
            faImg = pjoin(directory, 'dti', prefix + '_FA.nii.gz')
            if not isfile(faImg):
                # raise outside the bare except above so a missing FA map is reported,
                # not silently mistaken for a single-column FA image list
                raise FileNotFoundError(
                    f'{faImg} not found. Did you run "--create --debug" and "--process --debug" before?'
                )

            faImgs.append(faImg)

    else:
        faImgs = read_imgs(imgList)
        print('FA image list is provided.')

    # register and obtain *_InMNI_FA.nii.gz
    mniFAimgs = sub2tmp2mni(templatePath, siteName, faImgs, N_proc)

    # target harmonized
    if imgList.endswith('.modified.harmonized'):
        header = siteName + '_after'
    # reference
    elif imgList.endswith('.modified'):
        header = siteName
    # target unprocessed
    else:
        header = siteName + '_before'

    # FIXME: a user-provided FA image list will use the header {siteName+'_before'}, which is not always correct.
    # As shown in the block above, the reference site should use {siteName} while a harmonized target
    # should use {siteName+'_after'}. The impact of this discrepancy is minor since use of an FA image list is deprecated.

    outPrefix = pjoin(templatePath, header)

    print('\n\nComputing statistics\n\n')
    print(f'{siteName} site: ')
    site_means = analyzeStat(mniFAimgs)
    generate_csv(faImgs, site_means, outPrefix)

    # save statistics for future
    statFile = pjoin(templatePath, 'meanFAstat.csv')
    with open(statFile, 'a') as f:
        f.write(datetime.now().strftime('%m/%d/%y %H:%M') +
                ',mean meanFA,std meanFA\n')
        f.write(f'{header},{np.mean(site_means)},{np.std(site_means)}\n')
        # write an empty line so future results, if appended, are visually separated
        f.write('\n')

    # print statistics on console
    print('\n\nPrinting statistics\n\n')
    with open(statFile) as f:
        print(f.read())

    # generate demonstrative plots
    ebar = harm_plot([site_means], [header], outPrefix)

    print(
        f'\nDetailed statistics, summary results, and demonstrative plots are saved in:\n\n{outPrefix}_stat.csv'
        f'\n{statFile}\n{ebar}\n')
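
# analyzeStat is defined elsewhere in the repository. Based on the script description
# (mean FA over IITmean_FA_skeleton.nii.gz), a minimal sketch could look like the
# following; the skeleton file path and the >0 thresholding are assumptions:
import numpy as np
from nibabel import load

def _analyze_stat_sketch(mniFAimgs, skeleton='IITmean_FA_skeleton.nii.gz'):
    '''Return the mean FA within the IIT skeleton for each MNI-space FA image.'''
    skel = load(skeleton).get_fdata() > 0

    site_means = []
    for faImg in mniFAimgs:
        fa = load(faImg).get_fdata()
        site_means.append(fa[skel].mean())

    return site_means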