def consistencyCheck(ref_csv,
                     outputBshellFile=None,
                     outPutResolutionFile=None):

    # the csv may list (img,mask) pairs or just images
    try:
        ref_imgs, _ = read_imgs_masks(ref_csv)
    except:
        ref_imgs = read_imgs(ref_csv)

    # reuse cached b-shells and resolution if both files already exist
    if isfile(outputBshellFile) and isfile(outPutResolutionFile):
        ref_bvals = read_bvals(outputBshellFile)
        ref_res = np.load(outPutResolutionFile)
    else:
        ref_bshell_img = ref_imgs[0]
        print(f'Using {ref_bshell_img} to determine b-shells')

        inPrefix = abspath(ref_bshell_img).split('.nii')[0]
        ref_bvals = findBShells(inPrefix + '.bval', outputBshellFile)

        ref_res = load(ref_bshell_img).header['pixdim'][1:4]
        np.save(outPutResolutionFile, ref_res)

    print('b-shells are', ref_bvals)

    print('\nSite', ref_csv, '\n')

    print('Checking consistency of b-shells among subjects')
    check_bshells(ref_imgs, ref_bvals)

    print('spatial resolution is', ref_res)
    print('Checking consistency of spatial resolution among subjects')
    check_resolution(ref_imgs, ref_res)
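findBShells, check_bshells, and check_resolution above come from the package itself. As a rough illustration of the shell detection they rely on, here is a minimal sketch assuming simple clustering of b-values within a tolerance; find_bshells_sketch and its tol parameter are hypothetical and not the package's actual findBShells:

import numpy as np

def find_bshells_sketch(bvalFile, outFile=None, tol=50.0):
    # Hypothetical sketch: cluster sorted b-values into shells, starting a
    # new shell whenever a value is more than tol away from the last centre.
    bvals = np.sort(np.loadtxt(bvalFile).flatten())
    shells = []
    for b in bvals:
        if not shells or abs(b - shells[-1]) > tol:
            shells.append(b)
    shells = np.array(shells)
    if outFile:
        np.savetxt(outFile, shells, fmt='%d')
    return shells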
Example #2
def verifyNshmForAll(csvFile, N_shm):

    for imgPath in read_imgs_masks(csvFile)[0]:
        directory = dirname(imgPath)
        prefix = basename(imgPath).split('.nii')[0]
        bvalFile = pjoin(directory, prefix + '.bval')
        verifyNshm(N_shm, bvalFile)
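verifyNshm and determineNshm are package helpers; the constraint they embody is that a real, even-order spherical-harmonic fit of order L has (L+1)(L+2)/2 coefficients, so the shell must contain at least that many gradient directions. A minimal sketch of that rule (determine_nshm_sketch, b0_thresh, and max_order are hypothetical names, not the package API):

import numpy as np

def determine_nshm_sketch(bvalFile, b0_thresh=50.0, max_order=8):
    # Hypothetical sketch: pick the largest even SH order whose coefficient
    # count (L+1)(L+2)/2 does not exceed the number of non-b0 directions.
    bvals = np.loadtxt(bvalFile).flatten()
    n_dirs = int(np.sum(bvals > b0_thresh))
    for L in range(max_order, 0, -2):
        if (L + 1) * (L + 2) // 2 <= n_dirs:
            return L, n_dirs
    raise ValueError(f'too few gradient directions ({n_dirs}) for an SH fit')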
def printStat(ref_mean, csvFile):

    print('mean FA over IIT_mean_FA_skeleton.nii.gz for all cases: ')
    imgs, _ = read_imgs_masks(csvFile)
    for i, imgPath in enumerate(imgs):
        print(basename(imgPath), ref_mean[i])

    print('')
    print('mean meanFA: ', np.mean(ref_mean))
    print('std meanFA: ', np.std(ref_mean))
    print('')
Example #4
def generate_csv(imgs, site_means, outPrefix):

    # imgs may be a csv of (img,mask) pairs; otherwise it is already a list of image paths
    try:
        imgs, _ = read_imgs_masks(imgs)
    except:
        pass

    stat = {'subject': [basename(f) for f in imgs], 'meanFA': site_means}
    df = pd.DataFrame(stat)
    statFile = outPrefix + '_stat.csv'

    df.to_csv(statFile, index=False)
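For illustration only, a hypothetical two-subject call would produce a <outPrefix>_stat.csv like the one sketched in the comments below (paths and FA values are placeholders):

# hypothetical usage; paths and FA values are placeholders
imgs = ['/data/siteA/sub-01_dwi.nii.gz', '/data/siteA/sub-02_dwi.nii.gz']
site_means = [0.447, 0.451]
generate_csv(imgs, site_means, '/data/siteA/siteA_before')
# /data/siteA/siteA_before_stat.csv then contains:
#   subject,meanFA
#   sub-01_dwi.nii.gz,0.447
#   sub-02_dwi.nii.gz,0.451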
Example #5
def separateAllBshells(ref_csv, ref_bvals_file, ncpu=4, outPrefix=None):

    ref_bvals = read_bvals(ref_bvals_file)

    try:
        imgs, masks = read_imgs_masks(ref_csv)
    except:
        imgs = read_imgs(ref_csv)
        masks = None

    pool = Pool(int(ncpu))
    for imgPath in imgs:
        pool.apply_async(separateBshells,
                         kwds={
                             'imgPath': imgPath,
                             'ref_bvals': ref_bvals
                         },
                         error_callback=RAISE)

    pool.close()
    pool.join()

    if outPrefix:
        outPrefix = abspath(outPrefix)
    else:
        return

    for bval in ref_bvals:

        with open(f'{outPrefix}_b{int(bval)}.csv', 'w') as f:

            if masks:
                for imgPath, maskPath in zip(imgs, masks):
                    inPrefix = abspath(imgPath).split('.nii')[0]
                    bPrefix = inPrefix + f'_b{int(bval)}'
                    f.write(f'{bPrefix}.nii.gz,{maskPath}\n')

            else:
                for imgPath in imgs:
                    inPrefix = abspath(imgPath).split('.nii')[0]
                    bPrefix = inPrefix + f'_b{int(bval)}'
                    f.write(f'{bPrefix}.nii.gz\n')
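separateBshells itself is imported from elsewhere in the package; the gist, writing one 4D volume per shell under the <prefix>_b<bval> naming used above, might look like the following sketch (separate_bshells_sketch and its tol parameter are hypothetical, b0 handling is omitted, and nibabel is assumed):

import numpy as np
import nibabel as nib

def separate_bshells_sketch(imgPath, ref_bvals, tol=50.0):
    # Hypothetical sketch: keep only the volumes whose b-value lies within
    # +/- tol of each reference shell and write them as _b{bval} files.
    inPrefix = imgPath.split('.nii')[0]
    bvals = np.loadtxt(inPrefix + '.bval').flatten()
    bvecs = np.loadtxt(inPrefix + '.bvec')          # 3 x N
    img = nib.load(imgPath)
    data = img.get_fdata()

    for b in ref_bvals:
        ind = np.where(np.abs(bvals - b) <= tol)[0]
        bPrefix = inPrefix + f'_b{int(b)}'
        nib.Nifti1Image(data[..., ind], img.affine, img.header).to_filename(bPrefix + '.nii.gz')
        np.savetxt(bPrefix + '.bval', bvals[ind][None, :], fmt='%d')
        np.savetxt(bPrefix + '.bvec', bvecs[:, ind], fmt='%.6f')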
Example #6
    def main(self):

        self.template = str(self.template)
        self.labelMap = str(self.labelMap)

        imgs, masks = read_imgs_masks(self.imagelist)

        if int(self.N_proc) == -1:
            self.N_proc = psutil.cpu_count()

        pool = multiprocessing.Pool(int(self.N_proc))
        for imgPath, maskPath in zip(imgs, masks):
            inPrefix = imgPath.split('.nii')[0]

            bvalFile = None
            bvecFile = None
            if imgPath.endswith('.nii') or imgPath.endswith('.nii.gz'):
                bvalFile = inPrefix + '.bval'
                bvecFile = inPrefix + '.bvec'

            out_dir = join(dirname(imgPath), self.qcDir)

            if isdir(out_dir):
                # force re-run
                rmtree(out_dir)
            mkdir(out_dir)
            pool.apply_async(func=dwi_quality_wrapper,
                             args=(imgPath, maskPath, bvalFile, bvecFile,
                                   self.mk_low_high, self.fa_low_high,
                                   self.md_low_high, out_dir, self.name,
                                   self.template, self.labelMap,
                                   self.lut._path if self.lut else None))

        pool.close()
        pool.join()

        summarize_csvs(imgs, self.labelMap,
                       self.lut._path if self.lut else None, self.qcDir,
                       self.name, self.out_csv)
def joinAllBshells(tar_csv, ref_bvals_file, separatedPrefix=None, ncpu=4):

    ref_bvals = read_bvals(ref_bvals_file)
    if tar_csv:

        try:
            imgs, _ = read_imgs_masks(tar_csv)
        except:
            imgs = read_imgs(tar_csv)

        pool = Pool(int(ncpu))
        for imgPath in imgs:
            pool.apply_async(joinBshells,
                             kwds=({
                                 'imgPath': imgPath,
                                 'ref_bvals': ref_bvals,
                                 'sep_prefix': separatedPrefix
                             }),
                             error_callback=RAISE)

        pool.close()
        pool.join()
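A hedged usage sketch of the round trip, assuming the per-shell files produced by separateAllBshells are what joinBshells stitches back together; all file names below are placeholders:

# hypothetical round-trip usage; csv and prefix names are placeholders
separateAllBshells('siteA_imgs_masks.csv', 'siteA_bshells.txt',
                   ncpu=4, outPrefix='/tmp/siteA')
# ... per-shell processing of /tmp/siteA_b*.csv happens here ...
joinAllBshells('siteA_imgs_masks.csv', 'siteA_bshells.txt',
               separatedPrefix='/tmp/siteA', ncpu=4)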
    def main(self):

        self.templatePath = abspath(self.templatePath)
        self.N_shm = int(self.N_shm)
        self.N_proc = int(self.N_proc)
        if self.N_proc == -1:
            self.N_proc = N_CPU

        if self.target_csv.endswith('.modified'):
            self.tar_unproc_csv = str(self.target_csv).split('.modified')[0]
        else:
            self.tar_unproc_csv = str(self.target_csv)

        # check appropriateness of N_shm
        if self.N_shm != -1 and (self.N_shm < 2 or self.N_shm > 8):
            raise ValueError('2<= --nshm <=8')

        # determine N_shm in default mode during template creation
        if self.N_shm == -1 and self.create:
            if self.ref_csv:
                ref_nshm_img = read_imgs_masks(self.ref_csv)[0][0]
            elif self.target_csv:
                ref_nshm_img = read_imgs_masks(self.target_csv)[0][0]

            directory = dirname(ref_nshm_img)
            prefix = basename(ref_nshm_img).split('.nii')[0]
            bvalFile = pjoin(directory, prefix + '.bval')
            self.N_shm, _ = determineNshm(bvalFile)

        # automatic determination of N_shm during data harmonization is limited by the N_shm used during template creation:
        # only Scale_L{i}_b{bshell_b}.nii.gz files with i <= {N_shm used during template creation} are present in the template
        elif self.N_shm == -1 and self.process:
            for i in range(0, 10, 2):
                if isfile(
                        pjoin(self.templatePath,
                              f'Scale_L{i}_b{self.bshell_b}.nii.gz')):
                    self.N_shm = i
                else:
                    break

        # verify validity of provided/determined N_shm for all subjects
        if self.ref_csv:
            verifyNshmForAll(self.ref_csv, self.N_shm)
        if self.target_csv:
            verifyNshmForAll(self.target_csv, self.N_shm)

        # write config file to temporary directory
        configFile = f'/tmp/harm_config_{os.getpid()}.ini'
        with open(configFile, 'w') as f:
            f.write('[DEFAULT]\n')
            f.write(f'N_shm = {self.N_shm}\n')
            f.write(f'N_proc = {self.N_proc}\n')
            f.write(f'N_zero = {self.N_zero}\n')
            f.write(f'resample = {self.resample if self.resample else 0}\n')
            f.write(f'bvalMap = {self.bvalMap if self.bvalMap else 0}\n')
            f.write(f'bshell_b = {self.bshell_b}\n')
            f.write(f'denoise = {1 if self.denoise else 0}\n')
            f.write(f'travelHeads = {1 if self.travelHeads else 0}\n')
            f.write(f'debug = {1 if self.debug else 0}\n')
            f.write(f'verbose = {1 if self.verbose else 0}\n')
            f.write('diffusionMeasures = {}\n'.format(
                ','.join(self.diffusionMeasures)))

        self.sanityCheck()

        if self.create:
            self.createTemplate()

        if self.process:
            self.harmonizeData()

        if self.create and self.process and self.debug:
            self.post_debug()

        os.remove(configFile)
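The helper modules presumably load this INI back with Python's standard configparser; a minimal reader sketch under that assumption (not the package's actual loading code):

import os
from configparser import ConfigParser

# hypothetical reader for the config written above
config = ConfigParser()
config.read(f'/tmp/harm_config_{os.getpid()}.ini')
N_shm = int(config['DEFAULT']['N_shm'])
denoise = bool(int(config['DEFAULT']['denoise']))
diffusionMeasures = config['DEFAULT']['diffusionMeasures'].split(',')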
Example #9
def main():

    parser = argparse.ArgumentParser(description='''Warps diffusion measures (FA, MD, GFA) to template space 
    and then to MNI space. Finally, calculates mean FA over IITmean_FA_skeleton.nii.gz''')
    parser.add_argument('-i', '--input', type=str, required=True, 
        help='a .txt/.csv file having one column for FA imgs, '
             'or two columns for (img,mask) pair, the latter list is what you used in/obtained from harmonization.py. '
             'See pnlbwh/dMRIharmonization documentation for more details')
    parser.add_argument('-s', '--site', type=str, required=True,
                        help='site name for locating template FA and mask in template directory')
    parser.add_argument('-t', '--template', type=str, required=True,
                        help='template directory where Mean_{site}_FA.nii.gz and {site}_Mask.nii.gz are located')
    parser.add_argument('--bshell_b', required=True, help='bvalue of the bshell')
    parser.add_argument('--ncpu', help='number of cpus to use', default= '4')

    args = parser.parse_args()
    imgList = abspath(args.input)
    siteName = args.site
    templatePath = abspath(args.template)
    bshell_b = int(args.bshell_b)
    N_proc = int(args.ncpu)

    # read FA image list
    try:
        imgs, _ = read_imgs_masks(imgList)
        print('imgs,masks list is provided. FA images are assumed to be directoryOfImg/dti/ImgPrefix_FA.nii.gz, make sure they are there')
        faImgs= []

        for imgPath in imgs:
            directory = dirname(imgPath)
            prefix = basename(imgPath).split('.nii')[0]
            faImg= pjoin(directory, 'dti', prefix+ '_FA.nii.gz')
            if not isfile(faImg):
                raise FileNotFoundError(f'{faImg} not found. Did you run \"--create --debug\" and \"--process --debug\" before?')

            faImgs.append(faImg)


    except:
        faImgs= read_imgs(imgList)
        print('FA image list is provided.')
    

    # register and obtain *_InMNI_FA.nii.gz
    mniFAimgs= sub2tmp2mni(templatePath, siteName, faImgs, bshell_b, N_proc)
    
    # save statistics for future
    statFile = os.path.join(templatePath, 'meanFAstat.txt')
    f = open(statFile, 'a')
    # temporarily redirect print output into the stat file
    stdout = sys.stdout
    sys.stdout = f

    print(datetime.now().strftime('%c'),'\n')

    print('b-shell', bshell_b, '\n')

    # pass *_InMNI_FA.nii.gz list to analyzeStat
    site_means= analyzeStat(mniFAimgs)
    print(f'{siteName} site: ')
    printStat(site_means, mniFAimgs)

    f.close()
    sys.stdout= stdout

    # print statistics on console
    print('')
    with open(statFile) as f:
        print(f.read())

    print('\nThe statistics are also saved in ', statFile)
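Swapping sys.stdout by hand, as above, is easy to leave broken if an exception fires before it is restored; contextlib.redirect_stdout from the standard library expresses the same redirection more safely. A sketch of the equivalent block, reusing the variables already defined in this main():

from contextlib import redirect_stdout

# exception-safe equivalent of the manual sys.stdout swap above
with open(statFile, 'a') as f, redirect_stdout(f):
    print(datetime.now().strftime('%c'), '\n')
    print('b-shell', bshell_b, '\n')
    site_means = analyzeStat(mniFAimgs)
    print(f'{siteName} site: ')
    printStat(site_means, mniFAimgs)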
Example #10
def main():

    parser = argparse.ArgumentParser(
        description='''Warps diffusion measures (FA, MD, GFA) to template space 
    and then to MNI space. Finally, calculates mean FA over IITmean_FA_skeleton.nii.gz'''
    )
    parser.add_argument(
        '-i',
        '--input',
        type=str,
        required=True,
        help=
        'a .txt/.csv file that you used in/obtained from harmonization.py having two columns for (img,mask) pair. '
        'See documentation for more details')
    parser.add_argument(
        '-s',
        '--site',
        type=str,
        required=True,
        help='site name for locating template FA and mask in template directory'
    )
    parser.add_argument(
        '-t',
        '--template',
        type=str,
        required=True,
        help=
        'template directory where Mean_{site}_FA.nii.gz and {site}_Mask.nii.gz are located'
    )
    parser.add_argument('--ncpu', help='number of cpus to use', default='4')

    args = parser.parse_args()
    imgList = abspath(args.input)
    siteName = args.site
    templatePath = abspath(args.template)
    N_proc = int(args.ncpu)

    # read FA image list
    try:
        imgs, _ = read_imgs_masks(imgList)
        print(
            '(Img,Mask) list is provided. FA images are assumed to be directoryOfImg/dti/ImgPrefix_FA.nii.gz, make sure they are there\n'
        )
        faImgs = []

        for imgPath in imgs:
            directory = dirname(imgPath)
            prefix = basename(imgPath).split('.nii')[0]
            faImg = pjoin(directory, 'dti', prefix + '_FA.nii.gz')
            if not isfile(faImg):
                raise FileNotFoundError(
                    f'{faImg} not found. Did you run \"--create --debug\" and \"--process --debug\" before?'
                )

            faImgs.append(faImg)

    except:
        faImgs = read_imgs(imgList)
        print('FA image list is provided.')

    # register and obtain *_InMNI_FA.nii.gz
    mniFAimgs = sub2tmp2mni(templatePath, siteName, faImgs, N_proc)

    # target harmonized
    if imgList.endswith('.modified.harmonized'):
        header = siteName + '_after'
    # reference
    elif imgList.endswith('.modified'):
        header = siteName
    # target unprocessed
    else:
        header = siteName + '_before'

    # FIXME a user-provided FA image list will use the header {siteName+'_before'}, which is not always correct;
    # as shown in the block above, the reference should use {siteName} while the harmonized target should use {siteName+'_after'}.
    # The impact of this discrepancy is minor since use of a plain FA image list has been deprecated.

    outPrefix = pjoin(templatePath, header)

    print('\n\nComputing statistics\n\n')
    print(f'{siteName} site: ')
    site_means = analyzeStat(mniFAimgs)
    generate_csv(faImgs, site_means, outPrefix)

    # save statistics for future
    statFile = pjoin(templatePath, 'meanFAstat.csv')
    with open(statFile, 'a') as f:
        f.write(datetime.now().strftime('%m/%d/%y %H:%M') +
                ',mean meanFA,std meanFA\n')
        f.write(f'{header},{np.mean(site_means)},{np.std(site_means)}\n')
        # write an empty line so future results, if appended, are visually separate
        f.write('\n')

    # print statistics on console
    print('\n\nPrinting statistics\n\n')
    with open(statFile) as f:
        print(f.read())

    # generate demonstrative plots
    ebar = harm_plot([site_means], [header], outPrefix)

    print(
        f'\nDetailed statistics, summary results, and demonstrative plots are saved in:\n\n{outPrefix}_stat.csv'
        f'\n{statFile}\n{ebar}\n')
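analyzeStat is imported from the package; per the script description it computes, for each *_InMNI_FA.nii.gz, the mean FA over the IIT skeleton. A rough sketch of that computation (analyze_stat_sketch and the skeleton path are assumptions, nibabel is assumed):

import nibabel as nib

def analyze_stat_sketch(mniFAimgs, skeleton='IITmean_FA_skeleton.nii.gz'):
    # Hypothetical sketch: mean FA over skeleton voxels for each image
    # already warped to MNI space; not the package's analyzeStat.
    skel = nib.load(skeleton).get_fdata() > 0
    means = []
    for faImg in mniFAimgs:
        fa = nib.load(faImg).get_fdata()
        means.append(float(fa[skel].mean()))
    return means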
Example #11
    def main(self):
        
        self.sanityCheck()       

        self.templatePath= abspath(self.templatePath)
        self.N_shm= int(self.N_shm)
        self.N_proc= int(self.N_proc)
        if self.N_proc==-1:
            self.N_proc= N_CPU

        # recover the unprocessed csv names by dropping the .modified suffix
        if self.ref_csv:
            self.ref_unproc_csv= self.ref_csv.split('.modified')[0]
        self.tar_unproc_csv= self.target_csv.split('.modified')[0]

        if not self.stats:        
            # check appropriateness of N_shm
            if self.N_shm!=-1 and (self.N_shm<2 or self.N_shm>8):
                raise ValueError('2<= --nshm <=8')



            # determine N_shm in default mode during template creation
            if self.N_shm==-1 and self.create:
                if self.ref_csv:
                    ref_nshm_img = read_imgs_masks(self.ref_csv)[0][0]
                elif self.target_csv:
                    ref_nshm_img = read_imgs_masks(self.target_csv)[0][0]

                directory= dirname(ref_nshm_img)
                prefix= basename(ref_nshm_img).split('.nii')[0]
                bvalFile= pjoin(directory, prefix+'.bval')
                self.N_shm, _= determineNshm(bvalFile)


            # automatic determination of N_shm during data harmonization is limited by the N_shm used during template creation:
            # only Scale_L{i}.nii.gz files with i <= {N_shm used during template creation} are present in the template
            elif self.N_shm==-1 and self.process:
                for i in range(0,10,2):
                    if isfile(pjoin(self.templatePath, f'Scale_L{i}.nii.gz')):
                        self.N_shm= i
                    else:
                        break


            # verify validity of provided/determined N_shm for all subjects
            # single-shell-ness is verified inside verifyNshmForAll
            if self.ref_csv:
                verifyNshmForAll(self.ref_csv, self.N_shm)
            if self.target_csv:
                verifyNshmForAll(self.target_csv, self.N_shm)

        # write config file to temporary directory
        configFile= pjoin(gettempdir(),f'harm_config_{getpid()}.ini')
        with open(configFile,'w') as f:
            f.write('[DEFAULT]\n')
            f.write(f'N_shm = {self.N_shm}\n')
            f.write(f'N_proc = {self.N_proc}\n')
            f.write(f'N_zero = {self.N_zero}\n')
            f.write(f'resample = {self.resample if self.resample else 0}\n')
            f.write(f'bvalMap = {self.bvalMap if self.bvalMap else 0}\n')
            f.write(f'denoise = {1 if self.denoise else 0}\n')
            f.write(f'travelHeads = {1 if self.travelHeads else 0}\n')
            f.write(f'debug = {1 if self.debug else 0}\n')
            f.write(f'force = {1 if self.force else 0}\n')
            f.write('diffusionMeasures = {}\n'.format(','.join(self.diffusionMeasures)))


        if self.create:
            self.createTemplate()
            # after template creation, turn off the force flag in the config for the remaining steps
            import fileinput
            for line in fileinput.input(configFile, inplace=True):
                if 'force' in line:
                    print('force = 0')
                else:
                    # fileinput keeps the trailing newline, so suppress print's own
                    print(line, end='')
            self.force= False
            
        if self.process:
            self.harmonizeData()

        if self.create and self.process and self.debug:
            self.post_debug()

        if self.stats:
            if (self.create or self.process or self.debug):
                raise AttributeError('--stats option is for recomputing site statistics exclusively')
            else:
                self.showStat()

        remove(configFile)
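Since the config was written by this very method, the in-place fileinput edit above could also be expressed with a standard configparser round trip; a sketch of resetting the force flag that way (same configFile, hypothetical alternative):

from configparser import ConfigParser

# hypothetical alternative to the fileinput edit performed after createTemplate()
config = ConfigParser()
config.optionxform = str    # preserve the key case written above
config.read(configFile)
config['DEFAULT']['force'] = '0'
with open(configFile, 'w') as f:
    config.write(f)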
Example #12
    def harmonizeData(self):

        from reconstSignal import reconst, approx
        from preprocess import dti_harm, common_processing, preprocessing

        # check the templatePath
        if not exists(self.templatePath):
            raise NotADirectoryError(f'{self.templatePath} does not exist')
        else:
            if not listdir(self.templatePath):
                raise ValueError(f'{self.templatePath} is empty')



        # fit spherical harmonics on reference site
        if self.debug and self.ref_csv:
            check_csv(self.ref_unproc_csv, self.force)
            refImgs, refMasks= read_imgs_masks(self.ref_unproc_csv)
            res= []
            pool = multiprocessing.Pool(self.N_proc)
            for imgPath, maskPath in zip(refImgs, refMasks):
                res.append(pool.apply_async(func=preprocessing, args=(imgPath, maskPath)))

            attributes = [r.get() for r in res]

            pool.close()
            pool.join()

            for i in range(len(refImgs)):
                refImgs[i] = attributes[i][0]
                refMasks[i] = attributes[i][1]

            pool = multiprocessing.Pool(self.N_proc)
            for imgPath, maskPath in zip(refImgs, refMasks):
                pool.apply_async(func= approx, args=(imgPath,maskPath,))

            pool.close()
            pool.join()



        # go through each file listed in csv, check their existence, create dti and harm directories
        check_csv(self.target_csv, self.force)
        targetImgs, targetMasks= common_processing(self.tar_unproc_csv)


        # reconstSignal steps ------------------------------------------------------------------------------------------

        moving= pjoin(self.templatePath, f'Mean_{self.target}_FA.nii.gz')


        if not self.target_csv.endswith('.modified'):
            self.target_csv += '.modified'


        self.harm_csv= self.target_csv+'.harmonized'
        fh= open(self.harm_csv, 'w')
        pool = multiprocessing.Pool(self.N_proc)
        res= []
        for imgPath, maskPath in zip(targetImgs, targetMasks):
            res.append(pool.apply_async(func= reconst, args= (imgPath, maskPath, moving, self.templatePath,)))

        for r in res:
            harmImg, harmMask= r.get()
            fh.write(harmImg + ',' + harmMask + '\n')


        pool.close()
        pool.join()

        fh.close()
        
        
        if self.debug:
            harmImgs, harmMasks= read_imgs_masks(self.harm_csv)
            pool = multiprocessing.Pool(self.N_proc)
            for imgPath,maskPath in zip(harmImgs,harmMasks):
                pool.apply_async(func= dti_harm, args= (imgPath,maskPath,))
            pool.close()
            pool.join()
            
        print('\n\nHarmonization completed\n\n')