Example #1
0
def fillHoles(imgPath):
    """Fill interior holes of a NIFTI image, overwriting it in place.

    The image is binarized, FSL's ``fslmaths -fillh`` fills holes in the
    binary mask, and each newly filled voxel receives a tiny constant
    (10e-8) so the saved image has no zero-valued interior holes.
    Work happens inside a temporary directory that is removed afterwards.
    """

    with TemporaryDirectory() as tmpdir, local.cwd(tmpdir):

        srcImg = load(imgPath)
        srcData = srcImg.get_data()

        # binary mask of all strictly positive voxels
        binData = (srcData > 0.) * 1
        save_nifti('bin.nii.gz', binData.astype('uint8'), affine=srcImg.affine, hdr=srcImg.header)

        # let FSL fill holes in the binary mask
        fslmaths['bin.nii.gz', '-fillh', 'bin_filled.nii.gz'] & FG

        filledBin = load('bin_filled.nii.gz').get_data()

        # 1 exactly where a hole was filled, 0 elsewhere
        holeMask = filledBin - binData

        # stamp a tiny non-zero value into the filled voxels
        filledData = srcData + holeMask * 10e-8

        save_nifti(imgPath, filledData, affine=srcImg.affine, hdr=srcImg.header)
Example #2
0
def _combine_roi_csvs(cases, roiDir, modality, suffix, outFile):
    """Combine per-case ROI csv files into one table indexed by case id.

    Reads ``{roiDir}/{case}_{modality}_roi{suffix}.csv`` for every case,
    takes the 'Tract' column of the first file as the header, collects each
    case's 'Average' column as one row, and writes the combined,
    column-sorted table to ``outFile``.
    """
    # read one stat file to obtain the Tract headers
    df = pd.read_csv(pjoin(roiDir, f'{cases[0]}_{modality}_roi{suffix}.csv'))
    df_comb = pd.DataFrame(columns=np.append('Cases', df['Tract'].values))

    for i, c in enumerate(cases):
        df = pd.read_csv(pjoin(roiDir, f'{c}_{modality}_roi{suffix}.csv'))
        # num2str() text formatting is for precision control
        df_comb.loc[i] = np.append(
            c, np.array([num2str(x) for x in df['Average'].values]))

    df_comb.sort_index(axis=1).set_index('Cases').to_csv(outFile)
    print('Made ', outFile)


def roi_analysis(imgs, cases, args, statsDir, roiDir, N_CPU):
    """Run ROI-based statistics for all cases and combine the results.

    Parameters
    ----------
    imgs : list of warped image paths, parallel to ``cases``
    cases : list of case ids
    args : namespace with at least labelMap, lut, modality, avg attributes
    statsDir : directory where the combined csvs are written
    roiDir : directory holding/receiving the per-case csvs
    N_CPU : number of worker processes for subject_stat
    """

    intLabels = load(args.labelMap).get_data()
    # skip background label (np.unique(...)[1:]) when mapping int -> name
    label2name = parse_labels(np.unique(intLabels)[1:], args.lut)
    commonLabels = average_labels(label2name.values())

    # compute per-subject ROI statistics in parallel
    pool = Pool(N_CPU)
    for c, imgPath in zip(cases, imgs):

        # subject_stat(imgPath, c, args.modality, label2name, commonLabels, intLabels, roiDir, args.avg)
        pool.apply_async(func=subject_stat,
                         args=(imgPath, c, args.modality, label2name,
                               commonLabels, intLabels, roiDir, args.avg),
                         error_callback=RAISE)

    pool.close()
    pool.join()

    # combine the per-case csvs produced above into one table per flavor
    _combine_roi_csvs(cases, roiDir, args.modality, '',
                      pjoin(statsDir, f'{args.modality}_combined_roi.csv'))

    if args.avg:
        _combine_roi_csvs(cases, roiDir, args.modality, '_avg',
                          pjoin(statsDir,
                                f'{args.modality}_combined_roi_avg.csv'))
Example #3
0
def dwiMask(dwImg, outPrefix, median_radius, num_pass):
    """See brain mask extraction documentaion at http://nipy.org/dipy/examples_built/brain_extraction_dwi.html

    Computes a brain mask from the first b0 volume of ``dwImg`` using
    dipy's median_otsu and saves it as ``outPrefix + '_mask.nii.gz'``.

    Raises
    ------
    ValueError : if no b0 volume (bval < 50) is present.
    """
    from os.path import join, split

    # strip the extension from the basename only, so a directory
    # containing '.' (e.g. /data/sub.01/dwi.nii.gz) is not truncated
    dirName, baseName = split(dwImg)
    inPrefix = join(dirName, baseName.split('.')[0])

    img = load(dwImg)
    bvals, _ = read_bvals_bvecs(inPrefix + '.bval', None)

    # extract the first b0 (bval < 50)
    b0_indices = np.where(bvals < 50)[0]
    if not len(b0_indices):
        raise ValueError(f'No b0 volume (bval < 50) found in {inPrefix}.bval')
    ind = b0_indices[0]

    _, mask = median_otsu(img.get_data()[..., ind], median_radius, num_pass)

    save_nifti(outPrefix + '_mask.nii.gz', mask.astype('uint8'), img.affine,
               img.header)
Example #4
0
def calc_mean(imgs, shape):
    """Accumulate the sum and the voxelwise running minimum of FA volumes.

    Parameters
    ----------
    imgs : iterable of NIFTI file paths
    shape : 3-tuple, the (X, Y, Z) shape of each volume

    Returns
    -------
    (cumsumFA, dynminFA) : cumulative sum of all clipped volumes, and the
    voxelwise minimum over all volumes (+inf where imgs is empty).
    """

    cumsumFA = np.zeros(shape, dtype='float32')
    # row 0 holds the current volume, row 1 the running minimum (init +inf)
    consecutiveFA = np.inf * np.ones((2, shape[0], shape[1], shape[2]))
    # initialize before the loop so the return value is defined even when
    # imgs is empty (the original raised NameError in that case)
    dynminFA = consecutiveFA.min(axis=0)
    for imgPath in imgs:
        # negative FA values are artifacts; clip them to zero
        data = load(imgPath).get_data().clip(min=0.)
        cumsumFA += data

        consecutiveFA[0, :] = data
        dynminFA = consecutiveFA.min(axis=0)
        consecutiveFA[1, :] = dynminFA

    return (cumsumFA, dynminFA)
def project_skeleton(c, imgPath, args, skelDir):
    '''
    Part of FSL (ID: 5.0.11)
    tbss_skeleton (Version 1.03)
    Copyright(c) 2005-2007, University of Oxford (Stephen Smith)

    Usage:
    tbss_skeleton -i <inputimage> -o <skeleton>
    tbss_skeleton -i <inputimage> -p <skel_thresh> <distancemap> <search_rule_mask> <4Ddata> <projected_4Ddata> [-a <alt_4D>] [-s <alt_skeleton>]}

    Compulsory arguments (You MUST set one or more of):
        -i,--in	    input image

    Optional arguments (You may optionally specify one or more of):
        -o,--out	output image
        -p          <skel_thresh> <distancemap> <search_rule_mask> <4Ddata> <projected_4Ddata>
        -a	        alternative 4Ddata (e.g. L1)
        -s	        alternative skeleton
        -h,--help	display this message
        -d,--debug	switch on debugging image outputs
        -D,--debug2 <skelpoints>	de-project <skelpoints> points on skeleton back to all_FA space
    '''

    # FIXME: what to use with -i when ANTS/ENIGMA, look into tbss_skeleton.cc code

    print(f'projecting {imgPath} on skeleton ...')
    modImgSkel = pjoin(skelDir, f'{c}_{args.modality}_to_target_skel.nii.gz')

    if args.modality == 'FA':
        # FA itself is the 4D data being projected
        cmd = [
            'tbss_skeleton', '-i', imgPath, '-p', args.SKEL_THRESH,
            args.skeletonMaskDst, args.SEARCH_RULE_MASK, imgPath, modImgSkel,
            '-s', args.skeletonMask
        ]
    else:
        # non-FA modality: project along the subject's warped FA (-a flag
        # supplies the alternative 4D data)
        cmd = [
            'tbss_skeleton', '-i', imgPath, '-p', args.SKEL_THRESH,
            args.skeletonMaskDst, args.SEARCH_RULE_MASK,
            pjoin(args.outDir, 'FA', 'warped', f'{c}_FA_to_target.nii.gz'),
            modImgSkel, '-a', imgPath, '-s', args.skeletonMask
        ]

    # list form without shell=True is robust to spaces/metacharacters in
    # paths; str() guards against a numeric SKEL_THRESH, which would have
    # raised TypeError in the previous ' '.join based invocation
    check_call([str(x) for x in cmd])

    return load(modImgSkel).get_data()
Example #6
0
def calc_mean(imgs, shape, qc):
    """Sum all FA volumes; optionally also stack them into a 4D array.

    Returns (allFAdata, cumsumFA) where allFAdata is the per-case stack
    (None unless qc is truthy, since building it is a memory/compute
    overhead only needed for quality control) and cumsumFA is the
    voxelwise sum of all volumes, each clipped at zero.
    """

    # stacking every volume is expensive, so only allocate for QC
    if qc:
        allFAdata = np.zeros((len(imgs), shape[0], shape[1], shape[2]),
                             dtype='float32')
    else:
        allFAdata = None

    cumsumFA = np.zeros(shape, dtype='float32')

    for idx, path in enumerate(imgs):
        volume = load(path).get_data().clip(min=0.)
        cumsumFA += volume
        if qc:
            allFAdata[idx, :] = volume

    return (allFAdata, cumsumFA)
Example #7
0
def subject_stat(imgPath, c, modality, label2name, commonLabels, labelMap,
                 roiDir, avgFlag):
    """Write ROI-wise statistics of one subject's skeletonized image.

    For each label in label2name, computes the mean value and voxel count
    of imgPath restricted to that label's region (nonzero voxels only) and
    writes them to {roiDir}/{c}_{modality}_roi.csv.  If avgFlag is set,
    additionally writes {c}_{modality}_roi_avg.csv where Right/Left label
    pairs sharing a common stem are averaged (weighted by voxel count).

    Parameters
    ----------
    imgPath : path of the subject's image
    c : case id (used in output file names)
    modality : e.g. 'FA' (used in output file names and the first row)
    label2name : dict mapping integer label -> tract name
    commonLabels : tract name stems common to R/L label pairs
    labelMap : integer label volume aligned with imgPath
    roiDir : output directory for the csv files
    avgFlag : whether to also produce the R/L-averaged csv
    """

    print('Creating ROI based statistics for', imgPath)
    img = load(imgPath).get_data()
    # restrict all statistics to nonzero voxels of the image
    _imgNonzero = img > 0

    df = pd.DataFrame(columns=['Tract', 'Average', 'nVoxels'])

    # row 0: whole-image average over all nonzero voxels
    _img_roi = img[_imgNonzero]
    df.loc[0] = [f'Average{modality}'
                 ] + [num2str(x) for x in [_img_roi.mean(), _img_roi.size]]

    stat_file = pjoin(roiDir, f'{c}_{modality}_roi.csv')
    avg_stat_file = pjoin(roiDir, f'{c}_{modality}_roi_avg.csv')

    # rows 1..N: one row per label, mean and voxel count within the ROI
    for i, intLabel in enumerate(label2name.keys()):
        roi = labelMap == int(intLabel)
        _roi = np.logical_and(_imgNonzero, roi)
        _img_roi = img[_roi]

        if _img_roi.size:
            df.loc[i + 1] = [label2name[intLabel]] + [
                num2str(x) for x in [_img_roi.mean(), _img_roi.size]
            ]
        else:
            # empty ROI: report zeros rather than NaN from mean()
            df.loc[i + 1] = [label2name[intLabel]] + ['0', '0']

    df.set_index('Tract').to_csv(stat_file)
    # FIXME: save unsorted df to match with that of ENIGMA?
    # df.sort_values(by='Tract').set_index('Tract').to_csv(stat_file)
    print('Made ', stat_file)

    if avgFlag:

        df_avg = pd.DataFrame(columns=['Tract', 'Average', 'nVoxels'])
        # keep the whole-image average row as-is
        df_avg.loc[0] = df.loc[0].copy()

        row = 1
        for common in commonLabels:
            # dm collects the R/L means, num their voxel counts
            dm = []
            num = []

            for i, label in enumerate(label2name.values()):
                # label.split('-') to avoid confusion between CP being in both CP-R and ICP-R
                # \b word boundaries make 'CP' match CP-R but not ICP-R
                if re.search(r'\b' + common + r'\b', label):

                    df_avg.loc[row] = df.loc[i +
                                             1].copy()  # Right or Left value
                    row += 1
                    dm.append(float(df.loc[i + 1][1]))
                    num.append(int(df.loc[i + 1][2]))

                    # since we are averaging over R/L only, len(dm) <= 2
                    if len(dm) == 2:
                        # average of R/L
                        # uniform weights [1, 1] when both ROIs are empty,
                        # avoiding a zero-division in np.average
                        df_avg.loc[row] = [
                            common,
                            num2str(
                                np.average(
                                    dm,
                                    weights=num if np.sum(num) else [1, 1])),
                            str(int(np.sum(num)))
                        ]
                        row = row + 1
                        break

        # FIXME: save unsorted df_avg so Tract, Right-Tract, and Left-Tract are together?
        df_avg.sort_values(by='Tract').set_index('Tract').to_csv(avg_stat_file)
        print('Made ', avg_stat_file)
Example #8
0
def skeletonize(imgs, cases, args, skelDir, miFile):
    """Skeletonize all warped images against the template skeleton.

    Optionally (args.qc) builds and pauses on an all-subject 4D volume for
    visual QC, creates the template mask / skeleton / skeleton mask /
    skeleton distance map as needed (filling in the corresponding args
    attributes), projects every subject onto the skeleton in parallel, and
    (unless args.noAllSkeleton) merges the results into one 4D volume.

    Returns the (mutated) args namespace with the newly created template /
    skeleton path attributes filled in.
    """

    target = load(args.template)
    targetData = target.get_data()
    X, Y, Z = targetData.shape[0], targetData.shape[1], targetData.shape[2]

    # provide the user with allFA sequence so he knows which volume he is looking at while scrolling through allFA
    seqFile = pjoin(args.statsDir, f'all_{args.modality}_sequence.txt')
    with open(seqFile, 'w') as f:
        f.write('index,caseid\n')
        for i, c in enumerate(cases):
            f.write(f'{i},{c}\n')

    print(f'Calculating mean {args.modality} over all the cases ...')
    allFAdata, cumsumFA = calc_mean(imgs, (X, Y, Z), args.qc)

    if args.qc:
        # save the 4D stack (cases along last axis) and pause for manual QC
        allFA = pjoin(args.statsDir, f'all_{args.modality}.nii.gz')
        save_nifti(allFA, np.moveaxis(allFAdata, 0, -1), target.affine,
                   target.header)

        print(
            f'''\n\nQC the warped {args.modality} images: {allFA}, view {seqFile} for index of volumes in all_FA.nii.gz. 
You may use fsleyes/fslview to load {allFA}.

MI metric b/w the warped images and target are stored in {miFile}

It might be helpful to re-run registration for warped images that are bad.

Moving images are   :   {args.outDir}/preproc/
Target is           :   {args.template}
Transform files are :   {args.xfrmDir}/
Warped images are   :   {args.outDir}/warped/

Save any re-registered images in {args.outDir}/warped/ with the same name as before

For re-registration of any subject, output the transform files to a temporary directory:
        
        mkdir /tmp/badRegistration/
        
        antsRegistrationSyNQuick.sh -d 3 \\
        -f TEMPLATE \\
        -m FA/preproc/caseid_FA.nii.gz \\
        -o /tmp/badRegistration/caseid_FA
        
        antsApplyTransforms -d 3 \\
        -i FA/preproc/caseid_FA.nii.gz \\
        -o FA/warped/caseid_[FA/MD/AD/RD]_to_target.nii.gz \\
        -r TEMPLATE \\
        -t /tmp/badRegistration/caseid_FA1Warp.nii.gz /tmp/badRegistration/caseid_FA0GenericAffine.mat
    
Finally, if wanted, you can copy the transform files to {args.xfrmDir}/ directory.

Note: Replace all the above directories with absolute paths.\n\n''')

        # block until the user confirms QC/re-registration is done
        while input('Press Enter when you are done with QC/re-registration: '):
            pass

        # recompute because warped images may have been replaced during QC
        allFAdata, cumsumFA = calc_mean(imgs, targetData.shape, args.qc)

    meanFAdata = cumsumFA / len(imgs)
    meanFA = pjoin(args.statsDir, 'mean_FA.nii.gz')

    # outDir should contain
    # all_{modality}.nii.gz
    # mean_FA.nii.gz
    # mean_FA_mask.nii.gz
    # mean_FA_skeleton.nii.gz
    # mean_FA_skeleton_mask.nii.gz
    # mean_FA_skeleton_mask_dst.nii.gz

    if args.modality == 'FA':

        if not args.templateMask:
            print('Creating template mask ...')
            args.templateMask = pjoin(args.statsDir, 'mean_FA_mask.nii.gz')
            meanFAmaskData = (meanFAdata > 0) * 1
            save_nifti(args.templateMask, meanFAmaskData.astype('uint8'),
                       target.affine, target.header)

        else:
            meanFAmaskData = load(args.templateMask).get_data()

        # restrict the mean FA to the template mask before skeletonizing
        meanFAdata = meanFAdata * meanFAmaskData
        save_nifti(meanFA, meanFAdata, target.affine, target.header)

        # if skeleton is not given:
        #     create all three of skeleton, skeletonMask, and skeletonMaskDst

        # if skeleton is given and (neither skeletonMask nor skeletonMaskDst is given):
        #     create skeletonMask and skeletonMaskDst

        # if skeleton and skeletonMask is given and skeletonMaskDst is not given:
        #     create skeletonMaskDst

        if not args.skeleton:
            print(
                'Creating all three of skeleton, skeletonMask, and skeletonMaskDst ...'
            )
            args.skeleton = pjoin(args.statsDir, 'mean_FA_skeleton.nii.gz')
            args.skeletonMask = pjoin(args.statsDir,
                                      'mean_FA_skeleton_mask.nii.gz')
            args.skeletonMaskDst = pjoin(args.statsDir,
                                         'mean_FA_skeleton_mask_dst.nii.gz')

            _create_skeleton(meanFA, args.skeleton)
            _create_skeletonMask(args.skeleton, args.SKEL_THRESH,
                                 args.skeletonMask)
            _create_skeletonMaskDst(args.templateMask, args.skeletonMask,
                                    args.skeletonMaskDst)

        # NOTE: args.skeleton/Mask/MaskDst are set above when created, so the
        # following conditions are false once the branch above has run
        if args.skeleton and not (args.skeletonMask or args.skeletonMaskDst):
            print('Creating skeletonMask and skeletonMaskDst ...')
            args.skeletonMask = pjoin(args.statsDir,
                                      'mean_FA_skeleton_mask.nii.gz')
            args.skeletonMaskDst = pjoin(args.statsDir,
                                         'mean_FA_skeleton_mask_dst.nii.gz')

            _create_skeletonMask(args.skeleton, args.SKEL_THRESH,
                                 args.skeletonMask)
            _create_skeletonMaskDst(args.templateMask, args.skeletonMask,
                                    args.skeletonMaskDst)

        if args.skeleton and not args.skeletonMask and args.skeletonMaskDst:
            print('Creating skeletonMask ...')
            args.skeletonMask = pjoin(args.statsDir,
                                      'mean_FA_skeleton_mask.nii.gz')

            _create_skeletonMask(args.skeleton, args.SKEL_THRESH,
                                 args.skeletonMask)

        if (args.skeleton and args.skeletonMask) and not args.skeletonMaskDst:
            print('Creating skeletonMaskDst ...')
            args.skeletonMaskDst = pjoin(args.statsDir,
                                         'mean_FA_skeleton_mask_dst.nii.gz')

            _create_skeletonMaskDst(args.templateMask, args.skeletonMask,
                                    args.skeletonMaskDst)

    # mask allFA, this step does not seem to have any effect on the pipeline, it should help the user to visualize only
    if args.qc:
        check_call(
            (' ').join(['fslmaths', allFA, '-mas', args.templateMask, allFA]),
            shell=True)

    # projecting all {modality} data onto skeleton
    pool = Pool(args.ncpu)
    for c, imgPath in zip(cases, imgs):
        pool.apply_async(project_skeleton, (c, imgPath, args, skelDir),
                         error_callback=RAISE)

    pool.close()
    pool.join()

    if not args.noAllSkeleton:

        allFAskeletonized = pjoin(args.statsDir,
                                  f'all_{args.modality}_skeletonized.nii.gz')
        print('Creating ', allFAskeletonized)

        # this loop has been moved out of multiprocessing block to prevent memroy error
        allFAskeletonizedData = np.zeros((len(imgs), X, Y, Z), dtype='float32')
        for i, c in enumerate(cases):
            allFAskeletonizedData[i, :] = load(
                pjoin(
                    skelDir,
                    f'{c}_{args.modality}_to_target_skel.nii.gz')).get_data()

        save_nifti(allFAskeletonized, np.moveaxis(allFAskeletonizedData, 0,
                                                  -1), target.affine,
                   target.header)
        print(
            f'Created {allFAskeletonized} and corresponding index file: {seqFile}'
        )

    return args