Example #1
def estimate_hippocampus(subject, vem_iters=VEM_ITERS, beta=BETA, register=True): 
    f_im, f_msk = get_image_files(subject)
    f_tiv = get_tiv_image(subject) 
    im = load(f_im)
    msk = reorient_mask(load(f_msk), im) # just for posterior evaluation
    tiv = reorient_tiv(load(f_tiv), im) 
    print(im.get_shape())
    print(tiv.get_shape())
    save(im, 'fixed.nii')
    save(msk, 'fixed_mask.nii')
    save(tiv, 'fixed_tiv.nii')

    # register atlas and deform hippocampus ppm
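    # Two-stage scheme, as implemented below: a global affine registration of
    # the template to the subject image, optionally refined on the hippocampal
    # bounding box (HIPPO_CORNER / HIPPO_SIZE), after which the hippocampus
    # prior probability map is resampled into subject space.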
    if register:
        if NIPY_REGISTRATION:
            I = load_image('template.nii')
            J = AffineImage(im.get_data(), im.get_affine(), 'scanner')
            R = HistogramRegistration(I, J, similarity='crl1', interp='pv')
            T = R.optimize('affine')
            if HIPPO_CORNER is not None:
                R.subsample(corner=HIPPO_CORNER, size=HIPPO_SIZE)
                T = R.optimize(T)
            save_image(resample(I, T.inv(), reference=J), 'r_template.nii')
            tmp = resample(load_image('hippocampus_prior.nii'), T.inv(),
                           reference=J, dtype='double')
            #save_image(tmp, 'r_hippocampus_prior.nii')
            tmp_data = np.minimum(np.maximum(tmp.get_data(), 0.0),
                                  USHORT_MAX).astype('uint16')
            save_image(AffineImage(tmp_data, tmp.affine, 'scanner'),
                       'r_hippocampus_prior.nii')
        else:
            system('./register template.nii fixed.nii hippocampus_prior.nii '
                   + 'r_hippocampus_prior.nii r_template.nii')
            if HIPPO_CORNER is not None:
                I = load_image('template.nii')
                Izoom = I[tuple([slice(c, c + s) for c, s
                                 in zip(HIPPO_CORNER, HIPPO_SIZE)])]

                print(type(Izoom))

                save_image(Izoom, 'zoom_template.nii')
                system('./register zoom_template.nii fixed.nii '
                       + 'hippocampus_prior.nii '
                       + 'r_hippocampus_prior.nii r_template.nii')

    # perform tissue classification
    if vem_iters == 0:
        # use the precomputed gray-matter PPM; note that the CSF quantities
        # used further down (csf_ppm, count_csf, s2_csf) are only defined in
        # the vem_iters > 0 branch
        f_gray_ppm = get_gray_ppm_image(subject)
        gray_ppm = reorient_tiv(load(f_gray_ppm), im)
        save(gray_ppm, 'fixed_gray_ppm.nii')
        count_tiv = len(np.where(tiv.get_data() > 0)[0])
        count_gm = np.sum(gray_ppm.get_data())
        s2_gm = np.sum(gray_ppm.get_data() ** 2)
    else:
        gray_ppm, csf_ppm, count_tiv = perform_tissue_classification(
            tiv, vem_iters, beta,
            scheme=SCHEME, noise=NOISE, labels=LABELS,
            mixmat=MIXMAT, freeze_prop=FREEZE_PROP)
        count_gm = np.sum(gray_ppm.get_data())
        count_csf = np.sum(csf_ppm.get_data())
        s2_gm = np.sum(gray_ppm.get_data() ** 2)
        s2_csf = np.sum(csf_ppm.get_data() ** 2)
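    # count_* hold sums of posterior probabilities (expected voxel counts) and
    # s2_* hold sums of squared probabilities; both are turned into volumes
    # and relative uncertainties further down.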

    # compound hippocampus probabilities
    hippo_prior = load('r_hippocampus_prior.nii')
    hippo_ppm = compound_proba(hippo_prior, gray_ppm)
    save(hippo_ppm, 'r_hippocampus_ppm.nii')

    # estimate hippocampus volume
    jacobian = np.abs(np.linalg.det(hippo_prior.get_affine()))
    count_hippo = np.sum(from_ushort(hippo_prior.get_data()))
    count_hippo_gm = np.sum(hippo_ppm.get_data())
    s2_hippo = np.sum(from_ushort(hippo_prior.get_data()) ** 2)
    s2_hippo_gm = np.sum(hippo_ppm.get_data() ** 2)

    # compute Dice coefficient
    hippo_msk = np.where(msk.get_data() > 0)
    count_true_hippo = float(len(hippo_msk[0]))
    count_inter = np.sum(from_ushort(hippo_prior.get_data())[hippo_msk])
    dice_coeff = 2 * count_inter / (count_hippo + count_true_hippo)
    count_true_hippo_gm = np.sum(gray_ppm.get_data()[hippo_msk])

    # CSF
    hippo_csf_ppm = compound_proba(hippo_prior, csf_ppm)
    save(hippo_csf_ppm, 'r_hippocampus_csf_ppm.nii')
    count_hippo_csf = np.sum(hippo_csf_ppm.get_data())
    s2_hippo_csf = np.sum(hippo_csf_ppm.get_data() ** 2)

    # hack
    """
    dat = np.zeros(gray_ppm.get_shape())
    dat[hippo_msk] = gray_ppm.get_data()[hippo_msk]
    save(Nifti1Image(dat, gray_ppm.get_affine()), 'compound.nii')
    """
    def relative_std(count, s2):
        return (np.sqrt(np.maximum(count - s2, 0.0))
                / np.maximum(count, 1e-20))

    # output
    return {'tiv': count_tiv * jacobian,
            'gm': count_gm * jacobian,
            'csf': count_csf * jacobian,
            'hippo': count_hippo * jacobian,
            'hippo_gm': count_hippo_gm * jacobian,
            'hippo_csf': count_hippo_csf * jacobian,
            'true_hippo_gm': count_true_hippo_gm * jacobian,
            'true_hippo': count_true_hippo * jacobian,
            'dice': dice_coeff,
            'jacobian': jacobian,
            'gm_rstd': relative_std(count_gm, s2_gm),
            'csf_rstd': relative_std(count_csf, s2_csf),
            'hippo_rstd': relative_std(count_hippo, s2_hippo),
            'hippo_gm_rstd': relative_std(count_hippo_gm, s2_hippo_gm),
            'hippo_csf_rstd': relative_std(count_hippo_csf, s2_hippo_csf)}
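
# Usage sketch (hypothetical, not part of the original script): the subject
# identifier below is an assumed placeholder; VEM_ITERS and BETA are the
# module-level defaults already used in the signature above.
#
# results = estimate_hippocampus('subject_01')
# print('TIV (mm3): %f' % results['tiv'])
# print('hippocampal GM volume (mm3): %f' % results['hippo_gm'])
# print('Dice against the manual mask: %f' % results['dice'])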
save_image(Jat, 'affine_anubis_to_ammon.nii')

# Region matching
t0 = time.time()

##corners, size = get_blocks(I.shape, 3, 1, 0) #.5 size
##corners, size = get_blocks(I.shape, 6, 2, 0) #.75 size
##corners, size = get_blocks(I.shape, 6, 1, 0) # .5 size

corners, size = get_blocks(I.shape, 5, 2, 1)

affines = []
for corner in corners:
    print('Doing block: %s' % corner)
    Ar = A.copy()
    R.subsample(corner=corner, size=size)
    R.optimize(Ar)
    affines.append(Ar)

# Create polyaffine transform
t1 = time.time()
centers = np.array(corners) + (size - 1) / 2.
affines = [Ar.compose(Affine(I.affine)) for Ar in affines]
Tv = PolyAffine(centers, affines, .5 * size)
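# The block-wise affines estimated above are blended into a single smooth
# transform; the third argument (half the block size) presumably sets the
# width of the kernel weighting each local affine around its block center.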

# Resample target image
t2 = time.time()
Jt = resample(J, Tv, reference=I, ref_voxel_coords=True)
###c = debug_resample(Tv, I, J)

# Save resampled image