Example No. 1
def apply_repro_analysis(dataset, thresholds=(3.0,), method='crfx'):
    """
    Perform the reproducibility analysis of the dataset at each of the
    given thresholds, using the specified assessment method.
    """
    import numpy as np
    from nipy.labs.spatial_models.discrete_domain import grid_domain_from_array
    # the reproducibility measures are assumed to live in
    # nipy.labs.utils.reproducibility_measures
    from nipy.labs.utils.reproducibility_measures import (
        voxel_reproducibility, cluster_reproducibility, peak_reproducibility)

    nsubj, dimx, dimy = dataset.shape
    
    func = np.reshape(dataset,(nsubj, dimx * dimy)).T
    var = np.ones((dimx * dimy, nsubj))
    domain = grid_domain_from_array(np.ones((dimx, dimy, 1)))

    ngroups = 5
    sigma = 2.0
    csize = 10
    niter = 5
    verbose = 0
    swap = False

    kap, clt, pkd = [], [], []
    for threshold in thresholds:
        kappa, cls, pks = [], [], []
        kwargs = {'threshold': threshold, 'csize': csize}
        for i in range(niter):
            k = voxel_reproducibility(func, var, domain, ngroups,
                                      method, swap, verbose, **kwargs)
            kappa.append(k)
            cld = cluster_reproducibility(func, var, domain, ngroups, sigma,
                                          method, swap, verbose, **kwargs)
            cls.append(cld)
            pk = peak_reproducibility(func, var, domain, ngroups, sigma,
                                      method, swap, verbose, **kwargs)
            pks.append(pk)
        
        kap.append(np.array(kappa))
        clt.append(np.array(cls))
        pkd.append(np.array(pks))
    kap = np.array(kap)
    clt = np.array(clt)
    pkd = np.array(pkd)
    return kap, clt, pkd
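
A minimal usage sketch for this helper, assuming the simulation utility surrogate_2d_dataset from nipy.labs.utils.simul_multisubject_fmri_dataset (the same helper used in the examples below); the positions, amplitudes and shapes are arbitrary:

import numpy as np
from nipy.labs.utils.simul_multisubject_fmri_dataset import surrogate_2d_dataset

nsubj, dimx, dimy = 20, 40, 40
dataset = surrogate_2d_dataset(nbsubj=nsubj, dimx=dimx, dimy=dimy,
                               pos=np.array([[10, 10], [25, 30]]),
                               ampli=np.array([3., 4.]), width=5.0)
# reshape defensively so the input has shape (nsubj, dimx, dimy)
dataset = np.reshape(dataset, (nsubj, dimx, dimy))
kap, clt, pkd = apply_repro_analysis(dataset, thresholds=[3.0, 4.0])
print(kap.shape)  # (n_thresholds, niter)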
Example No. 2
def group_reproducibility_metrics(
    mask_images, contrast_images, variance_images, thresholds, ngroups,
    method, cluster_threshold=10, number_of_samples=10, sigma=6.,
    do_clusters=True, do_voxels=True, do_peaks=True, swap=False):
    """
    Main function to perform the reproducibility analysis, including Nifti1 I/O

    Parameters
    ----------
    thresholds: list or 1-d array,
                the thresholds to be tested

    Returns
    -------
    voxel_rep_results: dictionary,
                       results of voxel-level reproducibility analysis
    cluster_rep_results: dictionary,
                         results of cluster-level reproducibility analysis
    peak_rep_results: dictionary,
                      results of peak-level reproducibility analysis
    """
    from nibabel import load
    from ..mask import intersect_masks
    from nipy.labs.spatial_models.discrete_domain import grid_domain_from_array
    
    if len(variance_images) == 0 and method != 'crfx':
        raise ValueError('Variance images are necessary')

    nsubj = len(contrast_images)

    # compute the group mask
    affine = load(mask_images[0]).get_affine()
    mask = intersect_masks(mask_images, threshold=0) > 0
    domain = grid_domain_from_array(mask, affine)

    # read the data
    group_con = []
    group_var = []
    for s in range(nsubj):
        group_con.append(load(contrast_images[s]).get_data()[mask])
        if len(variance_images) > 0:
            group_var.append(load(variance_images[s]).get_data()[mask])

    group_con = np.squeeze(np.array(group_con)).T
    group_con[np.isnan(group_con)] = 0
    if len(variance_images) > 0:
        group_var = np.squeeze(np.array(group_var)).T
        group_var[np.isnan(group_var)] = 0
        group_var = np.maximum(group_var, 1.e-15)

    # perform the analysis
    voxel_rep_results = {}
    cluster_rep_results = {}
    peak_rep_results = {}

    for ng in ngroups:
        if do_voxels:
            voxel_rep_results.update({ng: {}})
        if do_clusters:
            cluster_rep_results.update({ng: {}})
        if do_peaks:
            peak_rep_results.update({ng: {}})
        for th in thresholds:
            kappa = []
            cls = []
            pk = []
            kwargs = {'threshold': th, 'csize': cluster_threshold}

            for i in range(number_of_samples):
                if do_voxels:
                    kappa.append(voxel_reproducibility(
                            group_con, group_var, domain, ng, method, swap,
                            **kwargs))
                if do_clusters:
                    cls.append(cluster_reproducibility(
                            group_con, group_var, domain, ng, sigma, method,
                            swap, **kwargs))
                if do_peaks:
                    pk.append(peak_reproducibility(
                            group_con, group_var, domain, ng, sigma, method,
                            swap, **kwargs))

            if do_voxels:
                voxel_rep_results[ng].update({th: np.array(kappa)})
            if do_clusters:
                cluster_rep_results[ng].update({th: np.array(cls)})
            if do_peaks:
                peak_rep_results[ng].update({th: np.array(pk)})

    return voxel_rep_results, cluster_rep_results, peak_rep_results
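
A hedged usage sketch; the file names below are placeholders for per-subject Nifti images, and the 'crfx' method does not require variance images (per the check above):

# hypothetical file lists -- replace with real per-subject images
mask_images = ['subject%02d_mask.nii' % s for s in range(12)]
contrast_images = ['subject%02d_con.nii' % s for s in range(12)]

voxel_res, cluster_res, peak_res = group_reproducibility_metrics(
    mask_images, contrast_images, [], thresholds=[3.0, 4.0], ngroups=[4],
    method='crfx', cluster_threshold=10, number_of_samples=10, sigma=6.)

# e.g. voxel_res[4][3.0] holds the kappa values over the 10 samples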
Example No. 3
# imports (module paths assumed from the nipy.labs package, as used below)
import numpy as np
import nipy.labs.utils.simul_multisubject_fmri_dataset as simul
from nipy.labs.spatial_models.discrete_domain import grid_domain_from_array

# Generate the data
nsubj = 105
dimx = 60
dimy = 60
pos = np.array([[12, 14],
                [20, 20],
                [30, 20]])
ampli = np.array([2.5, 3.5, 3])
dataset = simul.surrogate_2d_dataset(nbsubj=nsubj, dimx=dimx, dimy=dimy, 
                                     pos=pos, ampli=ampli, width=5.0)
betas = np.reshape(dataset, (nsubj, dimx, dimy))

# set the variance at 1 everywhere
func = np.reshape(betas, (nsubj, dimx * dimy)).T
var = np.ones((dimx * dimy, nsubj))
domain = grid_domain_from_array(np.ones((dimx, dimy, 1)))

###############################################################################
# Run reproducibility analysis 

ngroups = 10
thresholds = np.arange(.5, 6., .5)
sigma = 2.0
csize = 10
niter = 10
method = 'crfx'
verbose = 0

# do not use permutations
swap = False
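
With the parameters above, the analysis loop mirrors Example No. 1; a short sketch, assuming the measures come from nipy.labs.utils.reproducibility_measures:

from nipy.labs.utils.reproducibility_measures import (
    voxel_reproducibility, cluster_reproducibility)

kappa = np.zeros((len(thresholds), niter))
cld = np.zeros((len(thresholds), niter))
for t, threshold in enumerate(thresholds):
    kwargs = {'threshold': threshold, 'csize': csize}
    for i in range(niter):
        kappa[t, i] = voxel_reproducibility(func, var, domain, ngroups,
                                            method, swap, verbose, **kwargs)
        cld[t, i] = cluster_reproducibility(func, var, domain, ngroups, sigma,
                                            method, swap, verbose, **kwargs)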
Example No. 4
# imports (module paths assumed from the nipy.labs package)
import numpy as np
import nipy.labs.utils.simul_multisubject_fmri_dataset as simul
import nipy.labs.spatial_models.discrete_domain as dom
import nipy.labs.spatial_models.hierarchical_parcellation as hp

# step 1 : generate a simulated dataset
# nsubj and dimx are not defined in this snippet; the values below are
# assumed (10 subjects matches the 2 x 5 subplot grid used at the end)
nsubj = 10
dimx = 60
dimy = 60
pos = 3 * np.array([[6, 7],
                  [10, 10],
                  [15, 10]])
ampli = np.array([5, 7, 6])
sjitter = 6.0
dataset = simul.surrogate_2d_dataset(nbsubj=nsubj, dimx=dimx, dimy=dimy, 
                                     pos=pos, ampli=ampli, width=10.0)
# dataset represents 2D activation images from nsubj subjects,
# with shape (dimx, dimy)

# step 2 : prepare all the information for the parcellation
nbparcel = 10
ref_dim = (dimx, dimy)
ldata = np.reshape(dataset, (nsubj, dimx * dimy, 1))
domain = dom.grid_domain_from_array(np.ones(ref_dim))

# step 3 : run the algorithm
Pa = hp.hparcel(domain, ldata, nbparcel, mu=3.0)
# note: play with mu to change the 'stiffness' of the parcellation

# step 4:  look at the results
Label = np.array([np.reshape(Pa.individual_labels[:, s], (dimx, dimy))
                   for s in range(nsubj)])

import matplotlib.pyplot as mp
mp.figure(figsize=(8, 4))
mp.suptitle('Input data')
for s in range(nsubj):
    mp.subplot(2, 5, s + 1)
    mp.imshow(dataset[s], interpolation='nearest')
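
To also look at the resulting parcellations, a plotting sketch along the same lines (the colour limits are arbitrary):

mp.figure(figsize=(8, 4))
mp.suptitle('Individual parcellations')
for s in range(nsubj):
    mp.subplot(2, 5, s + 1)
    mp.imshow(Label[s], interpolation='nearest', vmin=-1, vmax=nbparcel)
    mp.axis('off')
mp.show()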