def signal_to_pproba(test, learn=None, method='prior', alpha=0.01, verbose=0):
    """
    Convert a set of z-values to posterior probabilities of belonging
    to the null (inactive) class

    Parameters
    ----------
    test: array of shape (n_samples, 1),
           data that is assessed
    learn: array of shape (n_samples, 1), optional
           data used to learn a mixture model
    method: string, optional, to be chosen within
            ['gauss_mixture', 'emp_null', 'gam_gauss', 'prior']
    alpha: float in [0, 1], optional,
           prior probability that a region is active;
           should be chosen close to 0
    verbose: verbosity mode

    Returns
    -------
    bf0: array of shape (n_samples),
         the posterior probability that each sample belongs to the null class
    """
    if method == 'gauss_mixture':
        prior_strength = 100
        fixed_scale = True
        bfp = en.three_classes_GMM_fit(
            learn, test, alpha, prior_strength, verbose, fixed_scale)
        bf0 = bfp[:, 1]
    elif method == 'emp_null':
        enn = en.ENN(learn)
        enn.learn()
        bf0 = np.reshape(enn.fdr(test), np.size(test))
    elif method == 'gam_gauss':
        bfp = en.Gamma_Gaussian_fit(learn, test, verbose)
        bf0 = bfp[:, 1]
    elif method == 'prior':
        y0 = st.norm.pdf(test)
        shape_, scale_ = 3., 2.
        y1 = st.gamma.pdf(test, shape_, scale=scale_)
        bf0 = np.ravel((1 - alpha) * y0 / (alpha * y1 + (1 - alpha) * y0))
    else:
        raise ValueError('Unknown method')
    return bf0
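
# Usage sketch (not part of the original module): synthetic z-values; assumes
# numpy (np) and scipy.stats (st) are imported, as the function above requires.
if __name__ == '__main__':
    z_values = np.concatenate((np.random.randn(900),
                               4 + np.random.randn(100)))[:, np.newaxis]
    bf0 = signal_to_pproba(z_values, method='prior', alpha=0.01)
    # bf0 is the posterior probability of the null: small values flag activity
    print('%d samples called active out of %d' % (np.sum(bf0 < 0.5), bf0.size))
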
Example #2
# read the mask image
nim = load(mask_image)
mask = nim.get_data()

# read the functional image
rbeta = load(input_image)
beta = rbeta.get_data()
beta = beta[mask>0]
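# Sketch (not in the original example): if the image files are not at hand, a
# synthetic beta with a null bulk and a small positive tail can stand in, e.g.
#    beta = np.concatenate((np.random.randn(9000), 3 + np.random.randn(1000)))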

mf = mp.figure()
a1 = mp.subplot(1,3,1)
a2 = mp.subplot(1,3,2)
a3 = mp.subplot(1,3,3)

# fit beta's histogram with a Gamma-Gaussian mixture
# test values at which the fitted models are evaluated
bfm = np.array([2.5, 3.0, 3.5, 4.0, 4.5])
bfp = en.Gamma_Gaussian_fit(beta, bfm, verbose=2, mpaxes=a1)

# fit beta's histogram with a mixture of Gaussians
alpha = 0.01
pstrength = 100
bfq = en.three_classes_GMM_fit(beta, bfm, alpha, pstrength,
                               verbose=2, mpaxes=a2)

# fit the null mode of beta with the robust method
efdr = en.ENN(beta)
efdr.learn()
efdr.plot(bar=0, mpaxes=a3)

mf.set_size_inches(15, 5, forward=True)
mp.show()
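
# Possible follow-up (sketch, not in the original example): the fitted
# empirical null can convert the data into voxel-wise fdr values via ENN.fdr,
# the same call used in signal_to_pproba above.
fdr_values = efdr.fdr(beta)
print('voxels with fdr < 0.05: %d' % np.sum(fdr_values < 0.05))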
Example #3
def compute_individual_regions(Fbeta, lbeta, coord, dmax, xyz,
                               affine=np.eye(4),  shape=None,  smin=5,
                               theta=3.0, verbose=0, reshuffle=0):
    """
    Compute the Bayesian Structural Activation patterns,
    with statistical validation

    Parameters
    ----------
    Fbeta: nipy.neurospin.graph.field.Field instance,
          a field describing the spatial relationships
          in the dataset. nbnodes = Fbeta.V
    lbeta: array of shape (nbnodes, subjects),
           the multi-subject statistical maps
    coord: array of shape (nnodes, 3),
          spatial coordinates of the nodes
    dmax: float > 0,
         expected cluster std in the common space in units of coord
    xyz: array of shape (nnodes, 3),
        the grid coordinates of the field
    affine=np.eye(4): array of shape (4, 4),
         coordinate-defining affine transformation
    shape=None: tuple of length 3 defining the size of the grid
        implicit to the discrete ROI definition
    smin=5 (int): minimal size of the regions to validate them
    theta=3.0 (float): first-level threshold
    verbose=0: verbosity mode
    reshuffle=0: if nonzero, reshuffle the positions; this affects bf and gfc

    Returns
    -------
    bf: list of nipy.neurospin.spatial_models.hroi.Nroi instances
       representing individual ROIs;
       let nr be the number of terminal regions across subjects
    gf0: array of shape (nr),
         the mixture-based prior probability
         that the terminal regions are null (false positives)
    sub: array of shape (nr),
         the subject index associated with the terminal regions
    gfc: array of shape (nr, coord.shape[1]),
         the coordinates of the terminal regions
    """
    bf = []
    gfc = []
    gf0 = []
    sub = []
    nsubj = lbeta.shape[1]
    nvox = lbeta.shape[0]

    for s in range(nsubj):
        
        # description in terms of blobs
        beta = np.reshape(lbeta[:,s],(nvox,1))
        Fbeta.set_field(beta)
        nroi = hroi.NROI_from_field(Fbeta, affine, shape, xyz, refdim=0,
                                    th=theta, smin=smin)
              
        if nroi is not None:
            nroi.set_discrete_feature_from_index('activation', beta)
            bfm = nroi.discrete_to_roi_features('activation', 'average')
            bfm = bfm[nroi.isleaf()]

            # get the regions position
            if reshuffle:
                nroi = nroi.reduce_to_leaves()
                ## randomize the positions by taking any local maximum of the image
                #idx, topidx = Fbeta.get_local_maxima()
                #temp = idx[np.argsort(np.random.rand(len(idx)))[:nroi.k]]
                temp = np.argsort(np.random.rand(nvox))[:nroi.k]

                bfc = coord[temp]
                nroi.parents = np.arange(nroi.k)
                nroi.set_roi_feature('position',bfc)
            else:
                nroi.set_discrete_feature_from_index('position',coord)
                bfc = nroi.discrete_to_roi_features('position','average')
                bfc = bfc[nroi.isleaf()]
            gfc.append(bfc)
            
            # compute the prior proba of being null
            beta = np.squeeze(beta)
            beta = beta[beta!=0]

            # use a GMM model...
            alpha = 0.01
            prior_strength = 100
            fixed_scale = True
            bfp = en.three_classes_GMM_fit(beta, bfm, alpha,
                                        prior_strength,verbose, fixed_scale)
            bf0 = bfp[:,1]
            
            ## ... or the emp_null heuristic
            #enn = en.ENN(beta)
            #enn.learn()
            #bf0 = np.reshape(enn.fdr(bfm), np.size(bfm))
            
            gf0.append(bf0)
            sub.append(s*np.ones(np.size(bfm)))

            nroi.set_roi_feature('label',np.arange(nroi.k))
        bf.append(nroi)    
    return bf, gf0, sub, gfc
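
# Downstream use (sketch): the per-subject lists returned above are typically
# flattened into group-level arrays before inference, exactly as done in
# compute_BSA_dev below:
#     sub = np.concatenate(sub).astype(np.int)   # subject index per terminal region
#     gfc = np.concatenate(gfc)                  # terminal-region coordinates
#     gf0 = np.concatenate(gf0)                  # per-region null probability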
Example #4
pl.imshow(gam_gaus_pp[..., 0], cmap=pl.cm.hot)
pl.title('Gamma-Gaussian mixture,\n first component posterior proba.')
pl.colorbar()
pl.subplot(3, 3, 5)
pl.imshow(gam_gaus_pp[..., 1], cmap=pl.cm.hot)
pl.title('Gamma-Gaussian mixture,\n second component posterior proba.')
pl.colorbar()
pl.subplot(3, 3, 6)
pl.imshow(gam_gaus_pp[..., 2], cmap=pl.cm.hot)
pl.title('Gamma-Gaussian mixture,\n third component posterior proba.')
pl.colorbar()

################################################################################
# fit Beta's histogram with a mixture of Gaussians
alpha = 0.01
gaus_mix_pp = en.three_classes_GMM_fit(Beta, None, 
                                       alpha, prior_strength=100)
gaus_mix_pp = np.reshape(gaus_mix_pp, (dimx, dimy, 3))


pl.figure(fig.number)
pl.subplot(3, 3, 7)
pl.imshow(gaus_mix_pp[..., 0], cmap=pl.cm.hot)
pl.title('Gaussian mixture,\n first component posterior proba.')
pl.colorbar()
pl.subplot(3, 3, 8)
pl.imshow(gaus_mix_pp[..., 1], cmap=pl.cm.hot)
pl.title('Gaussian mixture,\n second component posterior proba.')
pl.colorbar()
pl.subplot(3, 3, 9)
pl.imshow(gaus_mix_pp[..., 2], cmap=pl.cm.hot)
pl.title('Gaussian mixture,\n third component posterior proba.')
Example #5
def compute_BSA_dev(Fbeta, lbeta, coord, dmax, xyz, affine=np.eye(4),
                    shape=None, thq=0.9, smin=5, ths=0, theta=3.0, g0=1.0,
                    bdensity=0, verbose=0):
    """
    Compute the Bayesian Structural Activation patterns

    Parameters
    ----------
    Fbeta: nipy.neurospin.graph.field.Field instance,
          a field describing the spatial relationships
          in the dataset. nbnodes = Fbeta.V
    lbeta: array of shape (nbnodes, subjects),
           the multi-subject statistical maps
    coord: array of shape (nnodes, 3),
          spatial coordinates of the nodes
    dmax: float > 0,
         expected cluster std in the common space in units of coord
    xyz: array of shape (nnodes, 3),
        the grid coordinates of the field
    affine=np.eye(4): array of shape (4, 4),
         coordinate-defining affine transformation
    shape=None: tuple of length 3 defining the size of the grid
        implicit to the discrete ROI definition
    thq=0.9 (float): posterior significance threshold; should be in [0, 1]
    smin=5 (int): minimal size of the regions to validate them
    ths=0: threshold used by sbf.build_LR when deriving the landmark regions
    theta=3.0 (float): first-level threshold
    g0=1.0 (float): constant value of the uniform density
       over the (compact) volume of interest
    bdensity=0: if bdensity=1, the variable p in output
                contains the likelihood of the data under H1
                on the set of input nodes
    verbose=0: verbosity mode

    Returns
    -------
    crmap: array of shape (nnodes),
           the resulting group-level labelling of the space
    LR: an instance of sbf.Landmark_regions that describes the ROIs found
        in inter-subject inference;
        if no such thing can be defined, LR is set to None
    bf: list of nipy.neurospin.spatial_models.hroi.Nroi instances
        representing individual ROIs
    p: array of shape (nnodes),
       likelihood of the data under H1 over some sampling grid

    Note
    ----
    This version is probably the best one to date;
    the intra-subject Gamma-Gaussian MM has been replaced by a Gaussian MM,
    which is probably more robust.
    """
    bf = []
    gfc = []
    gf0 = []
    sub = []
    gc = []
    nsubj = lbeta.shape[1]
    nvox = lbeta.shape[0]

    # intra-subject analysis: get the blobs,
    # with their position and their significance
    for s in range(nsubj):       
        # description in terms of blobs
        beta = np.reshape(lbeta[:,s],(nvox,1))
        Fbeta.set_field(beta)
        nroi = hroi.NROI_from_field(Fbeta, affine, shape, xyz, refdim=0,
                                    th=theta,smin=smin)
        bf.append(nroi)
        
        if nroi is not None:
            sub.append(s*np.ones(nroi.k))
            nroi.set_discrete_feature_from_index('activation', beta)
            bfm = nroi.discrete_to_roi_features('activation', 'average')

            # compute the region position
            nroi.set_discrete_feature_from_index('position',coord)
            bfc = nroi.discrete_to_roi_features('position',
                                                'cumulated_average')           
            gfc.append(bfc)

            # compute the prior proba of being null
            beta = np.squeeze(beta)
            beta = beta[beta!=0]
            alpha = 0.01
            prior_strength = 100
            fixed_scale = True
            bfp = en.three_classes_GMM_fit(beta, bfm, alpha,
                                        prior_strength,verbose,fixed_scale)
            bf0 = bfp[:,1]
            gf0.append(bf0)
            
    crmap = -np.ones(nvox, np.int)
    p = np.zeros(nvox)
    if len(sub) < 1:
        return crmap, None, bf, p

    # inter-subject analysis
    # use the DPMM (core part)
    sub = np.concatenate(sub).astype(np.int)
    gfc = np.concatenate(gfc)
    gf0 = np.concatenate(gf0)
    p = np.zeros(nvox)
    g1 = g0
    dof = 0
    prior_precision =  1./(dmax*dmax)*np.ones((1,3), np.int)

    if bdensity:
        spatial_coords = coord
    else:
        spatial_coords = gfc
            
    p, q = fc.fdp(gfc, 0.5, g0, g1, dof, prior_precision, 1 - gf0,
                  sub, 100, spatial_coords, 10, 1000)
    valid = q > thq
    if verbose:
        import matplotlib.pylab as mp
        mp.figure()
        mp.plot(1 - gf0, q, '.')
        print('%d valid regions out of %d' % (np.sum(valid), np.size(valid)))

    # remove non-significant regions
    for s in range(nsubj):
        bfs = bf[s]
        if bfs is not None:
            valids = valid[sub==s]
            valids = bfs.propagate_upward_and(valids)
            bfs.clean(valids)
            bfs.merge_descending()
            
            # re-compute the region position
            bfs.set_discrete_feature_from_index('position',coord)
            bfc = bfs.discrete_to_roi_features('position',
                                               'cumulated_average')
            # Alan's choice
            #beta = np.reshape(lbeta[:,s],(nvox,1))
            #bfsc = coord[bfs.feature_argmax(beta)]
            #bfs.set_roi_feature(bfsc,'position')

    # compute a model of between-regions associations
    gc = _hierarchical_asso(bf,np.sqrt(2)*dmax)

    # Infer the group-level clusters
    if gc == []:
        return crmap, None, bf, p

    # either replicator dynamics or agglomerative clustering
    #u = sbf.segment_graph_rd(gc,1)
    u,cost = average_link_graph_segment(gc,0.1,gc.V*1.0/nsubj)

    q = 0
    for s in range(nsubj):
        if bf[s]!=None:
            bf[s].set_roi_feature('label',u[q:q+bf[s].k])
            q += bf[s].k
    
    LR, mlabel = sbf.build_LR(bf, ths)
    if LR is not None:
        crmap = LR.map_label(coord, pval=0.95, dmax=dmax)

    return crmap, LR, bf, p
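
# Usage sketch (assumption, not part of the module): with Fbeta a Field built
# on the analysis grid and lbeta an (nvox, nsubj) array of statistical maps,
#     crmap, LR, bf, p = compute_BSA_dev(Fbeta, lbeta, coord, dmax, xyz,
#                                        affine=affine, shape=shape, thq=0.9,
#                                        smin=5, theta=3.0, bdensity=1)
# crmap labels every node with its group-level region (-1 for background), and
# LR.map_label(coord, pval=0.95, dmax=dmax) re-projects those labels onto any
# coordinate set, as done at the end of the function.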