Example 1
import numpy.random as nr

from nipy.neurospin.clustering.bgmm import VBGMM


def test_vbgmm(verbose=0):
    # perform the estimation of a gmm
    n = 100
    dim = 2
    x = nr.randn(n, dim)
    x[:30] += 2
    k = 2
    b = VBGMM(k, dim)
    b.guess_priors(x)
    b.initialize(x)
    b.estimate(x, verbose=verbose)
    z = b.map_label(x)

    # fixme : find a less trivial test
    assert z.max() + 1 == b.k
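
A fitted model can also score data it was not trained on. The sketch below is
a minimal, non-authoritative example reusing only calls that appear in these
snippets; normalizing the per-component likelihood values row-wise into class
responsibilities follows the same pattern as Example 3 below.

import numpy.random as nr

from nipy.neurospin.clustering.bgmm import VBGMM

x = nr.randn(100, 2)
x[:30] += 2
xnew = nr.randn(20, 2)   # held-out samples to score

b = VBGMM(2, 2)
b.guess_priors(x)
b.initialize(x)
b.estimate(x)

# per-sample, per-component likelihood, normalized across components
lik = b.likelihood(xnew)
resp = (lik.T / lik.sum(1)).T
labels = resp.argmax(1)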
Example 2
import numpy as np
import numpy.random as nr

from nipy.neurospin.clustering.bgmm import VBGMM


def test_vbgmm_select(kmax=6, verbose=0):
    # perform the estimation of a gmm
    n = 100
    dim = 3
    x = nr.randn(n, dim)
    x[:30] += 2
    be = -np.inf
    for k in range(1, kmax):
        b = VBGMM(k, dim)
        b.guess_priors(x)
        b.initialize(x)
        b.estimate(x)
        z = b.map_label(x)
        ek = b.evidence(x)
        if verbose:
            print(k, ek)
        if ek > be:
            be = ek
            bestk = k
    # the data contain two shifted blobs, so the evidence should peak at k <= 2
    assert bestk < 3
Example 3
import numpy as np


def three_classes_GMM_fit(x, test=None, alpha=0.01, prior_strength=100,
                          verbose=0, fixed_scale=False, mpaxes=None, bias=0,
                          theta=0, return_estimator=False):
    """
     Fit the data with a 3-classes Gaussian Mixture Model,
    i.e. computing some probability that the voxels of a certain map
    are in class disactivated, null or active

    
    Parameters
    ----------
    x array of shape (nvox,1): the map to be analysed
    test=None array of shape(nbitems,1):
      the test values for which the p-value needs to be computed
      by default, test=x
    alpha = 0.01 the prior weights of the positive and negative classes
    prior_strength = 100 the confidence on the prior
                   (should be compared to size(x))
    verbose=0 : verbosity mode
    fixed_scale = False, boolean, variance parameterization
                if True, the variance is locked to 1
                otherwise, it is estimated from the data
    mpaxes=None: axes handle used to plot the figure in verbose mode
                 if None, new axes are created
    bias = 0: allows a recaling of the posterior probability
         that takes into account the thershold theta. Not rigorous.
    theta = 0 the threshold used to correct the posterior p-values
          when bias=1; normally, it is such that test>theta
          note that if theta = -np.infty, the method has a standard behaviour
    return_estimator: boolean, optional
            If return_estimator is true, the estimator object is
            returned.
    
    Results
    -------
    bfp : array of shape (nbitems,3):
        the posterior probability of each test item belonging to each component
        in the GMM (sum to 1 across the 3 classes)
        if np.size(test)==0, i.e. nbitem==0, None is returned
    estimator : nipy.neurospin.clustering.GMM object
        The estimator object, returned only if return_estimator is true.

    Note
    ----
    Our convention is that
    - class 1 represents the negative class
    - class 2 represenst the null class
    - class 3 represents the positsive class
    """
    nvox = np.size(x)
    x = np.reshape(x, (nvox, 1))
    if test is None:
        test = x
    if np.size(test) == 0:
        return None
    
    from nipy.neurospin.clustering.bgmm import VBGMM
    from nipy.neurospin.clustering.gmm import grid_descriptor

    sx = np.sort(x, 0)
    nclasses = 3
    
    # set the priors from a reasonable model of the data (!)

    # prior means
    mb0 = np.mean(sx[:int(alpha * nvox)])
    mb2 = np.mean(sx[int((1 - alpha) * nvox):])
    prior_means = np.reshape(np.array([mb0, 0, mb2]), (nclasses, 1))
    if fixed_scale:
        prior_scale = np.ones((nclasses, 1, 1)) * 1. / prior_strength
    else:
        prior_scale = np.ones((nclasses, 1, 1)) * 1. / (prior_strength * np.var(x))
    prior_dof = np.ones(nclasses) * prior_strength
    prior_weights = np.array([alpha, 1 - 2 * alpha, alpha]) * prior_strength
    prior_shrinkage = np.ones(nclasses) * prior_strength

    # instantiate the class and set the priors
    BayesianGMM = VBGMM(nclasses, 1, prior_means, prior_scale,
                        prior_weights, prior_shrinkage, prior_dof)
    BayesianGMM.set_priors(prior_means, prior_weights, prior_scale,
                           prior_dof, prior_shrinkage)

    # estimate the model
    BayesianGMM.estimate(x, delta=1.e-8, verbose=verbose)

    # create a sampling grid
    if verbose or bias:
        gd = grid_descriptor(1)
        gd.getinfo([x.min(), x.max()], 100)
        gdm = gd.make_grid().squeeze()
        lj = BayesianGMM.likelihood(gd.make_grid())

    # compute the per-component likelihood of the test values
    bfp = BayesianGMM.likelihood(test)
    if bias:
        # rescale using the mass of each component above the threshold theta
        lw = np.sum(lj[gdm > theta], 0)
        weights = BayesianGMM.weights / BayesianGMM.weights.sum()
        bfp = (lw / weights) * BayesianGMM.slikelihood(test)

    if verbose > 1:
        BayesianGMM.show_components(x, gd, lj, mpaxes)

    # normalize rows so that the three class probabilities sum to 1
    bfp = (bfp.T / bfp.sum(1)).T
    if not return_estimator:
        return bfp
    else:
        return bfp, BayesianGMM
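
Assuming the function above is importable, here is a minimal usage sketch on a
simulated statistical map; the data and the alpha/prior_strength settings are
illustrative only, not prescriptive.

import numpy as np

# simulated map: mostly null voxels plus a small activated tail
x = np.random.randn(1000)
x[:50] += 4

# posterior probability of (negative, null, positive) for each voxel
bfp = three_classes_GMM_fit(x, alpha=0.01, prior_strength=100)
print(bfp.shape)        # (1000, 3); each row sums to 1
active_posterior = bfp[:, 2]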
Example 4
import numpy.random as nr

# VBGMM's path appears in Example 3; BGMM is assumed to live in the same module
from nipy.neurospin.clustering.bgmm import BGMM, VBGMM


def test_evidence(verbose=0, k=1):
    """
    Compare the evidence estimated by Chib's method
    with the variational evidence (free energy)
    fixme : this one really takes time
    """
    n = 50
    dim = 2
    x = nr.randn(n, dim)
    x[:15] += 3
    
    b = VBGMM(k, dim)
    b.guess_priors(x)
    b.initialize(x)
    b.estimate(x)
    vbe = b.evidence(x)
    if verbose:
        print('vb:', vbe)

    niter = 1000
    b = BGMM(k, dim)
    b.guess_priors(x)
    b.initialize(x)
    b.sample(x, 100)  # initial sampling pass; result discarded
    w, cent, prec, pz = b.sample(x, niter=niter, mem=1)
    bplugin = BGMM(k, dim, cent, prec, w)
    bplugin.guess_priors(x)
    bfchib = bplugin.bayes_factor(x, pz.astype(int), 1)
    if verbose:
        print(' chib:', bfchib)
    assert bfchib > vbe
Example 5
import numpy.random as nr

# VBGMM's path appears in Example 3; BGMM is assumed to live in the same module
from nipy.neurospin.clustering.bgmm import BGMM, VBGMM


def test_evidence(verbose=0, k=1):
    # Compare the evidence estimated by Chib's method with the
    # variational evidence (free energy) fixme : this one really takes
    # time
    n = 100
    dim = 2
    x = nr.randn(n, dim)
    x[:30] += 3

    b = VBGMM(k, dim)
    b.guess_priors(x)
    b.initialize(x)
    b.estimate(x)
    vbe = b.evidence(x)
    if verbose:
        print("vb: ", vbe)

    niter = 1000
    b = BGMM(k, dim)
    b.guess_priors(x)
    b.initialize(x)
    b.sample(x, 100)
    w, cent, prec, pz = b.sample(x, niter=niter, mem=1)
    bplugin = BGMM(k, dim, cent, prec, w)
    bplugin.guess_priors(x)
    bfchib = bplugin.Bfactor(x, pz.astype(int), 1)
    if verbose:
        print(" chib:", bfchib)
    assert bfchib > vbe