Example #1
def parcellate_voronoi_vol(mask, nb_parcels, seeds=None):
    """
    Produce a parcellation from a Voronoi diagram built on random seeds.
    The number of seeds is equal to the number of parcels.
    Seeds are randomly placed within the mask, except on edge positions.

    Args:
        - mask (numpy.ndarray): binary 3D array of valid positions to parcellate
        - nb_parcels (int): the required number of parcels
        - seeds: TODO

    Return:
        - the parcellation (numpy.ndarray): a 3D array of integers
    """
    parcellation = np.zeros(mask.shape, dtype=int)
    nvox = (mask != 0).sum()
    for cc_mask in mg.split_mask_into_cc_iter(mask != 0):
        # compute the required number of parcels within the current CC:
        size_cc = cc_mask.sum()
        cc_np = max(int(np.round(nb_parcels * size_cc / (nvox*1.))), 1)
        pyhrf.verbose(2, 'Treating a connected component (CC) of %d positions' \
                          %cc_mask.sum())
        if cc_mask.sum() < 6:
            continue
        if seeds is None:
            # perform voronoi on random seeds

            eroded_mask = peelVolume3D(cc_mask)
            eroded_mask_size = eroded_mask.sum()
            if eroded_mask_size < nb_parcels:  # do not erode, mask too small
                eroded_mask_size = nvox
                eroded_mask = mask.copy()
            cc_seeds = np.random.randint(0,eroded_mask_size, cc_np)
            mask_for_seed = np.zeros(eroded_mask_size, dtype=int)
            mask_for_seed[cc_seeds] = 1
            mask_for_seed = expand_array_in_mask(mask_for_seed, eroded_mask)
        else:
            mask_for_seed = seeds * cc_mask

        pyhrf.verbose(2, 'Nb of seeds in current CC: %d' \
                          %mask_for_seed.sum())
        cc_parcellation = voronoi(np.vstack(np.where(cc_mask)).T,
                                  np.vstack(np.where(mask_for_seed)).T) + 1
        pyhrf.verbose(3, 'CC parcellation labels: %s' \
                          %str(np.unique(cc_parcellation)))
        maxp = parcellation.max()
        parcellation += expand_array_in_mask(cc_parcellation + maxp, cc_mask)
        pyhrf.verbose(3, 'Current parcellation labels: %s' \
                          %str(np.unique(parcellation)))
    pyhrf.verbose(1, 'voronoi parcellation: %s, %s' \
                      %(str(parcellation.shape), str(parcellation.dtype)))

    return parcellation
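
A minimal usage sketch (not part of the original source): it assumes parcellate_voronoi_vol is importable from pyhrf.parcellation as in the pyhrf source tree, and uses a synthetic binary mask in place of real fMRI data.

import numpy as np
from pyhrf.parcellation import parcellate_voronoi_vol  # assumed import path

# synthetic binary mask: a block of valid positions inside a small volume
mask = np.zeros((12, 12, 12), dtype=int)
mask[1:11, 1:11, 1:11] = 1

np.random.seed(0)  # seeds are drawn with np.random.randint, so fix the RNG for reproducibility
parcellation = parcellate_voronoi_vol(mask, nb_parcels=8)

print(parcellation.shape)       # (12, 12, 12), same shape as the mask
print(np.unique(parcellation))  # 0 for background plus the parcel labels (up to 8 here)
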
Example #2
def parcellate_voronoi_vol(mask, nb_parcels, seeds=None):
    """
    Produce a parcellation from a Voronoi diagram built on random seeds.
    The number of seeds is equal to the number of parcels.
    Seeds are randomly placed within the mask, except on edge positions.

    Args:
        - mask (numpy.ndarray): binary 3D array of valid positions to parcellate
        - nb_parcels (int): the required number of parcels
        - seeds: TODO

    Return:
        - the parcellation (numpy.ndarray): a 3D array of integers
    """
    parcellation = np.zeros(mask.shape, dtype=int)
    nvox = (mask != 0).sum()
    for cc_mask in mg.split_mask_into_cc_iter(mask != 0):
        # compute the required number of parcels within the current CC:
        size_cc = cc_mask.sum()
        cc_np = max(int(np.round(nb_parcels * size_cc / (nvox * 1.))), 1)
        logger.info('Treating a connected component (CC) of %d positions',
                    cc_mask.sum())
        if cc_mask.sum() < 6:
            continue
        if seeds is None:
            # perform voronoi on random seeds

            eroded_mask = peelVolume3D(cc_mask)
            eroded_mask_size = eroded_mask.sum()
            if eroded_mask_size < nb_parcels:  # do not erode, mask too small
                eroded_mask_size = nvox
                eroded_mask = mask.copy()
            cc_seeds = np.random.randint(0, eroded_mask_size, cc_np)
            mask_for_seed = np.zeros(eroded_mask_size, dtype=int)
            mask_for_seed[cc_seeds] = 1
            mask_for_seed = expand_array_in_mask(mask_for_seed, eroded_mask)
        else:
            mask_for_seed = seeds * cc_mask

        logger.info('Nb of seeds in current CC: %d', mask_for_seed.sum())
        cc_parcellation = voronoi(
            np.vstack(np.where(cc_mask)).T,
            np.vstack(np.where(mask_for_seed)).T) + 1
        logger.info('CC parcellation labels: %s',
                    str(np.unique(cc_parcellation)))
        maxp = parcellation.max()
        parcellation += expand_array_in_mask(cc_parcellation + maxp, cc_mask)
        logger.info('Current parcellation labels: %s',
                    str(np.unique(parcellation)))
    logger.info('voronoi parcellation: %s, %s', str(parcellation.shape),
                str(parcellation.dtype))

    return parcellation
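
The seeds argument is only marked TODO in the docstring, but the branch mask_for_seed = seeds * cc_mask shows it is treated as a binary 3D array of the same shape as mask whose non-zero voxels become the Voronoi sites. A hedged sketch of that usage (the import path and array sizes are assumptions):

import numpy as np
from pyhrf.parcellation import parcellate_voronoi_vol  # assumed import path

mask = np.zeros((12, 12, 12), dtype=int)
mask[1:11, 1:11, 1:11] = 1

# binary seed map with the same shape as the mask: one non-zero voxel per desired parcel
seeds = np.zeros_like(mask)
seeds[3, 3, 3] = 1
seeds[8, 8, 8] = 1

# with explicit seeds the random-seeding branch is bypassed and the
# Voronoi sites are exactly the non-zero voxels of `seeds`
parcellation = parcellate_voronoi_vol(mask, nb_parcels=2, seeds=seeds)
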
def perm_prfx(domain, graphs, features, nb_parcel, ldata, initial_mask=None,
              nb_perm=100, niter=5, dmax=10., lamb=100.0, chunksize=1.e5,
              verbose=1):
    """
    caveat: assumes that the functional dimension is 1
    """
    from ..utils.reproducibility_measures import ttest
    # permutations for the assessment of the results
    prfx0 = []
    adim = domain.coord.shape[1]
    nb_subj = len(ldata)
    for q in range(nb_perm):
        feature = []
        sldata = []
        for s in range(nb_subj):
            lf = features[s].copy()
            swap = (rand() > 0.5) * 2 - 1
            lf[:, 0:-adim] = swap * lf[:, 0:-adim]
            sldata.append(swap * ldata[s])
            feature.append(lf)

        # optimization part
        all_labels, proto_anat = _optim_hparcel(
            feature, domain, graphs, nb_parcel, lamb, dmax, niter,
            initial_mask, chunksize=chunksize)
        labels = - np.ones((domain.size, nb_subj)).astype(int)
        for s in range(nb_subj):
            labels[initial_mask[:, s] > -1, s] = all_labels[s]

        # compute the group-level labels
        template_labels = voronoi(domain.coord, proto_anat)

        # create the parcellation
        pcl = MultiSubjectParcellation(domain, individual_labels=labels,
                                       template_labels=template_labels)
        pdata = pcl.make_feature('functional',
                                 np.rollaxis(np.array(ldata), 1, 0))
        prfx = ttest(np.squeeze(pdata))
        if verbose:
            print(q, prfx.max(0))
        prfx0.append(prfx.max(0))

    return prfx0
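
A small self-contained sketch of the sign-flip scheme at the heart of perm_prfx (toy array sizes are assumptions): each subject's data is multiplied by a random +1 or -1, which under a symmetric null hypothesis leaves the distribution of the group statistic unchanged and so yields a permutation null for the pseudo-t maxima collected in prfx0.

import numpy as np
from numpy.random import rand

data = np.random.randn(5, 100)       # 5 subjects, 100 voxels (toy sizes)
swaps = (rand(5) > 0.5) * 2 - 1      # one random +1/-1 per subject
swapped = swaps[:, None] * data      # sign-flipped data, as in the loop above
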
def hparcel(domain, ldata, nb_parcel, nb_perm=0, niter=5, mu=10., dmax=10.,
            lamb=100.0, chunksize=1.e5, verbose=0, initial_mask=None):
    """
    Function that performs the parcellation by optimizing the
    inter-subject similarity while retaining the connectedness
    within subject and some consistency across subjects.

    Parameters
    ----------
    domain: discrete_domain.DiscreteDomain instance,
            yields all the spatial information on the parcelled domain
    ldata: list of (n_subj) arrays of shape (domain.size, dim)
           the feature data used to inform the parcellation
    nb_parcel: int,
               the number of parcels
    nb_perm: int, optional,
             the number of times the parcellation and prfx
             computation is performed on sign-swapped data
    niter: int, optional,
           number of iterations used to reach convergence
           of the clustering algorithm
    mu: float, optional,
        relative weight of anatomical information
    dmax: float, optional,
          radius of allowed deformations
    lamb: float, optional,
          parameter to control the relative importance of space vs function
    chunksize: int, optional,
               number of points used in internal sub-sampling
    verbose: bool, optional,
             verbosity mode
    initial_mask: array of shape (domain.size, nb_subj), optional
                  initial subject-dependent masking of the domain

    Returns
    -------
    pcl: the resulting parcellation structure appended with the labelling
    """
    # various parameters
    nbvox = domain.size
    nb_subj = len(ldata)
    if initial_mask is None:
        initial_mask = np.ones((nbvox, nb_subj), int)

    graphs = []
    feature = []

    for s in range(nb_subj):
        # build subject-specific models of the data
        lnvox = np.sum(initial_mask[:, s] > -1)
        lac = domain.coord[initial_mask[:, s] > -1]
        beta = np.reshape(ldata[s], (lnvox, ldata[s].shape[1]))
        lf = np.hstack((beta, mu * lac / (1.e-15 + np.std(domain.coord, 0))))
        feature.append(lf)
        g = wgraph_from_coo_matrix(domain.topology)
        g.remove_trivial_edges()
        graphs.append(g)

    # main function
    all_labels, proto_anat = _optim_hparcel(
        feature, domain, graphs, nb_parcel, lamb, dmax, niter, initial_mask,
        chunksize=chunksize, verbose=verbose)

    # write the individual labelling
    labels = - np.ones((nbvox, nb_subj)).astype(int)
    for s in range(nb_subj):
        labels[initial_mask[:, s] > -1, s] = all_labels[s]

    # compute the group-level labels
    template_labels = voronoi(domain.coord, proto_anat)

    # create the parcellation
    pcl = MultiSubjectParcellation(domain, individual_labels=labels,
                                   template_labels=template_labels,
                                   nb_parcel=nb_parcel)
    pcl.make_feature('functional', np.rollaxis(np.array(ldata), 1, 0))

    if nb_perm > 0:
        prfx0 = perm_prfx(domain, graphs, feature, nb_parcel, ldata,
                          initial_mask, nb_perm, niter, dmax, lamb, chunksize)
        return pcl, prfx0
    else:
        return pcl
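
A minimal end-to-end sketch (not part of the original source), assuming the nipy layout these functions come from: grid_domain_from_binary_array in nipy.labs.spatial_models.discrete_domain and hparcel in nipy.labs.spatial_models.hierarchical_parcellation, with purely synthetic per-subject features standing in for real data.

import numpy as np
# assumed import paths, matching the nipy source tree these functions come from
from nipy.labs.spatial_models.discrete_domain import grid_domain_from_binary_array
from nipy.labs.spatial_models.hierarchical_parcellation import hparcel

# synthetic 3D domain: every voxel of a small grid is a valid position
mask = np.ones((10, 10, 10))
domain = grid_domain_from_binary_array(mask)

# one (domain.size, dim) functional feature array per subject (toy data)
n_subj, dim = 5, 1
ldata = [np.random.randn(domain.size, dim) for _ in range(n_subj)]

# parcellation into 10 parcels; with nb_perm=0 only the parcellation is returned
pcl = hparcel(domain, ldata, nb_parcel=10)
print(pcl.nb_parcel)

# with nb_perm > 0 the sign-flip permutation loop (perm_prfx above) also runs
# and the call returns the parcellation together with the permutation maxima:
# pcl, prfx0 = hparcel(domain, ldata, nb_parcel=10, nb_perm=20)
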