def isolated(XYZ, k=18):
    """
    Return the indices of isolated points, given their integer coordinates
    XYZ (3, n) and the k-connectivity system, k = 6, 18 or 26.
    """
    label = wgraph_from_3d_grid(XYZ.T, k).cc()
    # Isolated points
    ncc = label.max() + 1
    p = XYZ.shape[1]
    size = np.zeros(ncc, float)
    ones = np.ones((p, 1), float)
    add_lines(ones, size.reshape(ncc, 1), label)
    return np.where(size[label] == 1)[0]
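
A minimal usage sketch for `isolated` (the toy coordinates below are made up for illustration, and `isolated` together with its nipy dependencies, `wgraph_from_3d_grid` and `add_lines`, is assumed to be importable as above):

import numpy as np

# Two 6-connected voxels plus one voxel far away from both.
XYZ = np.array([[0, 1, 10],
                [0, 0, 10],
                [0, 0, 10]])      # shape (3, n)

idx = isolated(XYZ, k=18)
print(idx)                        # expected: array([2]), the lone voxel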
 def test_onesample_graph(self):
     data, vardata, XYZ = make_data()
     G = wgraph_from_3d_grid(XYZ.T)
     # rfx calibration
     P = pt.permutation_test_onesample_graph(data, G, ndraws=ndraws)
     c = [(P.random_Tvalues[int(P.ndraws * 0.95)], None)]
     r = np.ones(data.shape[1], int)
     r[data.shape[1] // 2:] *= 10
     # mfx calibration
     P = pt.permutation_test_onesample_graph(
         data, G, vardata=vardata, stat_id="student_mfx", ndraws=ndraws)
     p_values, cluster_results, region_results = P.calibrate(
         nperms=nperms, clusters=c, regions=[r])
Example #5
def smatrix_from_3d_idx(ijk, nn=18):
    """Create a sparse adjacency matrix from 3d index system

    Parameters
    ----------
    ijk: array of shape (n_samples, 3), type int
        integer indices of positions in a 3d grid
    nn: int, optional
        3d neighboring system to be chosen within {6, 18, 26}

    Returns
    -------
    coo_mat: a sparse coo matrix,
             adjacency of the neighboring system
    """
    G = wgraph_from_3d_grid(ijk, nn)
    return G.to_coo_matrix()
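
A small usage sketch (the 2x2x1 block of voxel indices is illustrative; `smatrix_from_3d_idx` and its nipy dependencies are assumed importable):

import numpy as np

# Four voxels forming a 2x2x1 block.
ijk = np.array([[0, 0, 0],
                [1, 0, 0],
                [0, 1, 0],
                [1, 1, 0]])

adj = smatrix_from_3d_idx(ijk, nn=18)
print(adj.shape)       # (4, 4)
print(adj.toarray())   # nonzero entries link 18-neighbouring voxels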
Example #6
def extract_clusters_from_thresh(T, XYZ, th, k=18):
    """
    Extract clusters from a statistical map
    above a specified threshold.
    In:  T      (p)     statistical map
         XYZ    (3,p)   voxel coordinates
         th     <float> threshold
         k      <int>   the number of neighbours considered (6, 18 or 26)
    Out: labels (p)     cluster labels (-1 for sub-threshold voxels)
    """
    labels = -np.ones(len(T), int)
    I = np.where(T >= th)[0]
    if len(I) > 0:
        SupraThreshXYZ = XYZ[:, I]
        CC_label = wgraph_from_3d_grid(SupraThreshXYZ.T, k).cc()
        labels[I] = CC_label
    return labels
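
A toy call illustrating the labelling convention (the data are made up, and the exact ordering of cluster labels may differ):

import numpy as np

# Five voxels on a line; the middle one is sub-threshold, which splits
# the supra-threshold voxels into two connected components.
XYZ = np.array([[0, 1, 2, 3, 4],
                [0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0]])
T = np.array([3.2, 3.5, 0.1, 4.0, 4.1])

labels = extract_clusters_from_thresh(T, XYZ, th=3.0, k=18)
print(labels)   # e.g. [ 0  0 -1  1  1]: two clusters, -1 for the sub-threshold voxel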
Example #9
def _extract_clusters_from_diam(labels, T, XYZ, th, diam, k, nCC, CClabels):
    """
    Helper that fills the `labels` input array in place; sub-regions that
    cannot meet the diameter constraint are handled by recursing through
    extract_clusters_from_diam.
    """
    clust_label = 0
    for i in range(nCC):
        # print "Searching connected component ", i, " out of ", nCC
        I = np.where(CClabels == i)[0]
        extCC = len(I)
        if extCC <= (diam + 1) ** 3:
            diamCC = max_dist(XYZ, I, I)
        else:
            diamCC = diam + 1
        if diamCC <= diam:
            labels[I] = np.zeros(extCC, int) + clust_label
            # print "cluster ", clust_label, ", diam = ", diamCC
            # print "ext = ", len(I), ", diam = ", max_dist(XYZ,I,I)
            clust_label += 1
        else:
            # build the field
            p = len(T[I])
            F = field_from_graph_and_data(wgraph_from_3d_grid(XYZ[:, I].T, k), np.reshape(T[I], (p, 1)))
            # compute the blobs
            idx, parent, label = F.threshold_bifurcations(0, th)
            nidx = np.size(idx)
            height = np.array([np.ceil(np.sum(label == j) ** (1.0 / 3)) for j in np.arange(nidx)])
            # root = nidx-1
            root = np.where(np.arange(nidx) == parent)[0]
            # Can constraint be met within current region?
            Imin = I[T[I] >= height[root]]
            extmin = len(Imin)
            if extmin <= (diam + 1) ** 3:
                dmin = max_dist(XYZ, Imin, Imin)
            else:
                dmin = diam + 1
            if dmin <= diam:  # If so, search for the largest cluster meeting the constraint
                Iclust = Imin  # Smallest cluster
                J = I[T[I] < height[root]]  # Remaining voxels
                argsortTJ = np.argsort(T[J])[::-1]  # Sorted by decreasing T values
                l = 0
                L = np.array([J[argsortTJ[l]]], int)
                diameter = dmin
                new_diameter = max(dmin, max_dist(XYZ, Iclust, L))
                while new_diameter <= diam:
                    # print "diameter = " + str(new_diameter)
                    # sys.stdout.flush()
                    Iclust = np.concatenate((Iclust, L))
                    diameter = new_diameter
                    # print "diameter = ", diameter
                    l += 1
                    L = np.array([J[argsortTJ[l]]], int)
                    new_diameter = max(diameter, max_dist(XYZ, Iclust, L))
                labels[Iclust] = np.zeros(len(Iclust), int) + clust_label
                # print "cluster ", clust_label, ", diam = ", diameter
                # print "ext = ", len(Iclust), ", diam = ", max_dist(XYZ,Iclust,Iclust)
                clust_label += 1
            else:  # If not, search inside sub-regions
                # print "Searching inside sub-regions "
                Irest = I[T[I] > height[root]]
                rest_labels = extract_clusters_from_diam(T[Irest], XYZ[:, Irest], th, diam, k)
                rest_labels[rest_labels >= 0] += clust_label
                clust_label = rest_labels.max() + 1
                labels[Irest] = rest_labels
    return labels
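
A sketch of how this helper is driven (it is normally called from the public extract_clusters_from_diam shown below; the toy data are made up, and extract_clusters_from_thresh from the earlier example is assumed available):

import numpy as np

XYZ = np.array([[0, 1, 2, 3, 4],
                [0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0]])
T = np.array([3.2, 3.5, 0.1, 4.0, 4.1])
th, diam, k = 3.0, 5, 18

CClabels = extract_clusters_from_thresh(T, XYZ, th, k)   # supra-threshold components
nCC = CClabels.max() + 1
labels = -np.ones(len(CClabels), int)                    # -1 marks unclustered voxels
_extract_clusters_from_diam(labels, T, XYZ, th, diam, k, nCC, CClabels)
print(labels)   # e.g. [ 0  0 -1  1  1]: both components already satisfy the diameter bound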
Example #10
def extract_clusters_from_diam(T, XYZ, th, diam, k=18):
    """
    Extract clusters from a statistical map
    under a diameter constraint
    and above a given threshold.
    In:  T      (p)     statistical map
         XYZ    (3,p)   voxel coordinates
         th     <float> minimum threshold
         diam   <int>   maximal diameter (in voxels)
         k      <int>   the number of neighbours considered (6, 18 or 26)
    Out: labels (p)     cluster labels (-1 for voxels left unclustered)
    """
    CClabels = extract_clusters_from_thresh(T, XYZ, th, k)
    nCC = CClabels.max() + 1
    labels = -np.ones(len(CClabels), int)
    clust_label = 0
    for i in range(nCC):
        #print "Searching connected component ", i, " out of ", nCC
        I = np.where(CClabels == i)[0]
        extCC = len(I)
        if extCC <= (diam + 1)**3:
            diamCC = max_dist(XYZ, I, I)
        else:
            diamCC = diam + 1
        if diamCC <= diam:
            labels[I] = np.zeros(extCC, int) + clust_label
            #print "cluster ", clust_label, ", diam = ", diamCC
            #print "ext = ", len(I), ", diam = ", max_dist(XYZ,I,I)
            clust_label += 1
        else:
            # build the field
            p = len(T[I])
            F = field_from_graph_and_data(wgraph_from_3d_grid(XYZ[:, I].T, k),
                                          np.reshape(T[I], (p, 1)))
            # compute the blobs
            idx, parent, label = F.threshold_bifurcations(0, th)
            nidx = np.size(idx)
            height = np.array([
                np.ceil(np.sum(label == j)**(1. / 3)) for j in np.arange(nidx)
            ])
            #root = nidx-1
            root = np.where(np.arange(nidx) == parent)[0]
            # Can constraint be met within current region?
            Imin = I[T[I] >= height[root]]
            extmin = len(Imin)
            if extmin <= (diam + 1)**3:
                dmin = max_dist(XYZ, Imin, Imin)
            else:
                dmin = diam + 1
            if dmin <= diam:  # If so, search for the largest cluster meeting the constraint
                Iclust = Imin  # Smallest cluster
                J = I[T[I] < height[root]]  # Remaining voxels
                argsortTJ = np.argsort(
                    T[J])[::-1]  # Sorted by decreasing T values
                l = 0
                L = np.array([J[argsortTJ[l]]], int)
                diameter = dmin
                new_diameter = max(dmin, max_dist(XYZ, Iclust, L))
                while new_diameter <= diam:
                    #print "diameter = " + str(new_diameter)
                    #sys.stdout.flush()
                    Iclust = np.concatenate((Iclust, L))
                    diameter = new_diameter
                    #print "diameter = ", diameter
                    l += 1
                    L = np.array([J[argsortTJ[l]]], int)
                    new_diameter = max(diameter, max_dist(XYZ, Iclust, L))
                labels[Iclust] = np.zeros(len(Iclust), int) + clust_label
                #print "cluster ", clust_label, ", diam = ", diameter
                #print "ext = ", len(Iclust), ", diam = ", max_dist(XYZ,Iclust,Iclust)
                clust_label += 1
            else:  # If not, search inside sub-regions
                #print "Searching inside sub-regions "
                Irest = I[T[I] > height[root]]
                rest_labels = extract_clusters_from_diam(
                    T[Irest], XYZ[:, Irest], th, diam, k)
                rest_labels[rest_labels >= 0] += clust_label
                clust_label = rest_labels.max() + 1
                labels[Irest] = rest_labels
    return labels
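
And a direct call to the public function (values are illustrative; with a tighter diam the else branch above would split oversized components into smaller clusters):

import numpy as np

XYZ = np.array([[0, 1, 2, 3, 4, 5],
                [0, 0, 0, 0, 0, 0],
                [0, 0, 0, 0, 0, 0]])
T = np.array([2.5, 3.1, 3.6, 4.2, 3.0, 0.5])

labels = extract_clusters_from_diam(T, XYZ, th=3.0, diam=10, k=18)
print(labels)   # e.g. [-1  0  0  0  0 -1]: one cluster, sub-threshold voxels stay -1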