Example #1
def mesh_average(filenames, fun=np.add, output_surfstat=False):
    """Average, minimum, or maximum of surfaces.

    Args:
        filenames (2D numpy array): Array of filenames of surfaces or BSPolyData
            objects. Surfaces within a row are concatenated; fun is applied
            across rows to combine them.

        fun (function): Function handle to apply to two surfaces, e.g.
            np.add (default) will give the average of the surfaces,
            np.fmin or np.fmax will give the min or max, respectively.

        output_surfstat (boolean): If True, outputs the surface in SurfStat format. If False,
            outputs the surface as BSPolyData. Default is False.

    Returns:
        surface (BSPolyData or dict): The output surface.
    """

    if filenames.ndim != 2:
        raise ValueError("Filenames must be a 2-dimensional array.")

    for i in range(0, filenames.shape[0]):
        surfaces = np.empty(filenames.shape[1], dtype=object)
        for j in range(0, filenames.shape[1]):

            # Check whether input is BSPolyData or a filename.
            if isinstance(filenames[i, j], BSPolyData):
                surfaces[j] = filenames[i, j]
            else:
                surfaces[j] = read_surface(filenames[i, j])

            # Concatenate second dimension of filenames.
            if j == 0:
                tri = get_cells(surfaces[j])
                coord = get_points(surfaces[j])
            else:
                tri = np.concatenate(
                    (tri, get_cells(surfaces[j]) + coord.shape[0]), axis=0
                )
                coord = np.concatenate((coord, get_points(surfaces[j])), axis=0)

        if i == 0:
            m = 1
            coord_all = coord
        else:
            coord_all = fun(coord_all, coord)
            m = fun(m, 1)

    coord_all = coord_all / m

    if output_surfstat:
        surface = {"tri": np.array(tri) + 1, "coord": np.array(coord_all).T}
    else:
        surface = build_polydata(coord_all, tri)

    return surface
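
A minimal usage sketch (the file names below are hypothetical; each row holds one subject's surfaces, and rows are averaged):

import numpy as np

filenames = np.array([["sub-01_lh.surf.gii", "sub-01_rh.surf.gii"],
                      ["sub-02_lh.surf.gii", "sub-02_rh.surf.gii"]])
avg_surf = mesh_average(filenames)               # vertex-wise average
min_surf = mesh_average(filenames, fun=np.fmin)  # vertex-wise minimum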
Example #2
def slm2files(slm, basename, test_num):
    """Converts an SLM to its output files.

    Parameters
    ----------
    slm : brainstat.stats.SLM
        SLM object.
    basename : str
        Base name for the file.
    test_num : int
        Number of the test.
    """
    D = slm2dict(slm)
    D.pop("model")
    D.pop("contrast")
    if "_surf" in D and isinstance(D["_surf"], BSPolyData):
        D["surf"] = {
            "tri": np.array(get_cells(D["_surf"])),
            "coord": np.array(get_points(D["_surf"])).T,
        }
        D.pop("_surf")
        D.pop("_tri")

    filename = datadir(basename + "_" + f"{test_num:02d}" + "_OUT.pkl")
    with open(filename, "wb") as f:
        pickle.dump(D, f, protocol=4)
Example #3
def generate_tests():
    pial_fs5 = datasets.fetch_surf_fsaverage()["pial_left"]
    pial_surf = read_surface_gz(pial_fs5)
    tri = np.array(get_cells(pial_surf)) + 1

    np.random.seed(0)
    data = {"tri": tri, "Y": np.random.uniform(-1, 1, (1, 10242)), "FWHM": 3}
    O = generate_smooth_out(data)
    params2files(data, O, 1)
Example #4
File: SLM.py Project: MICA-MNI/BrainStat
@surf.setter
def surf(self, value):
    # Property setter: dispatches on the type of the assigned surface.
    self._surf = value
    if self.surf is not None:
        if isinstance(self.surf, BSPolyData):
            self.tri = np.array(get_cells(self.surf)) + 1
            self.coord = np.array(get_points(self.surf)).T
        elif isinstance(self.surf, Nifti1Image):
            self.lat = self.surf.get_fdata() != 0
        else:
            if "tri" in value:
                self.tri = value["tri"]
                self.coord = value["coord"]
            elif "lat" in value:
                self.lat = value["lat"]
                self.coord = value["coord"]
Example #5
def dict2pkl(D, basename, test_num, input=True):
    if "surf" in D and D["surf"] is not None:
        D["surf"] = {
            "tri": np.array(get_cells(D["surf"])),
            "coord": np.array(get_points(D["surf"])).T,
        }

    if "_tri" in D:
        D.pop("_tri")

    if "_surf" in D and D["_surf"] is not None:
        D["surf"] = {
            "tri": np.array(get_cells(D["_surf"])),
            "coord": np.array(get_points(D["_surf"])),
        }
        D.pop("_surf")

    if input:
        stage = "IN"
    else:
        stage = "OUT"
    filename = datadir(basename + "_" + f"{test_num:02d}" + "_" + stage + ".pkl")
    with open(filename, "wb") as f:
        pickle.dump(D, f, protocol=4)
Example #6
def matlab_SurfStatEdg(surf):

    from brainspace.vtk_interface.wrappers.data_object import BSPolyData
    from brainspace.mesh.mesh_elements import get_cells

    if isinstance(surf, BSPolyData):
        surf_mat = {'tri': np.array(get_cells(surf)) + 1}
    else:
        surf_mat = surf.copy()

    for key in surf_mat.keys():
        if np.ndim(surf_mat[key]) == 0:
            surf_mat[key] = surfstat_eng.double(surf_mat[key].item())
        else:
            surf_mat[key] = matlab.double(surf_mat[key].tolist())
    edg = surfstat_eng.SurfStatEdg(surf_mat)
    return np.array(edg)
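
The conversion pattern these MATLAB-bridge helpers rely on, as a minimal sketch (assumes the MATLAB Engine API for Python is installed; matlab.double expects nested lists):

import numpy as np
import matlab

tri = np.array([[1, 2, 3], [2, 3, 4]])
surf_mat = {'tri': matlab.double(tri.tolist())}  # NumPy array -> MATLAB matrix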
Example #7
def matlab_SurfStatLinMod(Y, M, surf=None, niter=1, thetalim=0.01, drlim=0.1):

    from term import Term
    from brainspace.mesh.mesh_elements import get_cells
    from brainspace.vtk_interface.wrappers.data_object import BSPolyData

    if isinstance(Y, np.ndarray):
        Y = matlab.double(Y.tolist())
    else:
        Y = surfstat_eng.double(Y)

    if isinstance(M, np.ndarray):
        M = {'matrix': matlab.double(M.tolist())}

    elif isinstance(M, Term):
        M = surfstat_eng.term(matlab.double(M.matrix.values.tolist()),
                              M.matrix.columns.tolist())
    else:  # Random
        M1 = matlab.double(M.mean.matrix.values.tolist())
        V1 = matlab.double(M.variance.matrix.values.tolist())

        M = surfstat_eng.random(V1, M1, surfstat_eng.cell(0),
                                surfstat_eng.cell(0), 1)

    # Only require 'tri' or 'lat'
    if surf is None:
        k = None
        surf = surfstat_eng.cell(0)
    else:
        if isinstance(surf, BSPolyData):
            surf = {'tri': np.array(get_cells(surf)) + 1}
        k = 'tri' if 'tri' in surf else 'lat'
        s = surf[k]
        surf = {k: matlab.int64(s.tolist())}

    slm = surfstat_eng.SurfStatLinMod(Y, M, surf, niter, thetalim, drlim)
    for key in ['SSE', 'coef']:
        if key not in slm:
            continue
        slm[key] = np.atleast_2d(slm[key])
    slm = {k: v if np.isscalar(v) else np.array(v) for k, v in slm.items()}
   
    return slm
Example #8
def dummy_test(py_surfaces, fun=np.add):
    # Run functions
    mat_surf = sw.matlab_SurfStatAvSurf(py_surfaces, fun)
    py_out = py_SurfStatAvSurf(py_surfaces, fun)
    py_surf = {'tri': np.array(get_cells(py_out)) + 1,
               'coord': np.array(get_points(py_out)).T}
    
    # Sort triangles. 
    py_surf['tri'] = np.sort(py_surf['tri'], axis=1)
    mat_surf['tri'] = np.sort(mat_surf['tri'], axis=1)
    
    # Check equality.
    for k in set.union(set(py_surf.keys()), set(mat_surf.keys())):
        assert k in mat_surf, "'%s' missing from MATLAB surface." % k
        assert k in py_surf, "'%s' missing from Python surface." % k

        if k not in ['df', 'dr']:
            assert mat_surf[k].shape == py_surf[k].shape, \
                "Different shape: %s" % k
        assert np.allclose(mat_surf[k], py_surf[k]), "Not equal: %s" % k
Example #9
def generate_random_slm(surf, n_var=1, dfs=None, mask=None, cluster_threshold=0.001):
    """Generates a valid SLM for a surface.

    Parameters
    ----------
    surf : BSPolyData
        Brain surface.
    n_var : int, optional
        slm.k, by default 1.
    dfs : np.array, None, optional
        Effective degrees of freedom, by default None.
    mask : np.array, optional
        Boolean mask, by default None.
    cluster_threshold : float, optional
        Cluster threshold, by default 0.001.

    Returns
    -------
    brainstat.stats.SLM
        SLM object.
    """
    triangles = np.array(get_cells(surf))
    edges = get_edges(surf)
    vertices = get_points(surf)

    n_vertices = vertices.shape[0]
    n_edges = edges.shape[0]

    slm = generate_slm(
        t=np.random.random_sample((1, n_vertices)),
        df=np.random.randint(2, 100),
        k=n_var,
        resl=np.random.random_sample((n_edges, 1)),
        tri=triangles + 1,
        surf=surf,
        dfs=dfs,
        mask=mask,
        cluster_threshold=cluster_threshold,
    )
    return slm
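
A hedged usage sketch (read_surface_gz, get_edges, and generate_slm are the surrounding test helpers; the fsaverage fetch mirrors the other examples here):

from nilearn import datasets

pial_fs5 = datasets.fetch_surf_fsaverage()["pial_left"]
pial_surf = read_surface_gz(pial_fs5)
slm = generate_random_slm(pial_surf, n_var=2, cluster_threshold=0.01)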
Example #10
def generate_test_data():
    pial_fs5 = datasets.fetch_surf_fsaverage()["pial_left"]
    pial_surf = read_surface_gz(pial_fs5)
    real_tri = np.array(get_cells(pial_surf))

    np.random.seed(0)

    mygrid = [
        {
            "Y": [
                np.random.randint(1, 10, size=(1, 20)),
                np.random.randint(1, 10, size=(2, 20)),
                np.random.randint(2, 10, size=(3, 20, 4)),
            ],
            "mask": [None,
                     np.random.choice(a=[False, True], size=(20, ))],
        },
        {
            "Y": [real_tri],
            "mask": [
                None,
                np.random.choice(a=[False, True], size=(real_tri.shape[1]))
            ],
        },
    ]

    myparamgrid = ParameterGrid(mygrid)

    # Here we go!
    # Tests 1-6: Y is a 2D or 3D int array, mask is None or a random bool array
    # Tests 7-8: Y is the pial_fs5 triangles, mask is None or a random bool array

    test_num = 0
    for params in myparamgrid:
        test_num += 1
        I = {}
        for key in params.keys():
            I[key] = params[key]
        D = generate_mesh_normalize_out(I)
        params2files(I, D, test_num)
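
ParameterGrid (from sklearn.model_selection) expands a list of dicts of value lists into every combination, which is how the test numbering above arises. A quick illustration:

from sklearn.model_selection import ParameterGrid

grid = [{"x": [1, 2], "y": ["a"]}]
print(list(ParameterGrid(grid)))  # [{'x': 1, 'y': 'a'}, {'x': 2, 'y': 'a'}]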
Example #11
def save_input_dict(params, basename, test_num):
    """Saves the input data.

    Parameters
    ----------
    params : dict
        Parameters provided by the parameter grid.
    basename : str
        Tag to save the file with.
    test_num : int
        Number of the test.
    """
    filename = datadir(basename + "_" + f"{test_num:02d}" + "_IN.pkl")

    if isinstance(params["surf"], BSPolyData):
        params["surf"] = {
            "tri": np.array(get_cells(params["surf"])) + 1,
            "coord": np.array(get_points(params["surf"])).T,
        }

    with open(filename, "wb") as f:
        pickle.dump(params, f, protocol=4)
Example #12
def _compute_resls(self, Y):
    """Computes the sum over observations of squares of differences of
    normalized residuals along each edge.

    Parameters
    ----------
    Y : numpy.array
        Response variable residual matrix.

    Returns
    -------
    numpy.array
        Sum over observations of squares of differences of normalized residuals
        along each edge.
    dict
        Dictionary containing the mesh connections in either triangle or lattice
        format. The dictionary's sole key is 'tri' for triangle connections or
        'lat' for lattice connections.
    """
    if isinstance(self.surf, BSPolyData):
        mesh_connections = {"tri": np.array(get_cells(self.surf)) + 1}
    else:
        key = "tri" if "tri" in self.surf else "lat"
        mesh_connections = {key: self.surf[key]}

    edges = mesh_edges(self.surf, self.mask)

    n_edges = edges.shape[0]

    Y = np.atleast_3d(Y)
    resl = np.zeros((n_edges, Y.shape[2]))

    for j in range(Y.shape[2]):
        normr = np.sqrt(self.SSE[((j + 1) * (j + 2) // 2) - 1])
        for i in range(Y.shape[0]):
            u = Y[i, :, j] / normr
            resl[:, j] += np.diff(u[edges], axis=1).ravel()**2

    return resl, mesh_connections
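
A toy illustration of the per-edge computation (pure NumPy; shapes are illustrative): u holds one observation's normalized residuals, and each row of edges indexes a vertex pair.

import numpy as np

edges = np.array([[0, 1], [1, 2]])  # two mesh edges as vertex-index pairs
u = np.array([0.5, 1.0, 3.0])       # normalized residual at each vertex
resl = np.diff(u[edges], axis=1).ravel() ** 2
print(resl)                         # [0.25 4.  ]: squared difference per edge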
Example #13
def generate_tests():
    # Test 01
    # ['tri'] will be a np array, shape (4, 3), int64
    np.random.seed(0)
    rand_dict = {}
    rand_dict["tri"] = np.random.randint(1, int(10), size=(4, 3))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 1)

    # Test 02
    # ['tri'] :np array, shape (4, 3), int64
    # ['resl'] :np array, shape (8, 6), float64
    np.random.seed(0)
    rand_dict = {}
    n_vertices = 6
    rand_dict["tri"] = np.random.randint(1, n_vertices, size=(4, 3))
    rand_dict["resl"] = np.random.random_sample((8, n_vertices))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 2)

    # Test 03
    # ['tri'] :np array, shape (4, 3), int64
    # ['resl'] :np array, shape (8, 6), float64
    # ['mask'] :np array, shape (5,), bool
    np.random.seed(0)
    rand_dict = {}
    n_vertices = 6
    rand_dict["tri"] = np.random.randint(1, n_vertices, size=(4, 3))
    rand_dict["resl"] = np.random.random_sample((8, n_vertices))
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 3)

    # Test 04
    # ['lat'] :np array, shape (10, 10, 10), float64
    np.random.seed(0)
    rand_dict = {}
    rand_dict["lat"] = np.ones((10, 10, 10))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 4)

    # Test 05
    # ['lat'] :np array, shape (10, 10, 10), bool
    np.random.seed(0)
    rand_dict = {}
    rand_dict["lat"] = np.random.choice(a=[False, True], size=(10, 10, 10))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 5)

    # Test 06
    # ['tri'] :np array, shape (1000, 3)
    # ['mask'] :np array, shape (['tri'].max(),), bool
    np.random.seed(0)
    rand_dict = {}
    rand_dict["tri"] = np.random.randint(1, n_vertices, size=(1000, 3))
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 6)

    # Test 07
    # ['lat'] :np array, shape (10, 10, 10), bool
    # ['resl'] :np array, shape (1359, 1), float64
    np.random.seed(0)
    rand_dict = {}
    rand_dict["lat"] = np.random.choice(a=[False, True], size=(10, 10, 10))
    rand_dict["resl"] = np.random.random_sample((1359, 1))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 7)

    # Test 08
    # ['tri'] :np array, shape (1000, 3)
    # ['mask'] :np array, shape (499,), bool
    np.random.seed(1)
    rand_dict = {}
    rand_dict["tri"] = np.random.randint(1, 499, size=(1000, 3))
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 8)

    # Test 09
    # ['lat'] :np array, shape (10, 10, 10), bool
    # ['resl'] :np array, shape (1198, 1), float64
    np.random.seed(1)
    rand_dict = {}
    rand_dict["lat"] = np.random.choice(a=[False, True], size=(10, 10, 10))
    rand_dict["resl"] = np.random.random_sample((1198, 1))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 9)

    # Test 10
    # ['tri'] is pial_fs5, shape (20480, 3)
    pial_fs5 = datasets.fetch_surf_fsaverage()["pial_left"]
    pial_surf = read_surface_gz(pial_fs5)
    n_vertices = get_points(pial_surf).shape[0]
    rand_dict = {}
    rand_dict["tri"] = np.array(get_cells(pial_surf)) + 1
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 10)

    # Test 11
    # ['tri'] :pial_fs5, shape (20480, 3)
    # ['mask'] :np array, shape (['tri'].max(),), bool
    np.random.seed(0)
    rand_dict = {}
    rand_dict["tri"] = np.array(get_cells(pial_surf)) + 1
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 11)

    # Test 12
    # ['tri'] :pial_fs5, shape (20480, 3) --> shuffle
    # ['mask'] :np array, shape (['tri'].max(),), bool
    np.random.seed(5)
    rand_dict = {}
    rand_dict["tri"] = np.array(get_cells(pial_surf)) + 1
    np.random.shuffle(rand_dict["tri"])
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 12)
Example #14
def py_SurfStatLinMod(Y, M, surf=None, niter=1, thetalim=0.01, drlim=0.1):
    """ Fits linear mixed effects models to surface data and estimates resels.

    Parameters
    ----------
    Y : ndarray, shape = (n_samples, n_verts) or (n_samples, n_verts, n_feats)
        Surface data.
    M : Term or Random
        Design matrix.
    surf : dict or BSPolyData, optional
        Surface triangles (surf['tri']) or volumetric data (surf['lat']).
        If 'tri', shape = (n_cells, 3). If 'lat', it is a boolean 3D
        array. Alternatively a BSPolyData object can be provided. Default is None.
    niter : int, optional
        Number of extra iterations of the Fisher scoring algorithm for fitting
        mixed effects models. Default is 1.
    thetalim : float, optional
        Lower limit on variance coefficients, in sd's. Default is 0.01.
    drlim : float, optional
        Step of ratio of variance coefficients, in sd's. Default 0.1.

    Returns
    -------
    slm : dict
        Dictionary with the following keys:

        - 'X' : ndarray, shape = (n_samples, n_pred)
            Design matrix.
        - 'df' : int
            Degrees of freedom.
        - 'coef' : ndarray, shape = (n_pred, n_verts)
            Model coefficients.
        - 'SSE' : ndarray, shape = (n_feat, n_verts)
            Sum of square errors.
        - 'V' : ndarray, shape = (n_samples, n_samples, n_rand)
            Variance matrix bases. Only when mixed effects.
        - 'r' : ndarray, shape = (n_rand - 1, n_verts)
            Coefficients of the first (q-1) components of 'V' divided by their
            sum. Coefficients are clamped to a minimum of 0.01 x sd.
            Only when mixed effects.
        - 'dr' : ndarray
            Vector of increments in 'r', equal to 0.1 x sd.
        - 'resl' : ndarray, (n_edges, n_feat)
            Sum over observations of squares of differences of normalized
            residuals along each edge. Only when ``surf is not None``.
        - 'tri' : ndarray, (n_cells, 3)
            Cells in surf. Only when ``surf is not None``.
        - 'lat' : ndarray
            Neighbors in lattice.

    """

    n, v = Y.shape[:2]  # number of samples x number of points
    k = 1 if Y.ndim == 2 else Y.shape[2]  # number of features

    # Get data from term/random
    V = None
    if isinstance(M, Random):
        X, Vl = M.mean.matrix.values, M.variance.matrix.values

        # check if var contains intercept (constant term)
        n2, q = Vl.shape
        II = np.identity(n).ravel()

        r = II - Vl @ (la.pinv(Vl) @ II)
        if (r**2).mean() > np.finfo(float).eps:
            warnings.warn('Did you forget an error term, I? :-)')

        if q > 1 or (q == 1 and np.abs(II - Vl.T).sum() > 0):
            V = Vl.reshape(n, n, -1)

    else:  # No random term
        q = 1
        if isinstance(M, Term):
            X = M.matrix.values
        else:
            if M.size > 1:
                warnings.warn("If you don't convert vectors to terms you can "
                              "get unexpected results :-(")
            X = M

        if X.shape[0] == 1:
            X = np.tile(X, (n, 1))

    # check if term (x) contains intercept (constant term)
    pinvX = la.pinv(X)
    r = 1 - X @ pinvX.sum(1)
    if (r**2).mean() > np.finfo(float).eps:
        warnings.warn('Did you forget an error term, I? :-)')

    p = X.shape[1]  # number of predictors
    df = n - la.matrix_rank(X)  # degrees of freedom

    slm = dict(df=df, X=X)

    if k == 1:  # Univariate

        if q == 1:  # Fixed effects

            if V is None:  # OLS
                coef = pinvX @ Y
                Y = Y - X @ coef

            else:
                V = V / np.diag(V).mean(0)
                Vmh = la.inv(la.cholesky(V).T)

                coef = (la.pinv(Vmh @ X) @ Vmh) @ Y
                Y = Vmh @ Y - (Vmh @ X) @ coef

            sse = np.sum(Y**2, axis=0)

        else:  # mixed effects

            q1 = q - 1

            V /= np.diagonal(V, axis1=0, axis2=1).mean(-1)
            slm_r = np.zeros((q1, v))

            # start Fisher scoring algorithm
            R = np.eye(n) - X @ la.pinv(X)
            RVV = (V.T @ R.T).T
            E = (Y.T @ (R.T @ RVV.T))
            E *= Y.T
            E = E.sum(-1)

            RVV2 = np.zeros([n, n, q])
            E2 = np.zeros([q, v])
            for j in range(q):
                RV2 = R @ V[..., j]
                E2[j] = (Y * ((RV2 @ R) @ Y)).sum(0)
                RVV2[..., j] = RV2

            M = np.einsum('ijk,jil->kl', RVV, RVV, optimize='optimal')

            theta = la.pinv(M) @ E
            tlim = np.sqrt(2 * np.diag(la.pinv(M))) * thetalim
            tlim = tlim[:, None] * theta.sum(0)
            m = theta < tlim
            theta[m] = tlim[m]
            r = theta[:q1] / theta.sum(0)

            Vt = 2 * la.pinv(M)
            m1 = np.diag(Vt)
            m2 = 2 * Vt.sum(0)
            Vr = m1[:q1] - m2[:q1] * slm_r.mean(1) + Vt.sum() * (r**2).mean(-1)
            dr = np.sqrt(Vr) * drlim

            # Extra Fisher scoring iterations
            for it in range(niter):
                irs = np.round(r.T / dr)
                ur, jr = np.unique(irs, axis=0, return_inverse=True)
                nr = ur.shape[0]
                for ir in range(nr):
                    iv = jr == ir
                    rv = r[:, iv].mean(1)

                    Vs = (1 - rv.sum()) * V[..., q - 1]
                    Vs += (V[..., :q1] * rv).sum(-1)

                    Vinv = la.inv(Vs)
                    VinvX = Vinv @ X
                    G = la.pinv(X.T @ VinvX) @ VinvX.T
                    R = Vinv - VinvX @ G

                    RVV = (V.T @ R.T).T
                    E = (Y[:, iv].T @ (R.T @ RVV.T))
                    E *= Y[:, iv].T
                    E = E.sum(-1)

                    M = np.einsum('ijk,jil->kl', RVV, RVV, optimize='optimal')

                    thetav = la.pinv(M) @ E
                    tlim = np.sqrt(2 * np.diag(la.pinv(M))) * thetalim
                    tlim = tlim[:, None] * thetav.sum(0)

                    m = thetav < tlim
                    thetav[m] = tlim[m]
                    theta[:, iv] = thetav

                r = theta[:q1] / theta.sum(0)

            # finish Fisher scoring
            irs = np.round(r.T / dr)
            ur, jr = np.unique(irs, axis=0, return_inverse=True)
            nr = ur.shape[0]

            coef = np.zeros((p, v))
            sse = np.zeros(v)
            for ir in range(nr):
                iv = jr == ir
                rv = r[:, iv].mean(1)

                Vs = (1 - rv.sum()) * V[..., q - 1]
                Vs += (V[..., :q1] * rv).sum(-1)

                # Vmh = la.inv(la.cholesky(Vs).T)
                Vmh = la.inv(la.cholesky(Vs))
                VmhX = Vmh @ X
                G = (la.pinv(VmhX.T @ VmhX) @ VmhX.T) @ Vmh

                coef[:, iv] = G @ Y[:, iv]
                R = Vmh - VmhX @ G
                Y[:, iv] = R @ Y[:, iv]
                sse[iv] = (Y[:, iv]**2).sum(0)

            slm.update(dict(r=r, dr=dr[:, None]))

        sse = sse[None]

    else:  # multivariate
        if q > 1:
            raise ValueError('Multivariate mixed effects models not yet '
                             'implemented :-(')

        if V is None:
            X2 = X
        else:
            V = V / np.diag(V).mean(0)
            Vmh = la.inv(la.cholesky(V)).T
            X2 = Vmh @ X
            pinvX = la.pinv(X2)
            Y = Vmh @ Y

        coef = pinvX @ Y.T.swapaxes(-1, -2)
        Y = Y - (X2 @ coef).swapaxes(-1, -2).T
        coef = coef.swapaxes(-1, -2).T

        k2 = k * (k + 1) // 2
        sse = np.zeros((k2, v))
        j = -1
        for j1 in range(k):
            for j2 in range(j1 + 1):
                j = j + 1
                sse[j] = (Y[..., j1] * Y[..., j2]).sum(0)

    slm.update(dict(coef=coef, SSE=sse))
    if V is not None:
        slm['V'] = V

    if surf is not None and (isinstance(surf, BSPolyData) or
                             ('tri' in surf or 'lat' in surf)):
        if isinstance(surf, BSPolyData):
            slm['tri'] = np.array(get_cells(surf)) + 1
        else:
            key = 'tri' if 'tri' in surf else 'lat'
            slm[key] = surf[key]

        edges = py_SurfStatEdg(surf)

        n_edges = edges.shape[0]

        resl = np.zeros((n_edges, k))
        Y = np.atleast_3d(Y)

        for j in range(k):
            normr = np.sqrt(sse[((j + 1) * (j + 2) // 2) - 1])
            for i in range(n):
                u = Y[i, :, j] / normr
                resl[:, j] += np.diff(u[edges], axis=1).ravel()**2
        slm['resl'] = resl

    return slm
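
A minimal fixed-effects sketch on toy data (Term as imported in example #15 below; shapes are illustrative):

import numpy as np
from term import Term

n_samples, n_verts = 10, 50
Y = np.random.rand(n_samples, n_verts)
age = np.random.rand(n_samples, 1)
M = 1 + Term(age)                    # intercept plus one covariate
slm = py_SurfStatLinMod(Y, M)
print(slm['df'], slm['coef'].shape)  # n - rank(X) = 8, (2, 50)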
Example #15
sys.path.append("/data/p_02323/BrainStat/surfstat/")
sys.path.append("/data/p_02323/BrainStat/surfstat/python")
from SurfStatLinMod import py_SurfStatLinMod
from SurfStatT import py_SurfStatT
from SurfStatSmooth import py_SurfStatSmooth
from term import Term
from brainspace.mesh import mesh_elements
from brainspace.datasets import load_conte69

# load poly data for 64k surface (for the test & plotting)
surf_lh, surf_rh = load_conte69()

# write surface coordinates and triangles in a dictionary
lh_coord = np.array(mesh_elements.get_points(surf_lh)).T
rh_coord = np.array(mesh_elements.get_points(surf_rh)).T
lh_tri = np.array(mesh_elements.get_cells(surf_lh))
rh_tri = np.array(mesh_elements.get_cells(surf_rh))

D = {}
D['coord'] = np.concatenate((lh_coord, rh_coord), axis=1)  # (3, 64984)
D['tri'] = np.concatenate((lh_tri, rh_tri + lh_coord.shape[1]))  # (129960, 3)

# Test
Y = C64k_all
contrast = np.ones((len(mylist), 1))
M = 1 + Term(contrast)
slm = py_SurfStatLinMod(Y, M, D)
slm = py_SurfStatT(slm, contrast)
T = slm['t']

h = h5py.File(os.path.join(odir, 'Tvals_cortex709_%s.h5' % (subfield)), 'w')
Example #16
def cortical_ribbon(pial_mesh, wm_mesh, nii, mesh_distance=6):
    """Finds voxels inside of the cortical ribbon.

    Parameters
    ----------
    pial_mesh : BSPolyData
        Pial mesh.
    wm_mesh : BSPolyData
        White matter mesh.
    nii : Nibabel nifti
        Nifti image containing the space in which to output the ribbon.
    mesh_distance : int, optional
        Maximum distance from the cortical mesh at which the ribbon may occur.
        Used to reduce the search space, by default 6.

    Returns
    -------
    numpy.array
        Matrix coordinates of voxels inside the cortical ribbon.
    """

    try:
        import pyembree
    except ImportError:
        raise ModuleNotFoundError(
            "The package pyembree is required for this function. "
            "You can install it with the conda package manager: "
            "`conda install -c conda-forge pyembree`."
        )

    # Get world coordinates.
    x, y, z, _ = np.meshgrid(range(nii.shape[0]), range(nii.shape[1]),
                             range(nii.shape[2]), 0)

    points = np.reshape(np.concatenate((x, y, z), axis=3), (-1, 3), order="F")
    world_coord = nib.affines.apply_affine(nii.affine, points)

    logging.debug(
        "Discarding points that exceed the minima/maxima of the pial mesh.")
    # Discard points that exceed any of the maxima/minima
    pial_points = np.array(get_points(pial_mesh))
    discard = np.any(
        # If points exceed maximum coordinates
        (world_coord > np.amax(pial_points, axis=0)) |
        # If points are lower than minimum coordinates
        (world_coord < np.amin(pial_points, axis=0)),
        axis=1,
    )
    world_coord = world_coord[np.logical_not(discard), :]

    # Discard points that are more than mesh_distance from the pial and wm mesh.
    logging.debug("Discarding points that are too far from the meshes.")
    tree = cKDTree(pial_points)
    mindist_pial, _ = tree.query(world_coord)

    wm_points = np.array(get_points(wm_mesh))
    tree = cKDTree(wm_points)
    mindist_wm, _ = tree.query(world_coord)

    world_coord = world_coord[(mindist_pial < mesh_distance) &
                              (mindist_wm < mesh_distance), :]

    # Check which points are inside pial but not inside WM (i.e. ribbon)
    logging.debug(
        "Retaining only points that are inside the pial but not the WM mesh.")
    pial_trimesh = trimesh.ray.ray_pyembree.RayMeshIntersector(
        trimesh.Trimesh(
            vertices=np.array(get_points(pial_mesh)),
            faces=np.array(get_cells(pial_mesh)),
        ))
    wm_trimesh = trimesh.ray.ray_pyembree.RayMeshIntersector(
        trimesh.Trimesh(vertices=np.array(get_points(wm_mesh)),
                        faces=np.array(get_cells(wm_mesh))))

    inside_wm = wm_trimesh.contains_points(world_coord)
    inside_pial = pial_trimesh.contains_points(world_coord)
    inside_ribbon = world_coord[inside_pial & ~inside_wm, :]
    ribbon_points = nib.affines.apply_affine(np.linalg.inv(nii.affine),
                                             inside_ribbon)
    return ribbon_points
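
A hedged usage sketch (the file paths are hypothetical; read_surface is brainspace's mesh reader):

import nibabel as nib
from brainspace.mesh.mesh_io import read_surface

pial = read_surface("lh.pial.surf.gii")      # hypothetical path
wm = read_surface("lh.white.surf.gii")       # hypothetical path
nii = nib.load("T1w.nii.gz")                 # hypothetical path
ribbon_vox = cortical_ribbon(pial, wm, nii)  # (n_voxels, 3) matrix coordinates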
Example #17
def test_mesh_elements():
    s = _generate_sphere()

    ee = vtk.vtkExtractEdges()
    ee.SetInputData(s.VTKObject)
    ee.Update()
    ee = wrap_vtk(ee.GetOutput())
    n_edges = ee.n_cells

    assert np.all(me.get_points(s) == s.Points)
    assert np.all(me.get_cells(s) == s.GetCells2D())
    assert me.get_extent(s).shape == (3, )

    pc = me.get_point2cell_connectivity(s)
    assert pc.shape == (s.n_points, s.n_cells)
    assert pc.dtype == np.uint8
    assert np.all(pc.sum(axis=0) == 3)

    cp = me.get_cell2point_connectivity(s)
    assert cp.dtype == np.uint8
    assert (pc - cp.T).nnz == 0

    adj = me.get_immediate_adjacency(s)
    assert adj.shape == (s.n_points, s.n_points)
    assert adj.dtype == np.uint8
    assert adj.nnz == (2 * n_edges + s.n_points)

    adj2 = me.get_immediate_adjacency(s, include_self=False)
    assert adj2.shape == (s.n_points, s.n_points)
    assert adj2.dtype == np.uint8
    assert adj2.nnz == (2 * n_edges)

    radj = me.get_ring_adjacency(s)
    assert radj.dtype == np.uint8
    assert (adj - radj).nnz == 0

    radj2 = me.get_ring_adjacency(s, include_self=False)
    assert radj2.dtype == np.uint8
    assert (adj2 - radj2).nnz == 0

    radj3 = me.get_ring_adjacency(s, n_ring=2, include_self=False)
    assert radj3.dtype == np.uint8
    assert (radj3 - adj2).nnz > 0

    d = me.get_immediate_distance(s)
    assert d.shape == (s.n_points, s.n_points)
    assert d.dtype == np.float64
    assert d.nnz == adj2.nnz

    d2 = me.get_immediate_distance(s, metric='sqeuclidean')
    d_sq = d.copy()
    d_sq.data **= 2
    assert np.allclose(d_sq.A, d2.A)

    rd = me.get_ring_distance(s)
    assert rd.dtype == np.float64
    assert np.allclose(d.A, rd.A)

    rd2 = me.get_ring_distance(s, n_ring=2)
    assert (rd2 - d).nnz > 0

    assert me.get_cell_neighbors(s).shape == (s.n_cells, s.n_cells)
    assert me.get_edges(s).shape == (n_edges, 2)
    assert me.get_edge_length(s).shape == (n_edges, )

    assert me.get_boundary_points(s).size == 0
    assert me.get_boundary_edges(s).size == 0
    assert me.get_boundary_cells(s).size == 0
Example #18
def generate_test_data():
    np.random.seed(0)

    # these are the array sizes that will be looped over in the ParameterGrid
    mygrid_xy = [{"x": [3], "y": [1]}, {"x": [6], "y": [6]}, {"x": [5], "y": [2]}]
    myparamgrid_xy = ParameterGrid(mygrid_xy)

    # Here we go!
    # Tests 1-12: randomly generated arrays for all input params
    test_num = 0
    for params_xy in list(myparamgrid_xy):
        x = params_xy["x"]
        y = params_xy["y"]
        mygrid = [
            {
                "X": [np.random.rand(x, y)],
                "df": [int(y - 1)],
                "coef": [
                    np.random.rand(y, x),
                ],
                "SSE": [np.random.rand(1, x)],
                "contrast": [np.random.rand(1, y), np.random.rand(1, 1)],
                "dr": [None, int(y + 1)],
            }
        ]
        # here goes the actual Parameter Grid
        myparamgrid = ParameterGrid(mygrid)
        for params in myparamgrid:
            test_num += 1
            I = {}
            for key in params.keys():
                if params[key] is not None:
                    I[key] = params[key]
            D = generate_t_test_out(I)
            params2files(I, D, test_num)

    # get some real data for the triangle coordinates
    pial_fs5 = datasets.fetch_surf_fsaverage()["pial_left"]
    surf = read_surface_gz(pial_fs5)
    tri = np.array(get_cells(surf))

    realgrid = [
        {
            "X": [np.random.randint(0, 1000, size=(20, 9))],
            "df": [np.random.randint(1, 9)],
            "coef": [np.random.rand(9, int(tri.shape[0])) - 0.7],
            "SSE": [np.random.rand(1, int(tri.shape[0]))],
            "tri": [tri],
            "resl": [np.random.rand(int(tri.shape[0]), 1)],
            "contrast": [np.random.randint(21, 48, size=(20, 1))],
        }
    ]

    # Test 13: triangle coordinates are real, from pial_fs5, rest is random
    myrealgrid = ParameterGrid(realgrid)
    for params in myrealgrid:
        test_num += 1
        I = {}
        for key in params.keys():
            if params[key] is not None:
                I[key] = params[key]
        D = generate_t_test_out(I)
        params2files(I, D, test_num)