Example #1
def _surf2surf(
    source: BSPolyData,
    target: BSPolyData,
    values: ArrayLike,
    interpolation: str = "nearest",
) -> np.ndarray:
    """Performs an interpolations between two surfaces.

    Parameters
    ----------
    source : BSPolyData
        Source surface.
    target : BSPolyData
        Target surface.
    values : np.ndarray
        Values on source surface.
    interpolation : str, optional
        Interpolation type, valid values are "nearest" and "linear", by default "nearest".

    Returns
    -------
    np.ndarray
        Interpolated values on target surface.
    """
    source_coord = get_points(source)
    target_coord = get_points(target)

    if interpolation == "nearest":
        interp = NearestNDInterpolator(source_coord, values)
    elif interpolation == "linear":
        interp = LinearNDInterpolator(source_coord, values)
    else:
        ValueError("Unknown interpolation type.")

    return interp(target_coord)
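
A minimal usage sketch (not from the original source), assuming `_surf2surf` and its imports are in scope; the toy meshes below are hypothetical stand-ins for real cortical surfaces:

import numpy as np
from brainspace.mesh.mesh_creation import build_polydata

# Source: a tetrahedron with one scalar value per vertex.
src_coord = np.array([[0, 0, 0], [1, 0, 0], [0, 1, 0], [0, 0, 1]], dtype=float)
src_cells = np.array([[0, 1, 2], [0, 1, 3], [0, 2, 3], [1, 2, 3]])
source = build_polydata(src_coord, cells=src_cells)

# Target: a small point cloud; cells are not needed for interpolation.
target = build_polydata(np.array([[0.4, 0.4, 0.1], [0.1, 0.1, 0.7]]))

values = np.arange(source.n_points, dtype=float)
print(_surf2surf(source, target, values, interpolation="nearest"))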
Example #2
def ribbon_interpolation(
    pial_mesh: BSPolyData,
    wm_mesh: BSPolyData,
    labels: Union[str, np.ndarray],
    nii: nib.nifti1.Nifti1Image,
    points: np.ndarray,
    interpolation: str = "nearest",
) -> np.ndarray:
    """Performs label interpolation in the cortical ribbon.

    Parameters
    ----------
    pial_mesh : BSPolyData
        Pial mesh.
    wm_mesh : BSPolyData
        White matter mesh.
    labels : str, numpy.ndarray
        Filename of a .label.gii or .shape.gii file, or a numpy array
        containing the labels.
    nii : Nibabel nifti
        Reference nifti image.
    points : numpy.array
        Numpy array containing the coordinates of the ribbon.
    interpolation : str, optional
        Interpolation method. Can be either 'nearest' or 'linear'.

    Returns
    -------
    numpy.ndarray
        Interpolated value for each input point.

    Notes
    -----
    Strictly speaking, this function also works outside the cortical ribbon and
    will assign any point the label of its nearest mesh vertex. An adventurous
    user could use this for nearest-neighbor surface-to-volume interpolation
    anywhere in the brain, although such usage is not officially supported.
    """

    if not isinstance(labels, np.ndarray):
        labels = nib.gifti.giftiio.read(labels).agg_data()

    mesh_coord = np.concatenate((get_points(pial_mesh), get_points(wm_mesh)),
                                axis=0)

    # Repeat labels as we concatenate the pial/white meshes.
    labels = np.concatenate((labels, labels))

    ribbon_coord = nib.affines.apply_affine(nii.affine, points)

    if interpolation == "nearest":
        interp = NearestNDInterpolator(mesh_coord, labels)
    elif interpolation == "linear":
        interp = LinearNDInterpolator(mesh_coord, labels)
    else:
        raise ValueError("Unknown interpolation type.")

    return interp(ribbon_coord)
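
The `nib.affines.apply_affine` call above is what maps voxel indices to world coordinates; a self-contained sketch with a hypothetical 2 mm isotropic affine:

import numpy as np
import nibabel as nib

affine = np.diag([2.0, 2.0, 2.0, 1.0])     # hypothetical 2 mm isotropic affine
voxels = np.array([[0, 0, 0], [1, 2, 3]])  # matrix (voxel) coordinates
print(nib.affines.apply_affine(affine, voxels))  # [[0. 0. 0.] [2. 4. 6.]]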
Example #3
def mesh_average(filenames, fun=np.add, output_surfstat=False):
    """Average, minimum, or maximum of surfaces.

    Args:
        filenames (2D numpy array): Numpy array of filenames of surfaces or BSPolyData objects.

        fun (function): Function applied pairwise to two surfaces, e.g.
            np.add (default) will give the average of the surfaces,
            np.fmin or np.fmax will give the min or max, respectively.

        output_surfstat (boolean): If True, outputs the surface in SurfStat
            format. If False, outputs the surface as BSPolyData. Default is
            False.

    Returns:
        surface [BSPolyData, dict]: The output surface.
    """

    if filenames.ndim != 2:
        raise ValueError("Filenames must be a 2-dimensional array.")

    for i in range(0, filenames.shape[0]):
        surfaces = np.empty(filenames.shape[1], dtype=object)
        for j in range(0, filenames.shape[1]):

            # Check whether input is BSPolyData or a filename.
            if isinstance(filenames[i, j], BSPolyData):
                surfaces[j] = filenames[i, j]
            else:
                surfaces[j] = read_surface(filenames[i, j])

            # Concatenate second dimension of filenames.
            if j == 0:
                tri = get_cells(surfaces[j])
                coord = get_points(surfaces[j])
            else:
                tri = np.concatenate(
                    (tri, get_cells(surfaces[j]) + coord.shape[0]), axis=0
                )
                coord = np.concatenate((coord, get_points(surfaces[j])), axis=0)

        if i == 0:
            m = 1
            coord_all = coord
        else:
            coord_all = fun(coord_all, coord)
            m = fun(m, 1)

    coord_all = coord_all / m

    if output_surfstat:
        surface = {"tri": np.array(tri) + 1, "coord": np.array(coord_all).T}
    else:
        surface = build_polydata(coord_all, tri)

    return surface
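
A usage sketch with hypothetical file paths: each row of `filenames` is one subject whose surfaces are concatenated column-wise, and rows are then combined with `fun`:

import numpy as np

filenames = np.array([
    ["sub-01_lh.surf.gii", "sub-01_rh.surf.gii"],
    ["sub-02_lh.surf.gii", "sub-02_rh.surf.gii"],
])
mean_surf = mesh_average(filenames)              # BSPolyData, vertex-wise mean
max_surf = mesh_average(filenames, fun=np.fmax)  # vertex-wise maximum
surfstat_dict = mesh_average(filenames, output_surfstat=True)  # {'tri', 'coord'}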
Example #4
def generate_test_data():
    np.random.seed(0)
    surface = _generate_sphere()
    parameters = [
        {
            "n_observations": [103],
            "n_vertices": [np.array(get_points(surface)).shape[0]],
            "n_variates": [1, 2, 3],
            "n_random": [0],
            "n_predictors": [1, 7],
            "surf": [None, surface],
        },
        {
            "n_observations": [103],
            "n_vertices": [np.array(get_points(surface)).shape[0]],
            "n_variates": [1],
            "n_random": [1],
            "n_predictors": [2, 7],
            "surf": [None, surface],
        },
    ]

    test_num = 0
    for params in ParameterGrid(parameters):
        test_num += 1
        Y, M = generate_random_data_model(
            params["n_observations"],
            params["n_vertices"],
            params["n_variates"],
            params["n_predictors"],
        )
        model = array2effect(M, params["n_random"])
        contrast = -M[:, -1]

        save_input_dict(
            {
                "Y": Y,
                "M": M,
                "contrast": contrast,
                "surf": params["surf"],
                "n_random": params["n_random"],
            },
            "xstatt",
            test_num,
        )

        slm = SLM(model, contrast, params["surf"])
        slm.linear_model(Y)
        slm.t_test()

        slm2files(slm, "xstatt", test_num)
Example #5
def __create_precomputed(data_dir: Optional[Union[str, Path]] = None) -> None:
    """Create nearest neighbor interpolation niftis for MATLAB."""
    # Embed import to prevent circular dependency.
    from brainstat.datasets import fetch_template_surface

    data_dir = Path(data_dir) if data_dir else data_directories["BRAINSTAT_DATA_DIR"]
    mni152 = load_mni152_brain_mask()
    for template in ("fsaverage5", "fsaverage", "civet41k", "civet164k"):
        output_file = data_dir / f"nn_interp_{template}.nii.gz"
        if output_file.exists():
            continue
        top_surf = "pial" if template[:9] == "fsaverage" else "mid"
        pial = fetch_template_surface(template, layer=top_surf, join=False)
        white = fetch_template_surface(template, layer="white", join=False)
        n_points = get_points(pial[0]).shape[0]
        labels = (
            np.arange(1, n_points + 1),
            np.arange(n_points + 1, n_points * 2 + 1),
        )
        multi_surface_to_volume(
            pial=pial,
            white=white,
            volume_template=mni152,
            labels=labels,
            output_file=str(output_file),
            interpolation="nearest",
        )

    if not (data_dir / "nn_interp_hcp.nii.gz").exists():
        import hcp_utils as hcp
        from brainspace.mesh.mesh_creation import build_polydata

        pial_fslr32k = (build_polydata(hcp.mesh.pial[0], hcp.mesh.pial[1]),)
        white_fslr32k = (build_polydata(hcp.mesh.white[0], hcp.mesh.white[1]),)
        labels_fslr32k = (np.arange(1, get_points(pial_fslr32k[0]).shape[0] + 1),)
        multi_surface_to_volume(
            pial=pial_fslr32k,
            white=white_fslr32k,
            volume_template=mni152,
            labels=labels_fslr32k,
            output_file=str(data_dir / "nn_interp_fslr32k.nii.gz"),
            interpolation="nearest",
        )
Example #6
def slm2files(slm, basename, test_num):
    """Converts an SLM to its output files.

    Parameters
    ----------
    slm : brainstat.stats.SLM
        SLM object.
    basename : str
        Base name for the file.
    test_num : int
        Number of the test.
    """
    D = slm2dict(slm)
    D.pop("model")
    D.pop("contrast")
    if "_surf" in D and isinstance(D["_surf"], BSPolyData):
        D["surf"] = {
            "tri": np.array(get_cells(D["_surf"])),
            "coord": np.array(get_points(D["_surf"])).T,
        }
        D.pop("_surf")
        D.pop("_tri")

    filename = datadir(f"{basename}_{test_num:02d}_OUT.pkl")
    with open(filename, "wb") as f:
        pickle.dump(D, f, protocol=4)
Example #7
File: SLM.py (project: MICA-MNI/BrainStat)
@surf.setter
def surf(self, value):
    self._surf = value
    if self.surf is not None:
        if isinstance(self.surf, BSPolyData):
            self.tri = np.array(get_cells(self.surf)) + 1
            self.coord = np.array(get_points(self.surf)).T
        elif isinstance(self.surf, Nifti1Image):
            self.lat = self.surf.get_fdata() != 0
        else:
            if "tri" in value:
                self.tri = value["tri"]
                self.coord = value["coord"]
            elif "lat" in value:
                self.lat = value["lat"]
                self.coord = value["coord"]
Example #8
def dict2pkl(D, basename, test_num, input=True):
    if "surf" in D and D["surf"] is not None:
        D["surf"] = {
            "tri": np.array(get_cells(D["surf"])),
            "coord": np.array(get_points(D["surf"])).T,
        }

    if "_tri" in D:
        D.pop("_tri")

    if "_surf" in D and D["_surf"] is not None:
        D["surf"] = {
            "tri": np.array(get_cells(D["_surf"])),
            "coord": np.array(get_points(D["_surf"])),
        }
        D.pop("_surf")

    if input:
        stage = "IN"
    else:
        stage = "OUT"
    filename = datadir(f"{basename}_{test_num:02d}_{stage}.pkl")
    with open(filename, "wb") as f:
        pickle.dump(D, f, protocol=4)
Example #9
def generate_random_slm(surf,
                        n_var=1,
                        dfs=None,
                        mask=None,
                        cluster_threshold=0.001):
    """Generates a valid SLM for a surface.

    Parameters
    ----------
    surf : BSPolyData
        Brain surface.
    n_var : int, optional
        slm.k, by default 1.
    dfs : np.array, None, optional
        Effective degrees of freedom, by default None.
    mask : np.array, optional
        Boolean mask, by default None.
    cluster_threshold : float, optional
        Cluster threshold, by default 0.001.

    Returns
    -------
    brainstat.stats.SLM
        SLM object.
    """
    edges = get_edges(surf)
    vertices = get_points(surf)

    n_vertices = vertices.shape[0]
    n_edges = edges.shape[0]

    slm = generate_slm(
        t=np.random.random_sample((1, n_vertices)),
        df=np.random.randint(2, 100),
        k=n_var,
        resl=np.random.random_sample((n_edges, 1)),
        surf=surf,
        dfs=dfs,
        mask=mask,
        cluster_threshold=cluster_threshold,
    )
    return slm
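
A usage sketch, assuming the `_generate_sphere` helper from Example #4 is in scope:

surf = _generate_sphere()
slm = generate_random_slm(surf, n_var=3, cluster_threshold=0.01)
print(slm.t.shape, slm.k)  # expected: (1, n_vertices) and 3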
Example #10
def dummy_test(py_surfaces, fun=np.add):
    # Run functions
    mat_surf = sw.matlab_SurfStatAvSurf(py_surfaces, fun)
    py_out = py_SurfStatAvSurf(py_surfaces, fun)
    py_surf = {'tri': np.array(get_cells(py_out)+1), 
               'coord': np.array(get_points(py_out)).T}
    
    # Sort triangles. 
    py_surf['tri'] = np.sort(py_surf['tri'], axis=1)
    mat_surf['tri'] = np.sort(mat_surf['tri'], axis=1)
    
    # Check equality.
    for k in set.union(set(py_surf.keys()), set(mat_surf.keys())):
        assert k in mat_surf, "'%s' missing from MATLAB slm." % k
        assert k in py_surf, "'%s' missing from Python slm." % k

        if k not in ['df', 'dr']:
            assert mat_surf[k].shape == py_surf[k].shape, \
                "Different shape: %s" % k
        assert np.allclose(mat_surf[k], py_surf[k]), "Not equal: %s" % k
Example #11
def save_input_dict(params, basename, test_num):
    """Saves the input data.

    Parameters
    ----------
    params : dict
        Parameters provided by the parameter grid.
    basename : str
        Tag to save the file with.
    test_num : int
        Number of the test.
    """
    filename = datadir(basename + "_" + f"{test_num:02d}" + "_IN.pkl")

    if isinstance(params["surf"], BSPolyData):
        params["surf"] = {
            "tri": np.array(get_cells(params["surf"])) + 1,
            "coord": np.array(get_points(params["surf"])).T,
        }

    with open(filename, "wb") as f:
        pickle.dump(params, f, protocol=4)
Example #12
def test_mesh_elements():
    s = _generate_sphere()

    ee = vtk.vtkExtractEdges()
    ee.SetInputData(s.VTKObject)
    ee.Update()
    ee = wrap_vtk(ee.GetOutput())
    n_edges = ee.n_cells

    assert np.all(me.get_points(s) == s.Points)
    assert np.all(me.get_cells(s) == s.GetCells2D())
    assert me.get_extent(s).shape == (3, )

    pc = me.get_point2cell_connectivity(s)
    assert pc.shape == (s.n_points, s.n_cells)
    assert pc.dtype == np.uint8
    assert np.all(pc.sum(axis=0) == 3)

    cp = me.get_cell2point_connectivity(s)
    assert cp.dtype == np.uint8
    assert (pc - cp.T).nnz == 0

    adj = me.get_immediate_adjacency(s)
    assert adj.shape == (s.n_points, s.n_points)
    assert adj.dtype == np.uint8
    assert adj.nnz == (2 * n_edges + s.n_points)

    adj2 = me.get_immediate_adjacency(s, include_self=False)
    assert adj2.shape == (s.n_points, s.n_points)
    assert adj2.dtype == np.uint8
    assert adj2.nnz == (2 * n_edges)

    radj = me.get_ring_adjacency(s)
    assert radj.dtype == np.uint8
    assert (adj - radj).nnz == 0

    radj2 = me.get_ring_adjacency(s, include_self=False)
    assert radj2.dtype == np.uint8
    assert (adj2 - radj2).nnz == 0

    radj3 = me.get_ring_adjacency(s, n_ring=2, include_self=False)
    assert radj3.dtype == np.uint8
    assert (radj3 - adj2).nnz > 0

    d = me.get_immediate_distance(s)
    assert d.shape == (s.n_points, s.n_points)
    assert d.dtype == float
    assert d.nnz == adj2.nnz

    d2 = me.get_immediate_distance(s, metric='sqeuclidean')
    d_sq = d.copy()
    d_sq.data **= 2
    assert np.allclose(d_sq.A, d2.A)

    rd = me.get_ring_distance(s)
    assert rd.dtype == float
    assert np.allclose(d.A, rd.A)

    rd2 = me.get_ring_distance(s, n_ring=2)
    assert (rd2 - d).nnz > 0

    assert me.get_cell_neighbors(s).shape == (s.n_cells, s.n_cells)
    assert me.get_edges(s).shape == (n_edges, 2)
    assert me.get_edge_length(s).shape == (n_edges, )

    assert me.get_boundary_points(s).size == 0
    assert me.get_boundary_edges(s).size == 0
    assert me.get_boundary_cells(s).size == 0
Example #13
def cortical_ribbon(pial_mesh, wm_mesh, nii, mesh_distance=6):
    """Finds voxels inside of the cortical ribbon.

    Parameters
    ----------
    pial_mesh : BSPolyData
        Pial mesh.
    wm_mesh : BSPolyData
        White matter mesh.
    nii : Nibabel nifti
        Nifti image containing the space in which to output the ribbon.
    mesh_distance : int, optional
        Maximum distance from the cortical mesh at which the ribbon may occur.
        Used to reduce the search space, by default 6.

    Returns
    -------
    numpy.array
        Matrix coordinates of voxels inside the cortical ribbon.
    """

    try:
        import pyembree
    except ImportError:
        raise ModuleNotFoundError(
            "The package pyembree is required for this function. " +
            "You can install it with the conda package manager: " +
            "`conda install -c conda-forge pyembree`.")

    # Get world coordinates.
    x, y, z, _ = np.meshgrid(range(nii.shape[0]), range(nii.shape[1]),
                             range(nii.shape[2]), 0)

    points = np.reshape(np.concatenate((x, y, z), axis=3), (-1, 3), order="F")
    world_coord = nib.affines.apply_affine(nii.affine, points)

    logging.debug(
        "Discarding points that exceed the minima/maxima of the pial mesh.")
    # Discard points that exceed any of the maxima/minima
    pial_points = np.array(get_points(pial_mesh))
    discard = np.any(
        # If points exceed maximum coordinates
        (world_coord > np.amax(pial_points, axis=0)) |
        # If points are lower than minimum coordinates
        (world_coord < np.amin(pial_points, axis=0)),
        axis=1,
    )
    world_coord = world_coord[np.logical_not(discard), :]

    # Discard points that are more than mesh_distance from the pial and wm mesh.
    logging.debug("Discarding points that are too far from the meshes.")
    tree = cKDTree(pial_points)
    mindist_pial, _ = tree.query(world_coord)

    wm_points = np.array(get_points(wm_mesh))
    tree = cKDTree(wm_points)
    mindist_wm, _ = tree.query(world_coord)

    world_coord = world_coord[(mindist_pial < mesh_distance) &
                              (mindist_wm < mesh_distance), :]

    # Check which points are inside pial but not inside WM (i.e. ribbon)
    logging.debug(
        "Retaining only points that are inside the pial but not the WM mesh.")
    pial_trimesh = trimesh.ray.ray_pyembree.RayMeshIntersector(
        trimesh.Trimesh(
            vertices=np.array(get_points(pial_mesh)),
            faces=np.array(get_cells(pial_mesh)),
        ))
    wm_trimesh = trimesh.ray.ray_pyembree.RayMeshIntersector(
        trimesh.Trimesh(vertices=np.array(get_points(wm_mesh)),
                        faces=np.array(get_cells(wm_mesh))))

    inside_wm = wm_trimesh.contains_points(world_coord)
    inside_pial = pial_trimesh.contains_points(world_coord)
    inside_ribbon = world_coord[inside_pial & ~inside_wm, :]
    ribbon_points = nib.affines.apply_affine(np.linalg.inv(nii.affine),
                                             inside_ribbon)
    return ribbon_points
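
The returned voxel coordinates plug directly into `ribbon_interpolation` from Example #2; a sketch with hypothetical meshes and file paths:

import nibabel as nib

nii = nib.load("T1w.nii.gz")  # hypothetical reference volume
ribbon_vox = cortical_ribbon(pial_mesh, wm_mesh, nii)
ribbon_labels = ribbon_interpolation(
    pial_mesh, wm_mesh, "lh.aparc.label.gii", nii, ribbon_vox
)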
Example #14
def generate_tests():
    # Test 01
    # ['tri'] will be a np array, shape (4, 3), int64
    np.random.seed(0)
    rand_dict = {}
    rand_dict["tri"] = np.random.randint(1, int(10), size=(4, 3))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 1)

    # Test 02
    # ['tri'] :np array, shape (4, 3), int64
    # ['resl'] :np array, shape (8, 6), float64
    np.random.seed(0)
    rand_dict = {}
    n_vertices = 6
    rand_dict["tri"] = np.random.randint(1, n_vertices, size=(4, 3))
    rand_dict["resl"] = np.random.random_sample((8, n_vertices))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 2)

    # Test 03
    # ['tri'] :np array, shape (4, 3), int64
    # ['resl'] :np array, shape (8, 6), float64
    # ['mask'] :np array, shape (5,), bool
    np.random.seed(0)
    rand_dict = {}
    n_vertices = 6
    rand_dict["tri"] = np.random.randint(1, n_vertices, size=(4, 3))
    rand_dict["resl"] = np.random.random_sample((8, n_vertices))
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 3)

    # Test 04
    # ['lat'] :np array, shape (10, 10, 10), float64
    np.random.seed(0)
    rand_dict = {}
    rand_dict["lat"] = np.ones((10, 10, 10))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 4)

    # Test 05
    # ['lat'] :np array, shape (10, 10, 10), bool
    np.random.seed(0)
    rand_dict = {}
    rand_dict["lat"] = np.random.choice(a=[False, True], size=(10, 10, 10))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 5)

    # Test 06
    # ['tri'] :np array, shape (1000, 3)
    # ['mask'] :np array, shape (['tri'].max(),), bool
    np.random.seed(0)
    rand_dict = {}
    rand_dict["tri"] = np.random.randint(1, n_vertices, size=(1000, 3))
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 6)

    # Test 07
    # ['lat'] :np array, shape (10, 10, 10), bool
    # ['resl'] :np array, shape (1359, 1), float64
    np.random.seed(0)
    rand_dict = {}
    rand_dict["lat"] = np.random.choice(a=[False, True], size=(10, 10, 10))
    rand_dict["resl"] = np.random.random_sample((1359, 1))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 7)

    # Test 08
    # ['tri'] :np array, shape (1000, 3)
    # ['mask'] :np array, shape (499,), bool
    np.random.seed(1)
    rand_dict = {}
    rand_dict["tri"] = np.random.randint(1, 499, size=(1000, 3))
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 8)

    # Test 09
    # ['lat'] :np array, shape (10, 10, 10), bool
    # ['resl'] :np array, shape (1198, 1), float64
    np.random.seed(1)
    rand_dict = {}
    rand_dict["lat"] = np.random.choice(a=[False, True], size=(10, 10, 10))
    rand_dict["resl"] = np.random.random_sample((1198, 1))
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 9)

    # Test 10
    # ['tri'] is pial_fs5, shape (20480, 3)
    pial_fs5 = datasets.fetch_surf_fsaverage()["pial_left"]
    pial_surf = read_surface_gz(pial_fs5)
    n_vertices = get_points(pial_surf).shape[0]
    rand_dict = {}
    rand_dict["tri"] = np.array(get_cells(pial_surf)) + 1
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 10)

    # Test 11
    # ['tri'] :pial_fs5, shape (20480, 3)
    # ['mask'] :np array, shape (['tri'].max(),), bool
    np.random.seed(0)
    rand_dict = {}
    rand_dict["tri"] = np.array(get_cells(pial_surf)) + 1
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 11)

    # Test 12
    # ['tri'] :pial_fs5, shape (20480, 3) --> shuffle
    # ['mask'] :np array, shape (['tri'].max(),), bool
    np.random.seed(5)
    rand_dict = {}
    rand_dict["tri"] = np.array(get_cells(pial_surf)) + 1
    np.random.shuffle(rand_dict["tri"])
    rand_dict["mask"] = np.random.choice(
        a=[True, False], size=(rand_dict["tri"].max(),)
    )
    In, Out = generate_random_slm(rand_dict)
    params2files(In, Out, 12)
Example #15
def generate_test_data():
    ## Fetch global data and settings.
    # Global test settings
    basename = "xstatp"
    test_num = 1
    np.random.seed(0)

    # Fetch surface data.
    pial_fs5 = datasets.fetch_surf_fsaverage()["pial_left"]
    pial_surf = read_surface_gz(pial_fs5)
    n_vertices = get_points(pial_surf).shape[0]

    ## Define the test parameter grids.
    # Variable types to test
    var_types = {
        "t": [np.float64],
        "df": [int, np.uint16],
        "k": [int, np.uint8],
        "resl": [np.float64],
        "tri": [np.int64],
    }
    type_parameter_grid = ParameterGrid(var_types)

    # Optional variable test.
    var_optional = {
        "dfs": [None, np.random.randint(1, 100, (1, n_vertices))],
        "cluster_threshold": [0.1, 2],
        "mask": [None, np.random.rand(n_vertices) > 0.1],
        "k": [1, 3],
    }

    # Nonsense variables to add.
    var_nonsense = ["X", "coef", "SSE", "c", "ef", "sd"]

    ## Generate test data
    # Variable type tests
    for params in type_parameter_grid:
        slm = generate_random_slm(pial_surf)
        for key in list(params.keys()):
            attr = getattr(slm, key)
            setattr(slm, key, params[key](attr))
        slm2files(slm, basename, test_num)
        test_num += 1

    # Additional variable tests.
    additional_parameter_grid = ParameterGrid(var_optional)
    for params in additional_parameter_grid:
        slm = generate_random_slm(pial_surf)
        for key in list(params.keys()):
            setattr(slm, key, params[key])
        slm2files(slm, basename, test_num)
        test_num += 1

    # Nonsense variable tests.
    slm = generate_random_slm(pial_surf)
    slm.dfs = np.random.randint(1, 100, (1, n_vertices))
    slm.mask = np.random.rand(n_vertices) > 0.1
    for key in var_nonsense:
        if getattr(slm, key) is None:
            setattr(
                slm,
                key,
                np.random.rand(np.random.randint(1, 10),
                               np.random.randint(1, 10)),
            )
    slm2files(slm, basename, test_num)
    test_num += 1
Example #16
print(C64k_all.shape, C64k_all.mean(axis=0).max())

sys.path.append("/data/p_02323/BrainStat/surfstat/")
sys.path.append("/data/p_02323/BrainStat/surfstat/python")
from SurfStatLinMod import py_SurfStatLinMod
from SurfStatT import py_SurfStatT
from SurfStatSmooth import py_SurfStatSmooth
from term import Term
from brainspace.mesh import mesh_elements
from brainspace.datasets import load_conte69

# load poly data for 64k surface (for the test & plotting)
surf_lh, surf_rh = load_conte69()

# write surface coordinates and triangles in a dictionary
lh_coord = np.array(mesh_elements.get_points(surf_lh)).T
rh_coord = np.array(mesh_elements.get_points(surf_rh)).T
lh_tri = np.array(mesh_elements.get_cells(surf_lh))
rh_tri = np.array(mesh_elements.get_cells(surf_rh))

D = {}
D['coord'] = np.concatenate((lh_coord, rh_coord), axis=1)  # (3, 64984)
D['tri'] = np.concatenate((lh_tri, rh_tri + lh_coord.shape[1]))  # (129960, 3)

# Test
Y = C64k_all
contrast = np.ones((len(mylist), 1))
M = 1 + Term(contrast)
slm = py_SurfStatLinMod(Y, contrast, D)
slm = py_SurfStatT(slm, contrast)
T = slm['t']