def get_roi_center(roi_native_path, roi_mni_path):
    """Get ROI center of mass.
    Get back coordinate in img space and in coordinate space.
    Also actual center of mass.
    """
    # computations in native space
    if type(roi_native_path) is str:
        img = nib.load(roi_native_path)
    else:
        img = roi_native_path
    data = img.get_data()
    data = as_ndarray(data)
    my_map = data.copy()
    center_coords = ndimage.center_of_mass(np.abs(my_map))

    x_map, y_map, z_map = center_coords[:3]
    native_coords = np.asarray(coord_transform(x_map, y_map, z_map,
                                               img.get_affine())).tolist()
    voxel = [round(x) for x in center_coords]
    # computations in mni space
    if type(roi_mni_path) is str:
        img = nib.load(roi_mni_path)
    else:
        img = roi_mni_path
    data = img.get_data()
    data = as_ndarray(data)
    my_map = data.copy()
    mni_center_coords = ndimage.center_of_mass(np.abs(my_map))
    x_map, y_map, z_map = mni_center_coords[:3]
    mni_coords = np.asarray(coord_transform(x_map, y_map, z_map,
                                            img.get_affine())).tolist()
    # returns voxel and true center mass coords
    # returns also native and mni space coords
    return (voxel[:3], center_coords[:3], [round(x) for x in native_coords],
            [round(x) for x in mni_coords])
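A minimal usage sketch of the function above (the file paths are hypothetical; nib, np, ndimage, as_ndarray and coord_transform are assumed to be imported as in the snippet):
voxel, true_com, native_mm, mni_mm = get_roi_center(
    "roi_native.nii.gz",   # hypothetical native-space ROI image
    "roi_mni.nii.gz")      # hypothetical MNI-space ROI image
print("voxel indices:", voxel)
print("center of mass (voxel space):", true_com)
print("native-space coordinates (mm):", native_mm)
print("MNI coordinates (mm):", mni_mm)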
Example 2
def test_coord_transform_trivial():
    sform = np.eye(4)
    x = np.random.random((10, ))
    y = np.random.random((10, ))
    z = np.random.random((10, ))

    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x, x_)
    np.testing.assert_array_equal(y, y_)
    np.testing.assert_array_equal(z, z_)

    sform[:, -1] = 1
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)

    # Test the output in case of one item array
    x, y, z = x[:1], y[:1], z[:1]
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)

    # Test the output in case of simple items
    x, y, z = x[0], y[0], z[0]
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)
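The trivial tests above hold because coord_transform simply applies the affine as a homogeneous transform; a minimal standalone sketch of that equivalence (toy affine, numpy plus nilearn's coord_transform):
import numpy as np
from nilearn.image.resampling import coord_transform

affine = np.array([[2., 0., 0., -90.],
                   [0., 2., 0., -126.],
                   [0., 0., 2., -72.],
                   [0., 0., 0., 1.]])
i, j, k = 10, 20, 30                            # voxel indices
x, y, z = coord_transform(i, j, k, affine)      # world-space mm
# Same result by hand, using homogeneous coordinates
expected = affine.dot([i, j, k, 1])[:3]
np.testing.assert_array_almost_equal([x, y, z], expected)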
Example 3
def test_coord_transform_trivial():
    sform = np.eye(4)
    x = np.random.random((10,))
    y = np.random.random((10,))
    z = np.random.random((10,))

    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x, x_)
    np.testing.assert_array_equal(y, y_)
    np.testing.assert_array_equal(z, z_)

    sform[:, -1] = 1
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)

    # Test the output in case of one item array
    x, y, z = x[:1], y[:1], z[:1]
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)

    # Test the output in case of simple items
    x, y, z = x[0], y[0], z[0]
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)
Example 4
def _get_mask_measures(mask_file):
    """ Outputs the mask

    Parameters
    ----------
    mask_file : str
        Path to the mask image
    """
    mask_img = nibabel.load(mask_file)
    volume = _get_volume(mask_img)

    mask_data = mask_img.get_data()
    i, j, k = np.where(mask_data != 0)
    voxels_coords = np.array(coord_transform(i, j, k, mask_img.affine)).T
    positions = np.vstack((i, j, k)).T
    center = ndimage.center_of_mass(mask_data)
    center_coords = np.array(
        coord_transform(center[0], center[1], center[2], mask_img.affine)).T
    positions = voxels_coords - center_coords  # TODO: check why not voxels_coords.mean(axis=0)
    inertia_matrix = -positions.T.dot(positions) / float(len(positions))
    total_sum = -np.diag(inertia_matrix).sum()
    inertia_matrix += np.eye(3) * total_sum
    _, eigvecs = np.linalg.eigh(inertia_matrix)
    axis_ap, axis_rl, axis_is = eigvecs.T

    # Translation to image centroid
    translation = np.eye(4)
    translation[:3, 3] = -center_coords  #voxels_coords.mean(axis=0)
    # Reorientation with respect to the principal axes
    reorientation = np.eye(4)
    reorientation[:3, 0] = axis_rl
    reorientation[:3, 1] = axis_ap
    reorientation[:3, 2] = axis_is
    affine = np.linalg.inv(translation).dot(reorientation).dot(
        translation).dot(mask_img.affine)
    reoriented_img = resample_img(mask_img, affine, interpolation='nearest')
    zooms = reoriented_img.header.get_zooms()
    reoriented_data = reoriented_img.get_data()
    reoriented_center = ndimage.center_of_mass(reoriented_data)
    reoriented_center = np.array(reoriented_center, dtype=int)
    length_rl = zooms[0] * reoriented_data[:, reoriented_center[1],
                                           reoriented_center[2]].sum()
    length_ap = zooms[1] * reoriented_data[reoriented_center[0], :,
                                           reoriented_center[2]].sum()
    length_is = zooms[2] * reoriented_data[reoriented_center[0],
                                           reoriented_center[1]].sum()

    # Reflection along the RL axis
    reflection = np.eye(4)
    reflection[0, 0] = -1
    affine = np.linalg.inv(translation).dot(reorientation).dot(reflection).dot(
        translation).dot(mask_img.affine)
    reflected_reoriented_img = resample_img(mask_img,
                                            target_affine=affine,
                                            target_shape=reoriented_data.shape,
                                            interpolation='nearest')
    reflected_reoriented_data = reflected_reoriented_img.get_data()
    symmetry = stats.pearsonr(reoriented_data.ravel(),
                              reflected_reoriented_data.ravel())[0]
    return (length_ap, length_rl, length_is, symmetry, volume)
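A minimal sketch of the principal-axis step used above, on a synthetic elongated point cloud (numpy only; it assumes, as the function does, that eigh's ascending eigenvalue order maps the smallest inertia eigenvalue to the longest axis):
import numpy as np

rng = np.random.RandomState(0)
# Synthetic cloud elongated along y (std 1 along x, 5 along y, 2 along z)
points = rng.randn(1000, 3) * [1., 5., 2.]
positions = points - points.mean(axis=0)

covariance = positions.T.dot(positions) / float(len(positions))
inertia = np.trace(covariance) * np.eye(3) - covariance   # same matrix as above
_, eigvecs = np.linalg.eigh(inertia)
# eigh returns ascending eigenvalues: the smallest inertia eigenvalue
# corresponds to the direction of largest spread (the longest axis).
longest_axis = eigvecs[:, 0]
print(np.round(np.abs(longest_axis)))   # approximately [0., 1., 0.]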
Example 5
def test_sample_locations():
    # check positions of samples on toy example, with an affine != identity
    # flat horizontal mesh
    mesh = flat_mesh(5, 7)
    affine = np.diagflat([10, 20, 30, 1])
    inv_affine = np.linalg.inv(affine)
    # transform vertices to world space
    vertices = np.asarray(
        resampling.coord_transform(*mesh[0].T, affine=affine)).T
    # compute by hand the true offsets in voxel space
    # (transformed by affine^-1)
    ball_offsets = surface._load_uniform_ball_cloud(10)
    ball_offsets = np.asarray(
        resampling.coord_transform(*ball_offsets.T, affine=inv_affine)).T
    line_offsets = np.zeros((10, 3))
    line_offsets[:, 2] = np.linspace(1, -1, 10)
    line_offsets = np.asarray(
        resampling.coord_transform(*line_offsets.T, affine=inv_affine)).T
    # check we get the same locations
    for kind, offsets in [('line', line_offsets), ('ball', ball_offsets)]:
        locations = surface._sample_locations(
            [vertices, mesh[1]], affine, 1., kind=kind, n_points=10)
        true_locations = np.asarray([vertex + offsets for vertex in mesh[0]])
        assert_array_equal(locations.shape, true_locations.shape)
        assert_array_almost_equal(true_locations, locations)
    pytest.raises(ValueError, surface._sample_locations,
                  mesh, affine, 1., kind='bad_kind')
Example 6
def test_sample_locations():
    # check positions of samples on toy example, with an affine != identity
    # flat horizontal mesh
    mesh = flat_mesh(5, 7)
    affine = np.diagflat([10, 20, 30, 1])
    inv_affine = np.linalg.inv(affine)
    # transform vertices to world space
    vertices = np.asarray(
        resampling.coord_transform(*mesh[0].T, affine=affine)).T
    # compute by hand the true offsets in voxel space
    # (transformed by affine^-1)
    ball_offsets = surface._load_uniform_ball_cloud(10)
    ball_offsets = np.asarray(
        resampling.coord_transform(*ball_offsets.T, affine=inv_affine)).T
    line_offsets = np.zeros((10, 3))
    line_offsets[:, 2] = np.linspace(-1, 1, 10)
    line_offsets = np.asarray(
        resampling.coord_transform(*line_offsets.T, affine=inv_affine)).T
    # check we get the same locations
    for kind, offsets in [('line', line_offsets), ('ball', ball_offsets)]:
        locations = surface._sample_locations(
            [vertices, mesh[1]], affine, 1., kind=kind, n_points=10)
        true_locations = np.asarray([vertex + offsets for vertex in mesh[0]])
        assert_array_equal(locations.shape, true_locations.shape)
        assert_array_almost_equal(true_locations, locations)
    assert_raises(ValueError, surface._sample_locations,
                  mesh, affine, 1., kind='bad_kind')
Example 7
def get_roi_center(roi_native_path, roi_mni_path):
    """Get ROI center of mass.
    Get back coordinate in img space and in coordinate space.
    Also actual center of mass.
    """
    # computations in native space
    if type(roi_native_path) is str:
        img = nib.load(roi_native_path)
    else:
        img = roi_native_path
    data = img.get_data()
    data = as_ndarray(data)
    my_map = data.copy()
    center_coords = ndimage.center_of_mass(np.abs(my_map))

    x_map, y_map, z_map = center_coords[:3]
    native_coords = np.asarray(
        coord_transform(x_map, y_map, z_map, img.get_affine())).tolist()
    voxel = [round(x) for x in center_coords]
    # computations in mni space
    if type(roi_mni_path) is str:
        img = nib.load(roi_mni_path)
    else:
        img = roi_mni_path
    data = img.get_data()
    data = as_ndarray(data)
    my_map = data.copy()
    mni_center_coords = ndimage.center_of_mass(np.abs(my_map))
    x_map, y_map, z_map = mni_center_coords[:3]
    mni_coords = np.asarray(
        coord_transform(x_map, y_map, z_map, img.get_affine())).tolist()
    # returns voxel and true center mass coords
    # returns also native and mni space coords
    return (voxel[:3], center_coords[:3], [round(x) for x in native_coords],
            [round(x) for x in mni_coords])
Example 8
def _ball_sample_locations(mesh,
                           affine,
                           ball_radius=3.,
                           n_points=20,
                           depth=None):
    """Locations to draw samples from to project volume data onto a mesh.

    For each mesh vertex, the locations of `n_points` points evenly spread in a
    ball around the vertex are returned.

    Parameters
    ----------
    mesh : pair of np arrays.
        `mesh[0]` contains the 3d coordinates of the vertices
        (shape n_vertices, 3)
        `mesh[1]` contains, for each triangle, the indices into `mesh[0]` of its
        vertices (shape n_triangles, 3)

    affine : array of shape (4, 4)
        Affine transformation from image voxels to the vertices' coordinate
        space.

    ball_radius : float, optional
        Size in mm of the neighbourhood around each vertex in which to draw
        samples. Default=3.0.

    n_points : int, optional
        Number of samples to draw for each vertex. Default=20.

    depth : None
        Raises a ValueError if not None because incompatible with this sampling
        strategy.

    Returns
    -------
    sample_location_voxel_space : numpy array, shape (n_vertices, n_points, 3)
        The locations, in voxel space, from which to draw samples.
        First dimension iterates over mesh vertices, second dimension iterates
        over the sample points associated to a vertex, third dimension is x, y,
        z in voxel space.

    """
    if depth is not None:
        raise ValueError("The 'ball' sampling strategy does not support "
                         "the 'depth' parameter")
    vertices, faces = mesh
    offsets_world_space = _load_uniform_ball_cloud(
        n_points=n_points) * ball_radius
    mesh_voxel_space = np.asarray(
        resampling.coord_transform(*vertices.T,
                                   affine=np.linalg.inv(affine))).T
    linear_map = np.eye(affine.shape[0])
    linear_map[:-1, :-1] = affine[:-1, :-1]
    offsets_voxel_space = np.asarray(
        resampling.coord_transform(*offsets_world_space.T,
                                   affine=np.linalg.inv(linear_map))).T
    sample_locations_voxel_space = (mesh_voxel_space[:, np.newaxis, :] +
                                    offsets_voxel_space[np.newaxis, :])
    return sample_locations_voxel_space
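A minimal sketch of the same two-step mapping with plain numpy (random in-ball points stand in for nilearn's private _load_uniform_ball_cloud; the vertices and affine are toy values):
import numpy as np
from nilearn.image.resampling import coord_transform

rng = np.random.RandomState(0)
vertices = np.array([[0., 0., 0.], [10., 0., 0.]])   # toy mesh vertices (mm)
affine = np.diag([2., 2., 2., 1.])                   # 2 mm isotropic voxels
ball_radius = 3.
n_points = 20

# Random offsets inside a unit ball, scaled to the requested radius (mm)
offsets = rng.uniform(-1, 1, size=(10 * n_points, 3))
offsets = offsets[np.linalg.norm(offsets, axis=1) <= 1][:n_points] * ball_radius

# Vertices go to voxel space with the full inverse affine ...
vertices_vox = np.asarray(
    coord_transform(*vertices.T, affine=np.linalg.inv(affine))).T
# ... offsets only with the inverse of the linear part, since they are
# displacements and must not be translated.
linear = np.eye(4)
linear[:3, :3] = affine[:3, :3]
offsets_vox = np.asarray(
    coord_transform(*offsets.T, affine=np.linalg.inv(linear))).T

sample_locations = vertices_vox[:, np.newaxis, :] + offsets_vox[np.newaxis, :, :]
print(sample_locations.shape)   # (2, n_points, 3)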
Example 9
def _get_affinity(seeds, coords, radius, allow_overlap, affine, mask_img=None):
    
    seeds = list(seeds)

    # Compute world coordinates of all in-mask voxels.           
    mask_coords = list(zip(*coords.T))
    # For each seed, get coordinates of nearest voxel
    nearests = []
    for sx, sy, sz in seeds:
        nearest = np.round(coord_transform(sx, sy, sz, np.linalg.inv(affine)))
        nearest = nearest.astype(int)
        nearest = (nearest[0], nearest[1], nearest[2])
        try:
            nearests.append(mask_coords.index(nearest))
        except ValueError:
            nearests.append(None)

    mask_coords = np.asarray(list(zip(*mask_coords)))
    mask_coords = coord_transform(mask_coords[0], mask_coords[1],
                                  mask_coords[2], affine)
    mask_coords = np.asarray(mask_coords).T

    if (radius is not None and
            LooseVersion(sklearn.__version__) < LooseVersion('0.16')):
        # Fix for scikit learn versions below 0.16. See
        # https://github.com/scikit-learn/scikit-learn/issues/4072
        radius += 1e-6

    clf = neighbors.NearestNeighbors(radius=radius)
    A = clf.fit(mask_coords).radius_neighbors_graph(seeds)
    A = A.tolil()
    for i, nearest in enumerate(nearests):
        if nearest is None:
            continue
        A[i, nearest] = True

    # Include the voxel containing the seed itself if not masked
    mask_coords = mask_coords.astype(int).tolist()
    for i, seed in enumerate(seeds):
        try:
            A[i, mask_coords.index(seed)] = True
        except ValueError:
            # seed is not in the mask
            pass

    if not allow_overlap:
        if np.any(A.sum(axis=0) >= 2):
            raise ValueError('Overlap detected between spheres')

    return A
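A minimal sketch of the sphere-membership step at the heart of the function above (scikit-learn's radius_neighbors_graph on toy world coordinates):
import numpy as np
from sklearn import neighbors

# Toy in-mask voxel coordinates (mm) on a 4 mm grid along x
mask_coords = np.array([[0., 0., 0.], [4., 0., 0.], [8., 0., 0.], [12., 0., 0.]])
seeds = np.array([[0., 0., 0.], [12., 0., 0.]])

clf = neighbors.NearestNeighbors(radius=5.)
A = clf.fit(mask_coords).radius_neighbors_graph(seeds)
print(A.toarray())
# [[1. 1. 0. 0.]   voxels within 5 mm of the first seed
#  [0. 0. 1. 1.]]  voxels within 5 mm of the second seed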
Example 10
def test_coord_transform_trivial():
    sform = np.eye(4)
    x = np.random.random((10,))
    y = np.random.random((10,))
    z = np.random.random((10,))

    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x, x_)
    np.testing.assert_array_equal(y, y_)
    np.testing.assert_array_equal(z, z_)

    sform[:, -1] = 1
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)
Example 11
def index_to_xy_coord(x, y, z=10):
    '''Transforms data index to coordinates of the background + offset'''
    coords = coord_transform(x,
                             y,
                             z,
                             affine=thresholded_score_map_img.get_affine())
    return np.array(coords)[np.newaxis, :] + np.array([0, 1, 0])
Example 12
def test_coord_transform_trivial():
    sform = np.eye(4)
    x = np.random.random((10, ))
    y = np.random.random((10, ))
    z = np.random.random((10, ))

    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x, x_)
    np.testing.assert_array_equal(y, y_)
    np.testing.assert_array_equal(z, z_)

    sform[:, -1] = 1
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)
Example 13
def _line_sample_locations(mesh,
                           affine,
                           segment_half_width=3.,
                           n_points=10,
                           depth=None):
    """Locations to draw samples from to project volume data onto a mesh.

    For each mesh vertex, the locations of `n_points` points evenly spread in a
    segment of the normal to the vertex are returned. The line segment has
    length 2 * `segment_half_width` and is centered at the vertex.

    Parameters
    ----------
    mesh : pair of numpy.ndarray
        `mesh[0]` contains the 3d coordinates of the vertices
        (shape n_vertices, 3)
        `mesh[1]` contains, for each triangle, the indices into `mesh[0]` of its
        vertices (shape n_triangles, 3)

    affine : numpy.ndarray of shape (4, 4)
        Affine transformation from image voxels to the vertices' coordinate
        space.

    segment_half_width : float, optional
        Size in mm of the neighbourhood around each vertex in which to draw
        samples. Default=3.0.

    n_points : int, optional
        Number of samples to draw for each vertex. Default=10.

    depth : sequence of floats or None, optional
        Cortical depth, expressed as a fraction of segment_half_width.
        Overrides n_points.

    Returns
    -------
    sample_location_voxel_space : numpy array, shape (n_vertices, n_points, 3)
        The locations, in voxel space, from which to draw samples.
        First dimension iterates over mesh vertices, second dimension iterates
        over the sample points associated to a vertex, third dimension is x, y,
        z in voxel space.

    """
    vertices, faces = mesh
    normals = _vertex_outer_normals(mesh)
    if depth is None:
        offsets = np.linspace(segment_half_width, -segment_half_width,
                              n_points)
    else:
        offsets = -segment_half_width * np.asarray(depth)
    sample_locations = vertices[
        np.newaxis, :, :] + normals * offsets[:, np.newaxis, np.newaxis]
    sample_locations = np.rollaxis(sample_locations, 1)
    sample_locations_voxel_space = np.asarray(
        resampling.coord_transform(*np.vstack(sample_locations).T,
                                   affine=np.linalg.inv(affine))).T.reshape(
                                       sample_locations.shape)
    return sample_locations_voxel_space
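A minimal sketch of the offset construction alone, before the voxel-space conversion (toy vertices and unit normals, numpy only):
import numpy as np

vertices = np.array([[0., 0., 0.], [10., 0., 0.]])   # toy vertex coordinates (mm)
normals = np.array([[0., 0., 1.], [0., 0., 1.]])     # toy unit normals
segment_half_width, n_points = 3., 5

offsets = np.linspace(segment_half_width, -segment_half_width, n_points)
sample_locations = vertices[np.newaxis, :, :] + \
    normals * offsets[:, np.newaxis, np.newaxis]
sample_locations = np.rollaxis(sample_locations, 1)  # (n_vertices, n_points, 3)
print(sample_locations[0, :, 2])                     # [ 3.   1.5  0.  -1.5 -3. ]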
Example 14
def apply_mask_and_get_affinity(seeds,
                                niimg,
                                radius,
                                allow_overlap,
                                n_jobs=1,
                                mask_img=None):
    import time
    start = time.time()

    seeds = list(seeds)
    affine = niimg.affine

    # Compute world coordinates of all in-mask voxels.
    mask_img = check_niimg_3d(mask_img)
    mask_img = image.resample_img(mask_img,
                                  target_affine=affine,
                                  target_shape=niimg.shape[:3],
                                  interpolation='nearest')
    mask, _ = masking._load_mask_img(mask_img)
    mask_coords = list(zip(*np.where(mask != 0)))

    X = masking._apply_mask_fmri(niimg, mask_img)

    # For each seed, get coordinates of nearest voxel
    nearests = joblib.Parallel(n_jobs=n_jobs)(
        joblib.delayed(seed_nearest)(seed_chunk, affine, mask_coords)
        for thread_id, seed_chunk in enumerate(np.array_split(seeds, n_jobs)))
    nearests = [i for j in nearests for i in j]

    mask_coords = np.asarray(list(zip(*mask_coords)))
    mask_coords = coord_transform(mask_coords[0], mask_coords[1],
                                  mask_coords[2], affine)
    mask_coords = np.asarray(mask_coords).T

    clf = neighbors.NearestNeighbors(radius=radius)
    A = clf.fit(mask_coords).radius_neighbors_graph(seeds)
    A = A.tolil()
    for i, nearest in enumerate(nearests):
        if nearest is None:
            continue
        A[i, nearest] = True
    # Include the voxel containing the seed itself if not masked
    mask_coords = mask_coords.astype(int).tolist()
    for i, seed in enumerate(seeds):
        try:
            A[i, mask_coords.index(seed)] = True
        except ValueError:
            # seed is not in the mask
            pass

    if not allow_overlap:
        if np.any(A.sum(axis=0) >= 2):
            raise ValueError('Overlap detected between spheres')

    return X, A
Example 15
def demo_plot_roi(**kwargs):
    """ Demo plotting an ROI
    """
    mni_affine = MNI152TEMPLATE.get_affine()
    data = np.zeros((91, 109, 91))
    # Color an asymmetric rectangle around Broca's area:
    x, y, z = -52, 10, 22
    x_map, y_map, z_map = coord_transform(x, y, z, np.linalg.inv(mni_affine))
    data[int(x_map) - 5 : int(x_map) + 5, int(y_map) - 3 : int(y_map) + 3, int(z_map) - 10 : int(z_map) + 10] = 1
    img = nibabel.Nifti1Image(data, mni_affine)
    return plot_roi(img, title="Broca's area", **kwargs)
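The np.linalg.inv call above is what turns MNI millimetres back into voxel indices; a minimal standalone sketch (the 2 mm MNI-like affine is assumed here rather than read from the template):
import numpy as np
from nilearn.image.resampling import coord_transform

mni_affine = np.array([[-2., 0., 0., 90.],
                       [0., 2., 0., -126.],
                       [0., 0., 2., -72.],
                       [0., 0., 0., 1.]])
x, y, z = -52., 10., 22.                               # MNI mm
i, j, k = coord_transform(x, y, z, np.linalg.inv(mni_affine))
print(int(i), int(j), int(k))                          # voxel indices: 71 68 47
# Round-trip back to mm recovers the original coordinates
print(np.asarray(coord_transform(int(i), int(j), int(k), mni_affine)))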
Example 16
def coordinate_label(mni_coord, atlas='aal', thresh=None, ret_proba=False):

    if atlas == 'aal':
        atl = datasets.fetch_atlas_aal()
        atl.prob = False
    elif atlas == 'harvard_oxford':
        atl = datasets.fetch_atlas_harvard_oxford('cort-prob-2mm')
        atl.prob = True

    elif atlas == 'destrieux':
        atl = datasets.fetch_atlas_destrieux_2009()
        atl.indices = atl.labels['index']
        atl.labels = atl.labels['name']
        atl.prob = False

    atl_map = load_img(atl.maps)
    atl_aff = atl_map.affine

    if atl.prob == True:
        atl_labels = atl.labels
    if atl.prob == False:
        atl_labels = atl.labels
        atl_indices = atl.indices

    labels_out = list()

    for coord in mni_coord:

        mat_coord = np.asarray(resampling.coord_transform(
            coord[0], coord[1], coord[2], np.linalg.inv(atl_aff)),
                               dtype=int)

        if atl.prob == True and ret_proba == True:

            lab_out = get_prob_atlas_label(atl_map,
                                           atl_labels,
                                           mat_coord,
                                           thresh=thresh)

        elif atl.prob == True and ret_proba == False:

            lab_out, _ = get_prob_atlas_label(atl_map,
                                              atl_labels,
                                              mat_coord,
                                              thresh=thresh)

        elif atl.prob == False:

            lab_out = get_atlas_label(atl_map, atl_labels, atl_indices,
                                      mat_coord)

        labels_out.append(lab_out)

    return labels_out
Example 17
def _get_volume(img,
                threshold=0,
                atlas=None,
                stride=1,
                t_start=0,
                t_end=-1,
                n_t=50,
                t_r=None,
                marker_size=3,
                cmap=cm.cold_hot,
                symmetric_cmap=True,
                vmax=None,
                vmin=None):
    connectome = {}
    img = check_niimg_4d(img)
    t_unit = "" if not t_r else " s"
    if not t_r:
        t_r = 1
    if t_end < 0:
        t_end = img.shape[3] + t_end
    if not n_t:
        n_t = t_end - t_start
    t_idx = np.round(np.linspace(t_start, t_end, n_t)).astype(int)
    t_labels = [str(t_r * t) + t_unit for t in t_idx]
    data = _safe_get_data(img)[::stride, ::stride, ::stride, t_idx]
    mask = np.abs(data[:, :, :, 0]) > threshold
    i, j, k = mask.nonzero()
    x, y, z = coord_transform(i * stride, j * stride, k * stride, img.affine)
    for coord, cname in [(x, "x"), (y, "y"), (z, "z")]:
        connectome["_con_{}".format(cname)] = encode(
            np.asarray(coord, dtype='<f4'))
    colors = colorscale(cmap,
                        data.ravel(),
                        symmetric_cmap=symmetric_cmap,
                        vmax=vmax,
                        vmin=vmin)
    if atlas:
        atlas = check_niimg_3d(atlas)
        atlas_data = _safe_get_data(atlas)[::stride, ::stride, ::stride]
        connectome['atlas'] = encode(
            np.asarray(atlas_data[i, j, k], dtype='<f4'))
        connectome['atlas_nb'] = int(np.max(atlas_data))
    connectome['colorscale'] = colors['colors']
    connectome['cmin'] = float(colors['vmin'])
    connectome['cmax'] = float(colors['vmax'])
    connectome['n_time'] = n_t
    connectome['t_labels'] = t_labels
    values = [
        encode(np.asarray(data[i, j, k, t], dtype='<f4'))
        for t in range(data.shape[3])
    ]
    connectome['values'] = values

    return connectome
Example 18
def demo_plot_roi(**kwargs):
    """Demo plotting an ROI."""
    data = np.zeros((91, 109, 91))
    # Color an asymmetric rectangle around Broca's area.
    x, y, z = -52, 10, 22
    x_map, y_map, z_map = coord_transform(x, y, z, np.linalg.inv(MNI_AFFINE))
    data[int(x_map) - 5:int(x_map) + 5,
         int(y_map) - 3:int(y_map) + 3,
         int(z_map) - 10:int(z_map) + 10] = 1
    img = Nifti1Image(data, MNI_AFFINE)
    return plot_roi(img, title="Broca's area", **kwargs)
Example 19
def test_plot_roi_contours():
    display = plot_roi(None)
    data = np.zeros((91, 109, 91))
    x, y, z = -52, 10, 22
    x_map, y_map, z_map = coord_transform(x, y, z, np.linalg.inv(mni_affine))
    data[int(x_map) - 5:int(x_map) + 5,
         int(y_map) - 3:int(y_map) + 3,
         int(z_map) - 10:int(z_map) + 10] = 1
    img = nibabel.Nifti1Image(data, mni_affine)
    plot_roi(img, cmap='RdBu', alpha=0.1, view_type='contours', linewidths=2.)
    plt.close()
Example 20
def demo_plot_roi(**kwargs):
    """ Demo plotting an ROI
    """
    mni_affine = MNI152TEMPLATE.get_affine()
    data = np.zeros((91, 109, 91))
    # Color an asymmetric rectangle around Broca's area:
    x, y, z = -52, 10, 22
    x_map, y_map, z_map = coord_transform(x, y, z, np.linalg.inv(mni_affine))
    data[int(x_map) - 5:int(x_map) + 5,
         int(y_map) - 3:int(y_map) + 3,
         int(z_map) - 10:int(z_map) + 10] = 1
    img = nibabel.Nifti1Image(data, mni_affine)
    return plot_roi(img, title="Broca's area", **kwargs)
Example 21
def test_coord_transform_trivial():
    rng = np.random.RandomState(42)
    sform = np.eye(4)
    x = rng.random_sample((10, ))
    y = rng.random_sample((10, ))
    z = rng.random_sample((10, ))

    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x, x_)
    np.testing.assert_array_equal(y, y_)
    np.testing.assert_array_equal(z, z_)

    sform[:, -1] = 1
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)

    # Test the output in case of one item array
    x, y, z = x[:1], y[:1], z[:1]
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)

    # Test the output in case of simple items
    x, y, z = x[0], y[0], z[0]
    x_, y_, z_ = coord_transform(x, y, z, sform)
    np.testing.assert_array_equal(x + 1, x_)
    np.testing.assert_array_equal(y + 1, y_)
    np.testing.assert_array_equal(z + 1, z_)

    # Test the outputs have the same shape as the inputs
    x = np.ones((3, 2, 4))
    y = np.ones((3, 2, 4))
    z = np.ones((3, 2, 4))
    x_, y_, z_ = coord_transform(x, y, z, sform)
    assert x.shape == x_.shape
Example 22
    def fit(self, imgs, y, y_mask=None, groups=None, X=None, A=None):

        # check if image is 4D
        imgs = check_niimg_4d(imgs)

        # Get the seeds
        process_mask_img = self.process_mask_img
        if self.process_mask_img is None:
            process_mask_img = self.mask_img

        # Compute world coordinates of the seeds
        process_mask, process_mask_affine = masking._load_mask_img(
            process_mask_img)
        process_mask_coords = np.where(process_mask != 0)
        process_mask_coords = coord_transform(process_mask_coords[0],
                                              process_mask_coords[1],
                                              process_mask_coords[2],
                                              process_mask_affine)
        process_mask_coords = np.asarray(process_mask_coords).T

        import time

        start = time.time()
        print("GETTING SEARCHLIGHT SPHERES")

        X, A = apply_mask_and_get_affinity(process_mask_coords,
                                           imgs,
                                           self.radius,
                                           True,
                                           mask_img=self.mask_img,
                                           n_jobs=self.n_jobs)

        self.X = X
        self.A = A
        self.y = y
        self.process_mask = process_mask

        elapsed = time.time() - start
        print(elapsed)

        print("FITTING")
        scores = search_light_rsa(X, y, None, A, y_mask, self.n_jobs,
                                  self.verbose)

        scores_3D = np.zeros((process_mask.shape) + (len(y), ))
        for i in range(len(y)):
            scores_3D[process_mask, i] = scores[:, i]
        self.scores_ = scores_3D
        return self
Example 23
    def fit(self, imgs, y, groups=None):
        """Fit the spatiotemporal searchlight

        Parameters
        ----------
        imgs : Niimg-like object
            See http://nilearn.github.io/manipulating_images/input_output.html
            Must be a 5D image: [x voxels, y voxels, z voxels, trials, time series of each trial].

        y : 1D array-like
            Target variable to predict. Must have exactly as many elements as trials in imgs.

        groups : array-like, optional
            Group label for each sample, used for cross-validation. Must have
            exactly as many elements as trials in imgs. Default None.
            NOTE: has no effect for scikit-learn < 0.18 (as noted in nilearn).
        """

        # check that the image is 5D
        if len(imgs.shape) != 5:
            raise ValueError('This SpatioTemporalSearchLight instance needs a 5D image.')

        # Get the seeds
        process_mask_img = self.mask_img

        # Compute world coordinates of the seeds
        process_mask, process_mask_affine = masking._load_mask_img(process_mask_img)
        process_mask_coords = np.where(process_mask != 0)
        process_mask_coords = coord_transform(
            process_mask_coords[0], process_mask_coords[1],
            process_mask_coords[2], process_mask_affine)
        process_mask_coords = np.asarray(process_mask_coords).T

        X, A = _apply_mask_and_get_affinity(
            process_mask_coords, imgs, self.radius, True,
            mask_img=self.mask_img)

        # Run Searchlight
        scores = self._search_light(X, y, A, groups=groups)
        scores_3D = np.zeros(process_mask.shape)
        scores_3D[process_mask] = scores

        self.scores_ = scores_3D

        return self
Example 24
def test_outlier_cut_coords():
    """Test to plot a subset of a large set of cuts found for a small area."""
    bg_img = load_mni152_template()
    data = np.zeros((79, 95, 79))
    affine = np.array([[-2., 0., 0., 78.],
                       [0., 2., 0., -112.],
                       [0., 0., 2., -70.],
                       [0., 0., 0., 1.]])
    # Color a cube around a corner area:
    x, y, z = 20, 22, 60
    x_map, y_map, z_map = coord_transform(x, y, z, np.linalg.inv(affine))
    data[int(x_map) - 1:int(x_map) + 1,
         int(y_map) - 1:int(y_map) + 1,
         int(z_map) - 1:int(z_map) + 1] = 1
    img = Nifti1Image(data, affine)
    cuts = find_cut_slices(img, n_cuts=20, direction='z')
    plot_stat_map(img, display_mode='z', cut_coords=cuts[-4:],
                  bg_img=bg_img)
Example 25
def _get_mask_measures(mask_file):
    """ Outputs the mask

    Parameters
    ----------
    mask_file : str
        Path to the mask image
    """
    # TODO: symmetry, length and width
    mask_img = nibabel.load(mask_file)
    volume = _get_volume(mask_img)

    mask_data = mask_img.get_data()
    i, j, k = np.where(mask_data != 0)
    voxels_coords = np.array(coord_transform(i, j, k, mask_img.affine)).T
    x_range, y_range, z_range = voxels_coords.max(axis=0) - \
        voxels_coords.min(axis=0)
    return x_range, y_range, z_range, volume
Example 26
def _sample_locations_between_surfaces(
        mesh, inner_mesh, affine, n_points=10, depth=None):
    outer_vertices, _ = mesh
    inner_vertices, _ = inner_mesh
    # when we drop support for np 1.5 replace the next 2 lines with
    # sample_locations = np.linspace(inner_vertices, outer_vertices, n_points)
    if depth is None:
        steps = np.linspace(0, 1, n_points)[:, None, None]
    else:
        steps = np.asarray(depth)[:, None, None]
    sample_locations = outer_vertices + steps * (
        inner_vertices - outer_vertices)
    sample_locations = np.rollaxis(sample_locations, 1)
    sample_locations_voxel_space = np.asarray(
        resampling.coord_transform(
            *np.vstack(sample_locations).T,
            affine=np.linalg.inv(affine))).T.reshape(sample_locations.shape)
    return sample_locations_voxel_space
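A minimal sketch of the interpolation step alone, before the voxel-space conversion (toy outer/inner vertex arrays, numpy only):
import numpy as np

outer = np.array([[0., 0., 2.], [10., 0., 2.]])   # toy pial-surface vertices
inner = np.array([[0., 0., 0.], [10., 0., 0.]])   # toy white-matter vertices
steps = np.linspace(0, 1, 5)[:, None, None]

# 5 evenly spaced points from the outer to the inner surface, per vertex
locations = outer + steps * (inner - outer)
locations = np.rollaxis(locations, 1)             # (n_vertices, n_points, 3)
print(locations.shape)                            # (2, 5, 3)
print(locations[0, :, 2])                         # [2.  1.5 1.  0.5 0. ]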
Example 27
def get_seeds(ds, radius):
    
    if check_proximity(ds, radius):
        return load_proximity(ds, radius)
    
    
    # Get the seeds
    process_mask_coords = ds.fa.voxel_indices.T
    process_mask_coords = coord_transform(
        process_mask_coords[0], process_mask_coords[1],
        process_mask_coords[2], ds.a.imgaffine)
    process_mask_coords = np.asarray(process_mask_coords).T
    
    seeds = process_mask_coords
    coords = ds.fa.voxel_indices
    logger.info("Building proximity matrix...")
    A = _get_affinity(seeds, coords, radius, allow_overlap=True, affine=ds.a.imgaffine)
    
    save_proximity(ds, radius, A)
    
    return A
Example 28
def test_outlier_cut_coords():
    """ Test to plot a subset of a large set of cuts found for a small area."""
    bg_img = load_mni152_template()

    data = np.zeros((79, 95, 79))
    affine = np.array([[  -2.,    0.,    0.,   78.],
                       [   0.,    2.,    0., -112.],
                       [   0.,    0.,    2.,  -70.],
                       [   0.,    0.,    0.,    1.]])

    # Color a cube around a corner area:
    x, y, z = 20, 22, 60
    x_map, y_map, z_map = coord_transform(x, y, z,
                                          np.linalg.inv(affine))

    data[int(x_map) - 1:int(x_map) + 1,
         int(y_map) - 1:int(y_map) + 1,
         int(z_map) - 1:int(z_map) + 1] = 1
    img = nibabel.Nifti1Image(data, affine)
    cuts = find_cut_slices(img, n_cuts=20, direction='z')

    p = plot_stat_map(img, display_mode='z', cut_coords=cuts[-4:],
                      bg_img=bg_img)
Example 29
def cluster_stats(stat_img,
                  mask_img,
                  threshold,
                  height_control='fpr',
                  cluster_th=0,
                  nulls=None):
    """
    Return a list of clusters, each cluster being represented by a
    dictionary. Clusters are sorted by descending size order. Within
    each cluster, local maxima are sorted by descending statistical value.

    Parameters
    ----------
    stat_img: Niimg-like object,
       statistical image (presumably in z scale)
    mask_img: Niimg-like object,
        mask image
    threshold: float,
        cluster forming threshold (either a p-value or z-scale value)
    height_control: string
        false positive control meaning of cluster forming
        threshold: 'fpr'|'fdr'|'bonferroni'|'none'
    cluster_th: int or float,
        cluster size threshold
    nulls: dictionary,
        statistics of the null distribution

    Notes
    -----
    If there is no cluster, an empty list is returned
    """
    if nulls is None: nulls = {}

    # Masking
    mask_img, stat_img = check_niimg(mask_img), check_niimg(stat_img)
    if not _check_same_fov(mask_img, stat_img):
        raise ValueError('mask_img and stat_img do not have the same fov')
    mask = mask_img.get_data().astype(np.bool)
    affine = mask_img.get_affine()
    stat_map = stat_img.get_data() * mask
    n_voxels = mask.sum()

    # Thresholding
    if height_control == 'fpr':
        z_th = norm.isf(threshold)
    elif height_control == 'fdr':
        z_th = fdr_threshold(stat_map[mask], threshold)
    elif height_control == 'bonferroni':
        z_th = norm.isf(threshold / n_voxels)
    else:  # Brute-force thresholding
        z_th = threshold

    p_th = norm.sf(z_th)
    # General info
    info = {
        'n_voxels': n_voxels,
        'threshold_z': z_th,
        'threshold_p': p_th,
        'threshold_pcorr': np.minimum(1, p_th * n_voxels)
    }

    above_th = stat_map > z_th
    above_values = stat_map * above_th
    if (above_th == 0).all():
        return [], info

    # Extract connected components above threshold
    labels, n_labels = label(above_th)

    # Extract the local maxima above the threshold
    maxima_mask = (above_values == np.maximum(z_th,
                                              maximum_filter(above_values, 3)))
    x, y, z = np.array(np.where(maxima_mask))
    maxima_coords = np.array(coord_transform(x, y, z, affine)).T
    maxima_labels = labels[maxima_mask]
    maxima_values = above_values[maxima_mask]

    # FDR-corrected p-values
    max_fdr_p_values = fdr_p_values(stat_map[mask])[maxima_mask[mask]]

    # Default "nulls"
    if not 'zmax' in nulls:
        nulls['zmax'] = 'bonferroni'
    if not 'smax' in nulls:
        nulls['smax'] = None
    if not 's' in nulls:
        nulls['s'] = None

    # Make list of clusters, each cluster being a dictionary
    clusters = []
    for k in range(n_labels):
        cluster_size = np.sum(labels == k + 1)
        if cluster_size >= cluster_th:

            # get the position of the maxima that belong to that cluster
            in_cluster = maxima_labels == k + 1

            # sort the maxima by decreasing statistical value
            max_vals = maxima_values[in_cluster]
            sorted_ = max_vals.argsort()[::-1]

            # Report significance levels in each cluster
            z_score = max_vals[sorted_]
            p_values = norm.sf(z_score)

            # Voxel-level corrected p-values
            fwer_p_value = None
            if nulls['zmax'] == 'bonferroni':
                fwer_p_value = np.minimum(1, p_values * n_voxels)
            elif isinstance(nulls['zmax'], np.ndarray):
                fwer_p_value = empirical_p_value(z_score, nulls['zmax'])

            # Cluster-level p-values (corrected)
            cluster_fwer_p_value = None
            if isinstance(nulls['smax'], np.ndarray):
                cluster_fwer_p_value = empirical_p_value(
                    cluster_size, nulls['smax'])

            # Cluster-level p-values (uncorrected)
            cluster_p_value = None
            if isinstance(nulls['s'], np.ndarray):
                cluster_p_value = empirical_p_value(cluster_size, nulls['s'])

            # write all this into the cluster structure
            clusters.append({
                'size':
                cluster_size,
                'maxima':
                maxima_coords[in_cluster][sorted_],
                'z_score':
                z_score,
                'fdr_p_value':
                max_fdr_p_values[in_cluster][sorted_],
                'p_value':
                p_values,
                'fwer_p_value':
                fwer_p_value,
                'cluster_fwer_p_value':
                cluster_fwer_p_value,
                'cluster_p_value':
                cluster_p_value
            })

    # Sort clusters by descending size order
    order = np.argsort(-np.array([cluster['size'] for cluster in clusters]))
    clusters = [clusters[i] for i in order]

    return clusters, info
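A minimal sketch of the height-control step alone (scipy.stats.norm; the voxel count and rate are illustrative values):
from scipy.stats import norm

threshold = 0.001      # desired false-positive rate
n_voxels = 50000       # illustrative in-mask voxel count

z_fpr = norm.isf(threshold)                     # 'fpr': uncorrected threshold
z_bonferroni = norm.isf(threshold / n_voxels)   # 'bonferroni': corrected threshold
print(round(z_fpr, 2), round(z_bonferroni, 2))  # approximately 3.09 and 5.49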
Example 30
def find_region_names_using_cut_coords(coords, atlas_img, labels=None):
    """Given list of MNI space coordinates, get names of the brain regions.

    For each coordinate in `coords`, the nearest voxel in the `atlas_img`
    space is found. That voxel's label number (int) and the name assigned
    to it are then looked up, and these names are returned.

    Parameters
    ----------
    coords : Tuples of coordinates in a list
        MNI coordinates.

    atlas_img : Nifti-like image
        Path to or Nifti-like object. The labels (integers) ordered in
        this image should be sequential. Example: [0, 1, 2, 3, 4] but not
        [0, 5, 6, 7]. Helps in returning correct names without errors.

    labels : list of str
        Names of the brain regions assigned to each label in atlas_img.
        NOTE: the label with index 0 is assumed to be the background
        (e.g. in the Harvard-Oxford atlas), and should hence be removed.

    Returns
    -------
    new_labels : list of int
        Integer labels found in the given atlas image at the provided
        coordinates.

    names : list of str
        Names of the brain regions corresponding to those labels.
    """
    if not isinstance(coords, collections.Iterable):
        raise ValueError("coords given must be a list of triplets of "
                         "coordinates in native space [(1, 2, 3)]. "
                         "You provided {0}".format(type(coords)))

    if isinstance(atlas_img, _basestring):
        atlas_img = check_niimg(atlas_img)

    affine = get_affine(atlas_img)
    atlas_data = _safe_get_data(atlas_img, ensure_finite=True)
    check_labels_from_atlas = np.unique(atlas_data)

    if labels is not None:
        names = []
        if not isinstance(labels, collections.Iterable):
            labels = np.asarray(labels)

    if isinstance(labels, collections.Iterable) and \
            isinstance(check_labels_from_atlas, collections.Iterable):
        if len(check_labels_from_atlas) != len(labels):
            warnings.warn("The number of labels provided does not match "
                          "with number of unique labels with atlas image.",
                          stacklevel=2)

    coords = list(coords)
    nearest_coordinates = []

    for sx, sy, sz in coords:
        nearest = np.round(coord_transform(sx, sy, sz, np.linalg.inv(affine)))
        nearest = nearest.astype(int)
        nearest = (nearest[0], nearest[1], nearest[2])
        nearest_coordinates.append(nearest)

    assert(len(nearest_coordinates) == len(coords))

    new_labels = []
    for coord_ in nearest_coordinates:
        # Grab index of current coordinate
        index = atlas_data[coord_]
        new_labels.append(index)
        if labels is not None:
            names.append(labels[index])

    if labels is not None:
        return new_labels, names
    else:
        return new_labels
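A minimal usage sketch of the function above (the atlas path, label names and coordinates are hypothetical):
coords = [(30, -20, 50), (-42, 18, 8)]          # hypothetical MNI coordinates
new_labels, names = find_region_names_using_cut_coords(
    coords,
    'labelled_atlas.nii.gz',                    # hypothetical deterministic atlas
    labels=['Background', 'Region A', 'Region B'])
print(list(zip(new_labels, names)))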
Example 31
neg_log_pvals_permuted_ols, _, _ = permuted_ols(
    tested_var, fmri_masked,
    model_intercept=True,
    n_perm=5000,  # 5,000 for the sake of time. Ideally, this should be 10,000
    n_jobs=1)  # can be changed to use more CPUs
neg_log_pvals_permuted_ols_unmasked = nifti_masker.inverse_transform(
    np.ravel(neg_log_pvals_permuted_ols))

### Visualization #############################################################
from nilearn.plotting import plot_stat_map

# Various plotting parameters
z_slice = 12  # plotted slice
from nilearn.image.resampling import coord_transform
affine = neg_log_pvals_anova_unmasked.get_affine()
_, _, k_slice = coord_transform(0, 0, z_slice,
                                linalg.inv(affine))
k_slice = round(k_slice)

threshold = - np.log10(0.1)  # 10% corrected
vmax = min(np.amax(neg_log_pvals_permuted_ols),
           np.amax(neg_log_pvals_anova))

# Plot Anova p-values
fig = plt.figure(figsize=(5, 7), facecolor='k')

display = plot_stat_map(neg_log_pvals_anova_unmasked,
                        threshold=threshold, cmap=plt.cm.autumn,
                        display_mode='z', cut_coords=[z_slice],
                        figure=fig, vmax=vmax, black_bg=True)

neg_log_pvals_anova_data = neg_log_pvals_anova_unmasked.get_data()
Example 32
def coord_transform_z(z, img):
    x, y, z = coord_transform(0, 0, z, img.affine)
    return z
Example 33
def index_to_xy_coord(x, y, z=10):
    '''Transforms data index to coordinates of the background + offset'''
    coords = coord_transform(x, y, z,
                             affine=thresholded_score_map_img.get_affine())
    return np.array(coords)[np.newaxis, :] + np.array([0, 1, 0])
Example 34
def coord_transform_x(x, img):
    x, y, z = coord_transform(x, 0, 0, img.affine)
    return x
Example 35
def coord_transform_y(y, img):
    x, y, z = coord_transform(0, y, 0, img.affine)
    return y
Example 36
def coord_transform_z(z, img):
    x, y, z = coord_transform(0, 0, z, img.affine)
    return z
Example 37
    def _apply_mask_and_get_affinity(seeds,
                                     niimg,
                                     radius,
                                     allow_overlap,
                                     mask_img=None):
        seeds = list(seeds)
        aff = niimg.get_affine()

        # Compute world coordinates of all in-mask voxels.
        if mask_img is not None:
            mask_img = check_niimg_3d(mask_img)
            mask_img = image.resample_img(mask_img,
                                          target_affine=aff,
                                          target_shape=niimg.shape[:3],
                                          interpolation='nearest')
            mask, _ = masking._load_mask_img(mask_img)
            mask_coords = list(zip(*np.where(mask != 0)))

            # X = masking._apply_mask_fmri(niimg, mask_img)
        else:
            mask_coords = list(np.ndindex(niimg.shape[:3]))

        # For each seed, get coordinates of nearest voxel
        nearests = []
        for sx, sy, sz in seeds:
            nearest = np.round(coord_transform(sx, sy, sz, np.linalg.inv(aff)))
            nearest = nearest.astype(int)
            nearest = (nearest[0], nearest[1], nearest[2])
            try:
                nearests.append(mask_coords.index(nearest))
            except ValueError:
                nearests.append(None)

        mask_coords = np.asarray(list(zip(*mask_coords)))
        mask_coords = coord_transform(mask_coords[0], mask_coords[1],
                                      mask_coords[2], aff)
        mask_coords = np.asarray(mask_coords).T

        if (radius is not None
                and LooseVersion(sklearn.__version__) < LooseVersion('0.16')):
            # Fix for scikit learn versions below 0.16. See
            # https://github.com/scikit-learn/scikit-learn/issues/4072
            radius += 1e-6

        clf = neighbors.NearestNeighbors(radius=radius)
        A = clf.fit(mask_coords).radius_neighbors_graph(seeds)
        A = A.tolil()

        for i, nearest in enumerate(nearests):
            if nearest is None:
                continue
            A[i, nearest] = True

        # Include the voxel containing the seed itself if not masked
        mask_coords = mask_coords.astype(int).tolist()
        for i, seed in enumerate(seeds):
            try:
                A[i, mask_coords.index(seed)] = True
            except ValueError:
                # seed is not in the mask
                pass

        if not allow_overlap:
            if np.any(A.sum(axis=0) >= 2):
                raise ValueError('Overlap detected between spheres')

        return A
Example 38
def get_clusters_table(stat_img, stat_threshold, cluster_threshold):
    """Creates pandas dataframe with img cluster statistics.

    Parameters
    ----------
    stat_img : Niimg-like object,
       statistical image (presumably in z scale)

    stat_threshold: float, optional
        cluster forming threshold (either a p-value or z-scale value)

    cluster_threshold : int, optional
        cluster size threshold

    Returns
    -------
    Pandas dataframe with img clusters
    """

    stat_map = stat_img.get_data()

    # If the stat threshold is too high simply return an empty dataframe
    if np.sum(stat_map > stat_threshold) == 0:
        warn('Attention: No clusters with stat higher than %f' %
             stat_threshold)
        return pd.DataFrame()

    # Extract connected components above threshold
    label_map, n_labels = label(stat_map > stat_threshold)

    for label_ in range(1, n_labels + 1):
        if np.sum(label_map == label_) < cluster_threshold:
            stat_map[label_map == label_] = 0

    # If the cluster threshold is too high simply return an empty dataframe
    # this checks for stats higher than threshold after small clusters
    # were removed from stat_map
    if np.sum(stat_map > stat_threshold) == 0:
        warn('Attention: No clusters with more than %d voxels' %
             cluster_threshold)
        return pd.DataFrame()

    label_map, n_labels = label(stat_map > stat_threshold)
    label_map = np.ravel(label_map)
    stat_map = np.ravel(stat_map)

    peaks = []
    max_stat = []
    clusters_size = []
    coords = []
    for label_ in range(1, n_labels + 1):
        cluster = stat_map.copy()
        cluster[label_map != label_] = 0

        peak = np.unravel_index(np.argmax(cluster), stat_img.get_data().shape)
        peaks.append(peak)

        max_stat.append(np.max(cluster))

        clusters_size.append(np.sum(label_map == label_))

        x_map, y_map, z_map = peak
        mni_coords = np.asarray(
            coord_transform(x_map, y_map, z_map,
                            stat_img.get_affine())).tolist()
        mni_coords = [round(x) for x in mni_coords]
        coords.append(mni_coords)

    vx, vy, vz = zip(*peaks)
    x, y, z = zip(*coords)

    columns = ['Vx', 'Vy', 'Vz', 'X', 'Y', 'Z', 'Peak stat', 'Cluster size']
    clusters_table = pd.DataFrame(list(
        zip(vx, vy, vz, x, y, z, max_stat, clusters_size)),
                                  columns=columns)

    return clusters_table
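A minimal usage sketch of the function above (the z-map image is hypothetical; the thresholds are illustrative):
# z_map_img: hypothetical Niimg-like statistical image in z scale
table = get_clusters_table(z_map_img,
                           stat_threshold=3.1,
                           cluster_threshold=20)
print(table[['X', 'Y', 'Z', 'Peak stat', 'Cluster size']])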
Example 39
def coord_transform_y(y, img):
    x, y, z = coord_transform(0, y, 0, img.affine)
    return y
Example 40
def coord_transform_x(x, img):
    x, y, z = coord_transform(x, 0, 0, img.affine)
    return x
Example 41
def get_searchlight_neighbours_matrix(mask_file, radius=6):
    """

    Parameters
    ----------
    mask_file : string
        Path to mask-file.
    radius : integer
        Searchlight radius in mm.

    Returns
    -------
    A : ndarray, shape(n_voxel, n_voxel)
        Affinity matrix. A[i,j]==1 if voxel_i, voxel_j are neighbours
        and 0 otherwise

    Examples
    --------
    >>> A = get_searchlight_neighbours_matrix('MNI152_2mm_brain_mask.nii')
    >>> A.shape
    """

    # heavily borrowed from nilearn: http://nilearn.github.io/
    from nilearn import image
    from nilearn import masking
    from nilearn.image.resampling import coord_transform
    from distutils.version import LooseVersion
    import sklearn
    from sklearn import neighbors
    from nilearn._utils.niimg_conversions import check_niimg_3d

    def _apply_mask_and_get_affinity(seeds,
                                     niimg,
                                     radius,
                                     allow_overlap,
                                     mask_img=None):
        seeds = list(seeds)
        aff = niimg.get_affine()

        # Compute world coordinates of all in-mask voxels.
        if mask_img is not None:
            mask_img = check_niimg_3d(mask_img)
            mask_img = image.resample_img(mask_img,
                                          target_affine=aff,
                                          target_shape=niimg.shape[:3],
                                          interpolation='nearest')
            mask, _ = masking._load_mask_img(mask_img)
            mask_coords = list(zip(*np.where(mask != 0)))

            # X = masking._apply_mask_fmri(niimg, mask_img)
        else:
            mask_coords = list(np.ndindex(niimg.shape[:3]))

        # For each seed, get coordinates of nearest voxel
        nearests = []
        for sx, sy, sz in seeds:
            nearest = np.round(coord_transform(sx, sy, sz, np.linalg.inv(aff)))
            nearest = nearest.astype(int)
            nearest = (nearest[0], nearest[1], nearest[2])
            try:
                nearests.append(mask_coords.index(nearest))
            except ValueError:
                nearests.append(None)

        mask_coords = np.asarray(list(zip(*mask_coords)))
        mask_coords = coord_transform(mask_coords[0], mask_coords[1],
                                      mask_coords[2], aff)
        mask_coords = np.asarray(mask_coords).T

        if (radius is not None
                and LooseVersion(sklearn.__version__) < LooseVersion('0.16')):
            # Fix for scikit learn versions below 0.16. See
            # https://github.com/scikit-learn/scikit-learn/issues/4072
            radius += 1e-6

        clf = neighbors.NearestNeighbors(radius=radius)
        A = clf.fit(mask_coords).radius_neighbors_graph(seeds)
        A = A.tolil()

        for i, nearest in enumerate(nearests):
            if nearest is None:
                continue
            A[i, nearest] = True

        # Include the voxel containing the seed itself if not masked
        mask_coords = mask_coords.astype(int).tolist()
        for i, seed in enumerate(seeds):
            try:
                A[i, mask_coords.index(seed)] = True
            except ValueError:
                # seed is not in the mask
                pass

        if not allow_overlap:
            if np.any(A.sum(axis=0) >= 2):
                raise ValueError('Overlap detected between spheres')

        return A

    process_mask_img = nib.load(mask_file)

    # Compute world coordinates of the seeds
    process_mask, process_mask_affine = masking._load_mask_img(
        process_mask_img)
    process_mask_coords = np.where(process_mask != 0)
    process_mask_coords = coord_transform(process_mask_coords[0],
                                          process_mask_coords[1],
                                          process_mask_coords[2],
                                          process_mask_affine)
    process_mask_coords = np.asarray(process_mask_coords).T

    A = _apply_mask_and_get_affinity(process_mask_coords,
                                     process_mask_img,
                                     radius,
                                     True,
                                     mask_img=process_mask_img)

    return A  # .toarray().astype('bool')
Example 42
def cluster_stats(stat_img, mask_img, threshold, height_control='fpr',
                  cluster_th=0, nulls=None):
    """
    Return a list of clusters, each cluster being represented by a
    dictionary. Clusters are sorted by descending size order. Within
    each cluster, local maxima are sorted by descending statistical value.

    Parameters
    ----------
    stat_img: Niimg-like object,
       statistical image (presumably in z scale)
    mask_img: Niimg-like object,
        mask image
    threshold: float,
        cluster forming threshold (either a p-value or z-scale value)
    height_control: string
        false positive control meaning of cluster forming
        threshold: 'fpr'|'fdr'|'bonferroni'|'none'
    cluster_th: int or float,
        cluster size threshold
    nulls: dictionary,
        statistics of the null distribution

    Notes
    -----
    If there is no cluster, an empty list is returned
    """
    if nulls is None: nulls = {}

    # Masking
    mask_img, stat_img = check_niimg(mask_img), check_niimg(stat_img)
    if not _check_same_fov(mask_img, stat_img):
        raise ValueError('mask_img and stat_img do not have the same fov')
    mask = mask_img.get_data().astype(np.bool)
    affine = mask_img.get_affine()
    stat_map = stat_img.get_data() * mask
    n_voxels = mask.sum()

    # Thresholding
    if height_control == 'fpr':
        z_th = norm.isf(threshold)
    elif height_control == 'fdr':
        z_th = fdr_threshold(stat_map[mask], threshold)
    elif height_control == 'bonferroni':
        z_th = norm.isf(threshold / n_voxels)
    else:  # Brute-force thresholding
        z_th = threshold

    p_th = norm.sf(z_th)
    # General info
    info = {'n_voxels': n_voxels,
            'threshold_z': z_th,
            'threshold_p': p_th,
            'threshold_pcorr': np.minimum(1, p_th * n_voxels)}

    above_th = stat_map > z_th
    above_values = stat_map * above_th
    if (above_th == 0).all():
        return [], info

    # Extract connected components above threshold
    labels, n_labels = label(above_th)

    # Extract the local maxima above the threshold
    maxima_mask = (above_values ==
                   np.maximum(z_th, maximum_filter(above_values, 3)))
    x, y, z = np.array(np.where(maxima_mask))
    maxima_coords = np.array(coord_transform(x, y, z, affine)).T
    maxima_labels = labels[maxima_mask]
    maxima_values = above_values[maxima_mask]

    # FDR-corrected p-values
    max_fdr_p_values = fdr_p_values(stat_map[mask])[maxima_mask[mask]]

    # Default "nulls"
    if 'zmax' not in nulls:
        nulls['zmax'] = 'bonferroni'
    if 'smax' not in nulls:
        nulls['smax'] = None
    if 's' not in nulls:
        nulls['s'] = None

    # Make list of clusters, each cluster being a dictionary
    clusters = []
    for k in range(n_labels):
        cluster_size = np.sum(labels == k + 1)
        if cluster_size >= cluster_th:

            # get the position of the maxima that belong to that cluster
            in_cluster = maxima_labels == k + 1

            # sort the maxima by decreasing statistical value
            max_vals = maxima_values[in_cluster]
            sorted_ = max_vals.argsort()[::-1]

            # Report significance levels in each cluster
            z_score = max_vals[sorted_]
            p_values = norm.sf(z_score)

            # Voxel-level corrected p-values
            fwer_p_value = None
            if nulls['zmax'] == 'bonferroni':
                fwer_p_value = np.minimum(1, p_values * n_voxels)
            elif isinstance(nulls['zmax'], np.ndarray):
                fwer_p_value = empirical_p_value(
                    z_score, nulls['zmax'])

            # Cluster-level p-values (corrected)
            cluster_fwer_p_value = None
            if isinstance(nulls['smax'], np.ndarray):
                cluster_fwer_p_value = empirical_p_value(
                    cluster_size, nulls['smax'])

            # Cluster-level p-values (uncorrected)
            cluster_p_value = None
            if isinstance(nulls['s'], np.ndarray):
                cluster_p_value = empirical_p_value(
                    cluster_size, nulls['s'])

            # write all this into the cluster structure
            clusters.append({
                    'size': cluster_size,
                    'maxima': maxima_coords[in_cluster][sorted_],
                    'z_score': z_score,
                    'fdr_p_value': max_fdr_p_values[in_cluster][sorted_],
                    'p_value': p_values,
                    'fwer_p_value': fwer_p_value,
                    'cluster_fwer_p_value': cluster_fwer_p_value,
                    'cluster_p_value': cluster_p_value
                    })

    # Sort clusters by descending size order
    order = np.argsort(- np.array([cluster['size'] for cluster in clusters]))
    clusters = [clusters[i] for i in order]

    return clusters, info
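

# Hedged usage sketch (not part of the original example): 'zmap.nii.gz' and
# 'mask.nii.gz' are hypothetical file names and the thresholds are
# illustrative only.
import nibabel as nib

clusters, info = cluster_stats(nib.load('zmap.nii.gz'), nib.load('mask.nii.gz'),
                               threshold=0.001, height_control='fpr',
                               cluster_th=10)
if clusters:
    largest = clusters[0]                 # clusters are sorted by size
    print('largest cluster: %d voxels, peak z=%.2f at %s'
          % (largest['size'], largest['z_score'][0], largest['maxima'][0]))
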
Example n. 43
def get_clusters_table(stat_img, stat_threshold, cluster_threshold=None,
                       min_distance=8.):
    """Creates pandas dataframe with img cluster statistics.

    Parameters
    ----------
    stat_img : Niimg-like object,
       Statistical image (presumably in z- or p-scale).

    stat_threshold: `float`
        Cluster forming threshold in same scale as `stat_img` (either a
        p-value or z-scale value).

    cluster_threshold : `int` or `None`, optional
        Cluster size threshold, in voxels.

    min_distance: `float`, optional
        Minimum distance between subpeaks in mm. Default is 8 mm.

    Returns
    -------
    df : `pandas.DataFrame`
        Table with peaks and subpeaks from thresholded `stat_img`. For binary
        clusters (clusters with >1 voxel containing only one value), the table
        reports the center of mass of the cluster, rather than any peaks/subpeaks.
    """
    cols = ['Cluster ID', 'X', 'Y', 'Z', 'Peak Stat', 'Cluster Size (mm3)']
    stat_map = stat_img.get_data()
    conn_mat = np.zeros((3, 3, 3), int)  # 6-connectivity, aka NN1 or "faces"
    conn_mat[1, 1, :] = 1
    conn_mat[1, :, 1] = 1
    conn_mat[:, 1, 1] = 1
    voxel_size = np.prod(stat_img.header.get_zooms())
    
    # Binarize using CDT
    binarized = stat_map > stat_threshold
    binarized = binarized.astype(int)
    
    # If the stat threshold is too high simply return an empty dataframe
    if np.sum(binarized) == 0:
        warnings.warn('Attention: No clusters with stat higher than %f' %
                      stat_threshold)
        return pd.DataFrame(columns=cols)
    
    # Extract connected components above cluster size threshold
    label_map = ndimage.measurements.label(binarized, conn_mat)[0]
    clust_ids = sorted(list(np.unique(label_map)[1:]))
    for c_val in clust_ids:
        if cluster_threshold is not None and np.sum(
                label_map == c_val) < cluster_threshold:
            stat_map[label_map == c_val] = 0
            binarized[label_map == c_val] = 0
    
    # If the cluster threshold is too high simply return an empty dataframe
    # this checks for stats higher than threshold after small clusters
    # were removed from stat_map
    if np.sum(stat_map > stat_threshold) == 0:
        warnings.warn('Attention: No clusters with more than %d voxels' %
                      cluster_threshold)
        return pd.DataFrame(columns=cols)
    
    # Now re-label and create table
    label_map = ndimage.measurements.label(binarized, conn_mat)[0]
    clust_ids = sorted(list(np.unique(label_map)[1:]))
    peak_vals = np.array(
            [np.max(stat_map * (label_map == c)) for c in clust_ids])
    clust_ids = [clust_ids[c] for c in
                 (-peak_vals).argsort()]  # Sort by descending max value
    
    rows = []
    for c_id, c_val in enumerate(clust_ids):
        cluster_mask = label_map == c_val
        masked_data = stat_map * cluster_mask
        
        cluster_size_mm = int(np.sum(cluster_mask) * voxel_size)
        
        # Get peaks, subpeaks and associated statistics
        subpeak_ijk, subpeak_vals = _local_max(masked_data, stat_img.affine,
                                               min_distance=min_distance)
        subpeak_xyz = np.asarray(coord_transform(subpeak_ijk[:, 0],
                                                 subpeak_ijk[:, 1],
                                                 subpeak_ijk[:, 2],
                                                 stat_img.affine)).tolist()
        subpeak_xyz = np.array(subpeak_xyz).T
        
        # Only report peak and, at most, top 3 subpeaks.
        n_subpeaks = np.min((len(subpeak_vals), 4))
        for subpeak in range(n_subpeaks):
            if subpeak == 0:
                row = [c_id + 1, subpeak_xyz[subpeak, 0],
                       subpeak_xyz[subpeak, 1], subpeak_xyz[subpeak, 2],
                       subpeak_vals[subpeak], cluster_size_mm]
            else:
                # Subpeak naming convention is cluster num + letter (1a, 1b, etc.)
                sp_id = '{0}{1}'.format(c_id + 1, ascii_lowercase[subpeak - 1])
                row = [sp_id, subpeak_xyz[subpeak, 0], subpeak_xyz[subpeak, 1],
                       subpeak_xyz[subpeak, 2], subpeak_vals[subpeak], '']
            rows += [row]
    df = pd.DataFrame(columns=cols, data=rows)
    return df
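

# Hedged usage sketch (not part of the original example): 'zstat.nii.gz' is a
# hypothetical z-map and the thresholds are illustrative only.
import nibabel as nib

z_img = nib.load('zstat.nii.gz')
peaks_table = get_clusters_table(z_img, stat_threshold=3.1,
                                 cluster_threshold=10, min_distance=8.)
print(peaks_table)                        # one row per peak/subpeak, coords in mm
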
Example n. 44
    def compute_spheres_values(self, imgs):
        start_t = time.time()

        # Force to get a list of imgs even if only one is given
        imgs_to_check = [imgs] if not isinstance(imgs, list) else imgs

        # Load Nifti images
        ref_shape = imgs_to_check[0].dataobj.shape
        ref_affine = imgs_to_check[0].affine
        imgs = []
        for img in imgs_to_check:
            # check if image is 4D
            imgs.append(check_niimg_4d(img))

            # Check that all images have the same shape and affine
            if ref_shape != img.dataobj.shape:
                raise ValueError("All fMRI images must have the same shape")
            if not np.array_equal(ref_affine, img.affine):
                warnings.warn("fMRI images do not have the same affine")
        self.ref_img = imgs[0]

        # Compute world coordinates of the seeds
        process_mask_img = check_niimg_3d(self.process_mask_img)
        process_mask_img = image.resample_to_img(
            process_mask_img, imgs[0], interpolation='nearest'
        )
        self.process_mask_img = process_mask_img

        process_mask, process_mask_affine = masking._load_mask_img(
            process_mask_img
        )
        process_mask_coords = np.where(process_mask != 0)
        self.vx_mask_coords = process_mask_coords
        process_mask_coords = coord_transform(
            process_mask_coords[0], process_mask_coords[1],
            process_mask_coords[2], process_mask_affine)
        process_mask_coords = np.asarray(process_mask_coords).T

        if self.verbose:
            print("{} seeds found in the mask".format(len(process_mask_coords)))

        # Compute spheres
        _, A = _apply_mask_and_get_affinity(
            process_mask_coords, imgs[0], self.radius, True,
            mask_img=self.mask_img
        )

        # Number of runs: 1 4D fMRI image / run
        n_runs = len(imgs)

        # Number of spheres (or seed voxels)
        n_spheres = A.shape[0]

        # Number of volumes in each 4D fMRI image
        n_conditions = imgs[0].dataobj.shape[3]

        mask_img = check_niimg_3d(self.mask_img)
        mask_img = image.resample_img(
            mask_img, target_affine=imgs[0].affine,
            target_shape=imgs[0].shape[:3], interpolation='nearest'
        )

        masked_imgs_data = []
        for i_run, img in enumerate(imgs):
            masked_imgs_data.append(masking._apply_mask_fmri(img, mask_img))

        # Extract data of each sphere
        # X will be #spheres x #runs x #conditions x #values
        X = []
        for i_sph in range(n_spheres):
            # Indexes of all voxels included in the current sphere
            sph_indexes = A.rows[i_sph]

            if len(sph_indexes) == 0:
                # No in-mask voxels fall inside this sphere: append a zero placeholder
                X.append(np.full((n_runs, n_conditions, 1), 0))
                print("Empty sphere")
            else:
                # Number of voxels in the current sphere
                n_values = len(sph_indexes)

                sub_X = np.empty((n_runs, n_conditions, n_values), dtype=object)
                for i_run, img in enumerate(imgs):
                    for i_cond in range(n_conditions):
                        sub_X[i_run, i_cond] = masked_imgs_data[i_run][i_cond][
                            sph_indexes]
                X.append(sub_X)

        if self.verbose:
            dt = time.time() - start_t
            print("Elapsed time to extract spheres values: {:.01f}s".format(dt))

        self.spheres_values = X
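

# Hedged illustration (not part of the original class): after calling
# compute_spheres_values(), each entry of `spheres_values` is expected to be an
# (n_runs, n_conditions, n_voxels) array for one seed sphere. The arrays below
# are made-up stand-ins, only to show how a caller might consume the structure.
import numpy as np

demo_spheres_values = [np.random.rand(2, 10, 33),   # sphere 0: 33 voxels
                       np.random.rand(2, 10, 17)]   # sphere 1: 17 voxels
for i_sph, sphere in enumerate(demo_spheres_values):
    # e.g. average the pattern across runs before any per-sphere analysis
    mean_pattern = sphere.mean(axis=0)              # (n_conditions, n_voxels)
    print('sphere {}: mean pattern shape {}'.format(i_sph, mean_pattern.shape))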