Example No. 1
def _bingham_fit_multi_peaks(odf, sphere, max_angle, min_sep_angle, rel_th):
    """
    Peak extraction followed by Bingham fit for each peak.

    Parameters
    ----------
    odf: ndarray
        ODF expressed as a spherical function evaluated on sphere.
    sphere: DIPY Sphere
        Sphere on which odf is defined.
    max_angle: float
        Maximum angle between a peak and its neighbouring directions
        to be included when fitting the Bingham distribution.
    min_sep_angle: float
        Minimum separation angle between two peaks for peak extraction.
    rel_th: float
        Relative threshold used for peak extraction.
    """
    peaks, _, _ = peak_directions(odf,
                                  sphere,
                                  relative_peak_threshold=rel_th,
                                  min_separation_angle=min_sep_angle)

    lobes = []
    for peak in peaks:
        peak_fit = _bingham_fit_peak(odf, peak, sphere, max_angle)
        lobes.append(peak_fit)

    return lobes
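This helper relies on a private `_bingham_fit_peak` routine that is not shown. As a minimal sketch of just the peak-extraction step it wraps, here is `peak_directions` on a synthetic two-lobe ODF (the sphere name, lobe shapes and thresholds are assumptions):

import numpy as np
from dipy.data import get_sphere
from dipy.direction import peak_directions

# synthetic two-lobe ODF sampled on an assumed DIPY sphere
sphere = get_sphere(name='repulsion724')
v = sphere.vertices
odf = np.abs(v[:, 2]) ** 4 + 0.6 * np.abs(v[:, 0]) ** 4  # lobes along z and x

peaks, values, indices = peak_directions(odf, sphere,
                                         relative_peak_threshold=0.3,
                                         min_separation_angle=25)
print(peaks)  # expected: roughly the z and x axes (antipodes merged), i.e. two peaks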
Example No. 2
def sph_peaks_t(power_map,
                theta_look,
                phi_look,
                max_n_peaks=20,
                audio_length_seconds=None,
                **kwargs):

    """Per-frame peak finding on a spherical power map.

    Returns an array with one row per detected peak: (frame index, or time in
    seconds if ``audio_length_seconds`` is given, theta, phi).
    """
    N_frames = power_map.shape[1]

    # set up sphere object for peak finding (note the swap: DIPY's Sphere takes
    # theta as the colatitude/inclination and phi as the azimuth)
    sph = Sphere(theta=phi_look, phi=theta_look)

    # set up output arrays for DOAs
    y_t = np.zeros((N_frames, max_n_peaks))
    x_t = np.zeros((N_frames, max_n_peaks))

    for i in range(N_frames):
        # peak finding in spherical data; peak_directions options such as
        # relative_peak_threshold and min_separation_angle pass through **kwargs
        _, _, peaks = peak_directions(power_map[:, i], sph, **kwargs)

        # look up the detected peaks' look directions (peaks holds vertex indices)
        xdirs = theta_look[peaks]
        ydirs = phi_look[peaks]

        # keep at most max_n_peaks - 1 peaks (column 0 stores the frame index)
        xdirs = xdirs[:max_n_peaks - 1]
        ydirs = ydirs[:max_n_peaks - 1]

        x_t[i, 0] = i
        y_t[i, 0] = i
        x_t[i, 1:len(xdirs) + 1] += xdirs
        y_t[i, 1:len(xdirs) + 1] += ydirs

    # rearrange into rows of (frame index, theta, phi), one row per peak column
    xy_parts = []
    for i in range(x_t.shape[1] - 1):
        xyi = np.append(np.append(x_t[:, [0]], x_t[:, [i + 1]], 1),
                        y_t[:, [i + 1]], 1)
        xy_parts.append(xyi)
    xy_t = np.concatenate(xy_parts, axis=0)

    # drop rows that carry no peak (unfilled entries are zero)
    xy_t = xy_t[np.where(xy_t[:, 2] != 0)]

    if audio_length_seconds is not None:
        # replace frame numbers with time in seconds
        time_index = np.linspace(0, audio_length_seconds, N_frames)
        xy_t[:, 0] = time_index[xy_t[:, 0].astype(int)]

    return xy_t
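A hedged usage sketch for `sph_peaks_t`, with purely synthetic look directions and power values; the angle conventions and grid size are assumptions, and the imports are the ones the function body itself needs:

import numpy as np
from dipy.core.sphere import Sphere           # used inside sph_peaks_t
from dipy.direction import peak_directions    # used inside sph_peaks_t

# hypothetical random look-direction grid and power map (500 directions, 10 frames)
rng = np.random.default_rng(0)
theta_look = rng.uniform(0, 2 * np.pi, 500)   # assumed azimuth, radians
phi_look = rng.uniform(0, np.pi, 500)         # assumed colatitude, radians
power_map = rng.random((500, 10))

xy_t = sph_peaks_t(power_map, theta_look, phi_look,
                   relative_peak_threshold=0.5, min_separation_angle=5)
print(xy_t.shape)  # one row of (frame, theta, phi) per detected peak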
Example No. 3
def peaks_from_odfs(odf4d,
                    sphere,
                    relative_peak_threshold,
                    min_separation_angle,
                    mask=None,
                    gfa_thr=0,
                    normalize_peaks=False,
                    npeaks=5):

    """Extract up to ``npeaks`` ODF peak directions and values for every voxel
    of a 4D ODF array sampled on ``sphere``."""
    shape = odf4d.shape[:-1]
    if mask is None:
        mask = np.ones(shape, dtype='bool')
    else:
        if mask.shape != shape:
            raise ValueError("Mask is not the same shape as data.")

    gfa_array = np.zeros(shape)
    qa_array = np.zeros((shape + (npeaks, )))

    peak_dirs = np.zeros((shape + (npeaks, 3)))
    peak_values = np.zeros((shape + (npeaks, )))
    peak_indices = np.zeros((shape + (npeaks, )), dtype='int')
    peak_indices.fill(-1)

    global_max = -np.inf
    for idx in ndindex(shape):
        if not mask[idx]:
            continue
        odf = odf4d[idx]
        gfa_array[idx] = gfa(odf)
        if gfa_array[idx] < gfa_thr:
            global_max = max(global_max, odf.max())
            continue
        # Get peaks of odf
        direction, pk, ind = peak_directions(odf, sphere,
                                             relative_peak_threshold,
                                             min_separation_angle)
        # Calculate peak metrics
        if pk.shape[0] != 0:
            global_max = max(global_max, pk[0])
            n = min(npeaks, pk.shape[0])
            qa_array[idx][:n] = pk[:n] - odf.min()
            peak_dirs[idx][:n] = direction[:n]
            peak_indices[idx][:n] = ind[:n]
            peak_values[idx][:n] = pk[:n]

            if normalize_peaks:
                peak_values[idx][:n] /= pk[0]
                peak_dirs[idx] *= peak_values[idx][:, None]

    qa_array /= global_max

    return peak_dirs, peak_values
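A hedged usage sketch for `peaks_from_odfs` on a tiny synthetic ODF volume; the import paths are my best guess at the module-level imports the excerpt omits:

import numpy as np
from dipy.core.ndindex import ndindex         # used inside peaks_from_odfs
from dipy.data import get_sphere
from dipy.direction import peak_directions    # used inside peaks_from_odfs
from dipy.reconst.odf import gfa              # used inside peaks_from_odfs

# hypothetical 2x2x2 volume in which every voxel holds the same single-lobe ODF
sphere = get_sphere(name='repulsion724')
lobe = np.abs(sphere.vertices[:, 2]) ** 4
odf4d = np.tile(lobe, (2, 2, 2, 1))

dirs, vals = peaks_from_odfs(odf4d, sphere,
                             relative_peak_threshold=0.5,
                             min_separation_angle=25)
print(dirs.shape, vals.shape)  # (2, 2, 2, 5, 3) (2, 2, 2, 5)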
Example No. 4
    def peaks_directions(self,
                         sphere,
                         max_peaks=5,
                         relative_peak_threshold=0.5,
                         min_separation_angle=25):
        """
        Returns peak directions for estimated FODs. Uses dipy's peak_directions
        function to get the local maximum on a sphere's tesselation.

        Parameters
        ----------
        sphere : Sphere
            The Sphere providing discrete directions for evaluation.
        max_peaks : int
            The maximum number of peaks returned per FOD.
        relative_peak_threshold : float in [0., 1.]
            Only peaks greater than min + relative_peak_threshold * scale are
            kept, where min = max(0, odf.min()) and scale = odf.max() - min.
        min_separation_angle : float in [0, 90]
            The minimum distance between directions. If two peaks are too close
            only the larger of the two is returned.

        Returns
        -------
        directions : (Ndata, Npeaks, 3) ndarray
            peak direction vectors on the sphere, one per peak
        values : (Ndata, Npeaks) ndarray
            peak values
        indices : (Ndata, Npeaks) ndarray
            indices of the peak directions on the sphere
        """
        peaks_shape = np.r_[self.mask.shape, max_peaks, 3]
        peaks = np.zeros(peaks_shape)
        values = np.zeros(peaks_shape[:-1])
        indices = np.zeros(peaks_shape[:-1])

        fods = self.fod(sphere.vertices)
        mask_pos = np.where(self.mask)
        for pos in zip(*mask_pos):
            fod = fods[pos]
            peaks_, values_, indices_ = peak_directions(
                fod, sphere, relative_peak_threshold, min_separation_angle)
            # if fewer peaks than max_peaks are found, only take those.
            Npeaks = np.min([len(indices_), max_peaks])
            peaks[pos][:Npeaks] = peaks_[:Npeaks]
            values[pos][:Npeaks] = values_[:Npeaks]
            indices[pos][:Npeaks] = indices_[:Npeaks]
        return peaks, values, indices
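As a rough illustration of what the method expects from its class, here is a hypothetical minimal host, assuming the method above is available as a plain function named `peaks_directions`; `TinyFit` and its synthetic FODs are inventions for the sketch:

import numpy as np
from dipy.data import get_sphere
from dipy.direction import peak_directions   # used inside peaks_directions


class TinyFit:
    """Hypothetical host exposing the two attributes the method relies on."""

    def __init__(self, shape=(2, 2)):
        self.mask = np.ones(shape, dtype=bool)

    def fod(self, vertices):
        # the same synthetic single-lobe FOD (along z) for every masked voxel
        lobe = np.abs(vertices[:, 2]) ** 4
        return np.tile(lobe, self.mask.shape + (1,))


TinyFit.peaks_directions = peaks_directions  # attach the method shown above

sphere = get_sphere(name='repulsion724')
peaks, values, indices = TinyFit().peaks_directions(sphere, max_peaks=3)
print(peaks.shape)  # (2, 2, 3, 3)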
Example No. 5
def makefib(input, output, mask=None, n_fibers=3):
    """
    Converts a NIfTI ``.nii`` file containing SH coefficients to a DSI
    Studio fib file.

    This function uses ``sh2amp`` to get amplitude values for each
    direction in DSI Studio's ``odf8`` direction set. These values are
    masked and loaded into the "odfN" matrices in the fib file.

    Parameters
    ----------
    input : str
        Path to input nifti file containing SH coefficients
    output : str
        Path to output fib file; must end with .fib
    mask : str, optional
        Path to nifti file containing brain mask
    n_fibers : int, optional
        The maximum number of ODF maxima to extract per voxel
        (Default: 3)

    Returns
    -------
    None; the fib file is written to ``output``.
    """
    if not op.exists(input):
        raise OSError('Input path does not exist. Please ensure that '
                      'the folder or file specified exists.')
    if not op.exists(op.dirname(output)):
        raise OSError('Specified directory for output file {} does not '
                      'exist. Please ensure that this is a valid '
                      'directory.'.format(op.dirname(output)))
    if op.splitext(input)[-1] != '.nii':
        raise IOError('Input file needs to be specified as a NIfTI '
                      '(.nii)')
    if op.splitext(output)[-1] != '.fib':
        raise IOError('Output file needs to be specified as a .fib file')
    if mask is not None:
        if not op.exists(mask):
            raise OSError('Path to brain mask does not exist. Please '
                          'ensure that the folder specified exists.')
    outdir = op.dirname(output)
    # Convert to LPS
    fname, ext = op.splitext(op.basename(input))
    fname = fname + '_lps'
    input_ = op.join(outdir, fname + ext)
    convertLPS(input, input_)
    if mask is not None:
        fname, ext = op.splitext(op.basename(mask))
        fname = fname + '_lps'
        mask_ = op.join(outdir, fname + ext)
        convertLPS(mask, mask_)
    # Get ODF geometry
    verts, faces = get_dsi_studio_ODF_geometry("odf8")
    num_dirs, _ = verts.shape
    hemisphere = num_dirs // 2
    x, y, z = verts[:hemisphere].T
    hs = HemiSphere(x=x, y=y, z=z)
    # Convert to DSI Studio LPS+ from MRTRIX3 RAS+
    _, theta, phi = cart2sphere(-x, -y, z)
    dirs_txt = op.join(outdir, "directions.txt")
    np.savetxt(dirs_txt, np.column_stack([phi, theta]))
    # Get SH amplitude
    odf_amplitudes_nii = op.join(outdir, 'amplitudes.nii')
    arg = [
        'sh2amp',
        '-quiet',
        '-force',
        '-nonnegative',
        input_, dirs_txt, odf_amplitudes_nii
    ]
    completion = subprocess.run(arg)
    if completion.returncode != 0:
        raise Exception('Failed to determine amplitude of SH '
                        'coefficients. Check above for errors.')
    # Load images
    amplitudes_img = nib.load(odf_amplitudes_nii)
    ampl_data = amplitudes_img.get_fdata()
    if mask is not None:
        mask_img = nib.load(mask_)
        if not np.allclose(mask_img.affine, amplitudes_img.affine):
            raise ValueError('Differing orientation between mask and '
                             'amplitudes.')
        if mask_img.shape != amplitudes_img.shape[:3]:
            raise ValueError('Differing grid between mask and '
                             'amplitudes.')
        mask_img = mask_img.get_fdata()
    else:
        mask_img = np.ones(ampl_data.shape[:3], order='F')
    # Make flat mask
    flat_mask = mask_img.flatten(order='F') > 0
    odf_array = ampl_data.reshape(-1, ampl_data.shape[3], order="F")
    masked_odfs = odf_array[flat_mask, :]
    n_odfs = masked_odfs.shape[0]
    peak_indices = np.zeros((n_odfs, n_fibers))
    peak_vals = np.zeros((n_odfs, n_fibers))
    dsi_mat = {}
    # Create matfile that can be read by dsi Studio
    dsi_mat['dimension'] = np.array(amplitudes_img.shape[:3])
    dsi_mat['voxel_size'] = np.array(amplitudes_img.header.get_zooms()[:3])
    n_voxels = int(np.prod(dsi_mat['dimension']))
    for odfnum in tqdm(range(masked_odfs.shape[0]),
                       desc='ODF Peak Detection',
                       bar_format='{desc}: [{percentage:0.0f}%]',
                       unit='vox',
                       ncols=tqdmWidth):
        dirs, vals, indices = peak_directions(masked_odfs[odfnum], hs)
        for dirnum, (val, idx) in enumerate(zip(vals, indices)):
            if dirnum == n_fibers:
                break
            peak_indices[odfnum, dirnum] = idx
            peak_vals[odfnum, dirnum] = val
    for nfib in range(n_fibers):
        # fill in the "fa" values
        fa_n = np.zeros(n_voxels)
        fa_n[flat_mask] = peak_vals[:, nfib]
        dsi_mat['fa%d' % nfib] = fa_n.astype(np.float32)
        # Fill in the index values
        index_n = np.zeros(n_voxels)
        index_n[flat_mask] = peak_indices[:, nfib]
        dsi_mat['index%d' % nfib] = index_n.astype(np.int16)
    # Add in the ODFs
    num_odf_matrices = n_odfs // ODF_COLS
    split_indices = (np.arange(num_odf_matrices) + 1) * ODF_COLS
    odf_splits = np.array_split(masked_odfs, split_indices, axis=0)
    for splitnum, odfs in enumerate(odf_splits):
        dsi_mat['odf%d' % splitnum] = odfs.T.astype(np.float32)
    dsi_mat['odf_vertices'] = verts.T
    dsi_mat['odf_faces'] = faces.T
    dsi_mat['z0'] = np.array([1.])
    savemat(output, dsi_mat, format='4', appendmat=False)
    # Remove intermediate files
    os.remove(input_)
    if mask is not None:
        os.remove(mask_)
    os.remove(dirs_txt)
    os.remove(odf_amplitudes_nii)
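A hypothetical invocation of `makefib`; the paths are placeholders, MRtrix3's `sh2amp` must be on the PATH, and helpers such as `convertLPS`, `get_dsi_studio_ODF_geometry`, `ODF_COLS` and `tqdmWidth` are assumed to come from the surrounding module:

# write a DSI Studio fib file next to the (placeholder) output path
makefib(input='/data/sub-01/wmfod.nii',
        output='/data/sub-01/wmfod.fib',
        mask='/data/sub-01/brain_mask.nii',
        n_fibers=3)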
Example No. 6
def amplitudes_to_fibgz(amplitudes_img,
                        odf_dirs,
                        odf_faces,
                        output_file,
                        mask_img,
                        num_fibers=5,
                        unit_odf=False):
    """Convert a NiftiImage of ODF amplitudes to a DSI Studio fib file.

    Parameters:
    ===========

    amplitudes_img: nb.Nifti1Image
        4d NIfTI image that contains amplitudes for the ODFs
    odf_dirs: np.ndarray
        N x 3 array containing the directions corresponding to the
        amplitudes in ``amplitudes_img``. The values in
        ``amplitudes_img.get_fdata()[..., i]`` are for the
        direction in ``odf_dirs[i]``.
    odf_faces: np.ndarray
        triangles connecting the vertices in ``odf_dirs``
    output_file: str
        Path where the output fib file will be written.
    mask_img: nb.Nifti1Image
        3d Image that is nonzero where voxels contain brain.
    num_fibers: int
        The maximum number of fibers/fixels stored in each voxel.
    unit_odf: bool
        If True, each ODF is rescaled to sum to 1 before being written.

    Returns:
    ========

    None


    """
    num_dirs, _ = odf_dirs.shape
    hemisphere = num_dirs // 2
    x, y, z = odf_dirs[:hemisphere].T
    hs = HemiSphere(x=x, y=y, z=z)

    if not np.allclose(mask_img.affine, amplitudes_img.affine):
        raise ValueError("Differing orientation between mask and amplitudes")
    if not mask_img.shape == amplitudes_img.shape[:3]:
        raise ValueError("Differing grid between mask and amplitudes")

    # Get the flat mask
    ampl_data = amplitudes_img.get_fdata()
    flat_mask = mask_img.get_fdata().flatten(order="F") > 0
    odf_array = ampl_data.reshape(-1, ampl_data.shape[3], order="F")
    del ampl_data
    masked_odfs = odf_array[flat_mask, :]
    z0 = masked_odfs.max()
    masked_odfs = masked_odfs / z0
    masked_odfs[masked_odfs < 0] = 0
    masked_odfs = masked_odfs.astype(np.float64)

    if unit_odf:
        sums = masked_odfs.sum(1)
        sums[sums == 0] = 1
        masked_odfs = masked_odfs / sums[:, np.newaxis]

    n_odfs = masked_odfs.shape[0]
    peak_indices = np.zeros((n_odfs, num_fibers))
    peak_vals = np.zeros((n_odfs, num_fibers))

    dsi_mat = {}
    # Create matfile that can be read by dsi Studio
    dsi_mat['dimension'] = np.array(amplitudes_img.shape[:3])
    dsi_mat['voxel_size'] = np.array(amplitudes_img.header.get_zooms()[:3])
    n_voxels = int(np.prod(dsi_mat['dimension']))
    LOGGER.info("Detecting Peaks")
    for odfnum in range(masked_odfs.shape[0]):
        dirs, vals, indices = peak_directions(masked_odfs[odfnum], hs)
        for dirnum, (val, idx) in enumerate(zip(vals, indices)):
            if dirnum == num_fibers:
                break
            peak_indices[odfnum, dirnum] = idx
            peak_vals[odfnum, dirnum] = val

    for nfib in range(num_fibers):
        # fill in the "fa" values
        fa_n = np.zeros(n_voxels)
        fa_n[flat_mask] = peak_vals[:, nfib]
        dsi_mat['fa%d' % nfib] = fa_n.astype(np.float32)

        # Fill in the index values
        index_n = np.zeros(n_voxels)
        index_n[flat_mask] = peak_indices[:, nfib]
        dsi_mat['index%d' % nfib] = index_n.astype(np.int16)

    # Add in the ODFs
    num_odf_matrices = n_odfs // ODF_COLS
    split_indices = (np.arange(num_odf_matrices) + 1) * ODF_COLS
    odf_splits = np.array_split(masked_odfs, split_indices, axis=0)
    for splitnum, odfs in enumerate(odf_splits):
        dsi_mat['odf%d' % splitnum] = odfs.T.astype(np.float32)

    dsi_mat['odf_vertices'] = odf_dirs.T
    dsi_mat['odf_faces'] = odf_faces.T
    dsi_mat['z0'] = np.array([z0])
    savemat(output_file, dsi_mat, format='4', appendmat=False)
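A hedged sketch that builds in-memory inputs and calls `amplitudes_to_fibgz`; `ODF_COLS`, `LOGGER`, `peak_directions`, `HemiSphere` and `savemat` are assumed to be defined in the module that contains the function, and the direction set below is only illustrative:

import nibabel as nb
import numpy as np
from dipy.core.sphere import HemiSphere
from dipy.data import get_sphere

# hypothetical full direction set: one hemisphere followed by its antipodes
hemi = HemiSphere.from_sphere(get_sphere(name='repulsion724'))
odf_dirs = np.vstack([hemi.vertices, -hemi.vertices])
odf_faces = hemi.faces

rng = np.random.default_rng(0)
ampl = rng.random((4, 4, 4, odf_dirs.shape[0])).astype(np.float32)
affine = np.eye(4)

amplitudes_to_fibgz(nb.Nifti1Image(ampl, affine),
                    odf_dirs, odf_faces, 'example.fib',
                    nb.Nifti1Image(np.ones((4, 4, 4), dtype=np.uint8), affine),
                    num_fibers=3)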
Example No. 7
"""
For peak detection, `peaks_from_model` cannot be used as it doesn't support
global fitting approaches. Instead, we'll compute our peaks using a for loop.
"""

import numpy as np

from dipy.direction import peak_directions
from fury import actor  # `odf`, `sphere` and `scene` are assumed from earlier steps

shape = odf.shape[:3]
npeaks = 5  # maximum number of peaks returned for a given voxel
peak_dirs = np.zeros((shape + (npeaks, 3)))
peak_values = np.zeros((shape + (npeaks,)))

for idx in np.ndindex(shape):  # iterate through each voxel
    # Get peaks of odf
    direction, pk, _ = peak_directions(odf[idx], sphere,
                                       relative_peak_threshold=0.5,
                                       min_separation_angle=25)

    # Calculate peak metrics
    if pk.shape[0] != 0:
        n = min(npeaks, pk.shape[0])
        peak_dirs[idx][:n] = direction[:n]
        peak_values[idx][:n] = pk[:n]

# Scale up for visualization
peak_values = np.clip(peak_values * 15, 0, 1)

fodf_peaks = actor.peak_slicer(peak_dirs[:, :, 0:1, :],
                               peak_values[:, :, 0:1, :])
scene.add(fodf_peaks)
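To display or save the rendered peaks, the example would typically continue with FURY's window utilities; a minimal sketch (the output filename is an assumption):

from fury import window

# interactive view, then a screenshot of the peak glyphs
window.show(scene)
window.record(scene=scene, out_path='fodf_peaks.png', size=(600, 600))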