Example #1
File: tools.py  Project: arnaudbore/scilpy
def cut_between_masks_streamlines(sft, binary_mask, min_len=0):
    """ Cut streamlines so their segment are within the bounding box
    or going from binary mask #1 to binary mask #2.
    This function erases the data_per_point and data_per_streamline.

    Parameters
    ----------
    sft: StatefulTractogram
        The tractogram from which to cut streamlines
        (using a single mask with 2 entities).
    binary_mask: np.ndarray
        Boolean array representing the region (must contain 2 entities).
    min_len: float
        Minimum length of the resulting streamlines.

    Returns
    -------
    new_sft : StatefulTractogram
        New object with the streamlines trimmed within the masks.
    """
    sft.to_vox()
    sft.to_corner()
    streamlines = sft.streamlines

    density = get_endpoints_density_map(streamlines, binary_mask.shape)
    density[density > 0] = 1
    density[binary_mask == 0] = 0

    roi_data_1, roi_data_2 = split_heads_tails_kmeans(binary_mask)

    new_streamlines = []
    (indices, points_to_idx) = uncompress(streamlines, return_mapping=True)

    for strl_idx, strl in enumerate(streamlines):
        strl_indices = indices[strl_idx]

        in_strl_idx, out_strl_idx = intersects_two_rois(
            roi_data_1, roi_data_2, strl_indices)

        if in_strl_idx is not None and out_strl_idx is not None:
            points_to_indices = points_to_idx[strl_idx]
            tmp = compute_streamline_segment(strl, strl_indices, in_strl_idx,
                                             out_strl_idx, points_to_indices)
            new_streamlines.append(tmp)

    new_sft = StatefulTractogram.from_sft(new_streamlines, sft)
    return filter_streamlines_by_length(new_sft, min_length=min_len)
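A minimal usage sketch for cut_between_masks_streamlines, assuming dipy's load_tractogram/save_tractogram and nibabel; the file names and the min_len value are hypothetical:

import nibabel as nib
from dipy.io.streamline import load_tractogram, save_tractogram

# Hypothetical inputs: a tractogram and a NIfTI mask containing exactly two blobs.
sft = load_tractogram('bundle.trk', 'anat.nii.gz')
mask = nib.load('two_blob_mask.nii.gz').get_fdata() > 0
cut_sft = cut_between_masks_streamlines(sft, mask, min_len=20)
save_tractogram(cut_sft, 'bundle_cut.trk')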
Example #2
def get_binary_maps(streamlines, sft):
    """
    Extract a mask from a bundle

    Parameters
    ----------
    streamlines: list
        List of streamlines.
    sft : StatefulTractogram
        Reference tractogram, used for its spatial attributes (dimensions).

    Returns
    -------
    bundles_voxels: numpy.ndarray
        Mask representing the bundle volume.
    endpoints_voxels: numpy.ndarray
        Mask representing the bundle's endpoints.
    """
    dimensions = sft.dimensions
    if not len(streamlines):
        return np.zeros(dimensions), np.zeros(dimensions)
    elif len(streamlines) == 1:
        streamlines = [streamlines]
    tmp_sft = StatefulTractogram.from_sft(streamlines, sft)
    tmp_sft.to_vox()
    tmp_sft.to_corner()

    if len(tmp_sft) == 1:
        return np.zeros(dimensions), np.zeros(dimensions)

    bundles_voxels = compute_tract_counts_map(tmp_sft.streamlines,
                                              dimensions).astype(np.int16)

    endpoints_voxels = get_endpoints_density_map(tmp_sft.streamlines,
                                                 dimensions).astype(np.int16)

    bundles_voxels[bundles_voxels > 0] = 1
    endpoints_voxels[endpoints_voxels > 0] = 1

    return bundles_voxels, endpoints_voxels
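A short, hedged sketch of how get_binary_maps might be called; the paths are placeholders and the dipy import mirrors the loading done in the other examples:

from dipy.io.streamline import load_tractogram

sft = load_tractogram('bundle.trk', 'anat.nii.gz')  # hypothetical paths
bundle_mask, endpoints_mask = get_binary_maps(sft.streamlines, sft)
# Both maps are binary, so their sums give volumes in voxels.
print(int(bundle_mask.sum()), int(endpoints_mask.sum()))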
Example #3
def compute_measures(filename_tuple):
    sft = load_tractogram(filename_tuple[0], filename_tuple[1])
    _, dimensions, voxel_size, _ = sft.space_attributes

    nbr_streamlines = len(sft)
    if not nbr_streamlines:
        logging.warning('{} is empty'.format(filename_tuple[0]))
        return dict(
            zip([
                'volume', 'volume_endpoints', 'streamlines_count',
                'avg_length', 'std_length', 'min_length', 'max_length',
                'mean_curvature'
            ], [0, 0, 0, 0, 0, 0, 0, 0]))

    length_list = list(length(list(sft.streamlines)))
    length_avg = float(np.average(length_list))
    length_std = float(np.std(length_list))
    length_min = float(np.min(length_list))
    length_max = float(np.max(length_list))

    sft.to_vox()
    sft.to_corner()
    streamlines = sft.streamlines
    density = compute_tract_counts_map(streamlines, dimensions)
    endpoints_density = get_endpoints_density_map(streamlines, dimensions)

    curvature_list = np.zeros((nbr_streamlines, ))
    for i in range(nbr_streamlines):
        curvature_list[i] = mean_curvature(sft.streamlines[i])

    return dict(
        zip([
            'volume', 'volume_endpoints', 'streamlines_count', 'avg_length',
            'std_length', 'min_length', 'max_length', 'mean_curvature'
        ], [
            np.count_nonzero(density) * np.prod(voxel_size),
            np.count_nonzero(endpoints_density) * np.prod(voxel_size),
            nbr_streamlines, length_avg, length_std, length_min, length_max,
            float(np.mean(curvature_list))
        ]))
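The single filename_tuple argument suggests compute_measures is meant to be mapped over many bundles, e.g. with a multiprocessing pool; a sketch under that assumption (bundle file names are made up):

from multiprocessing import Pool

bundle_args = [('AF_left.trk', 'anat.nii.gz'),
               ('AF_right.trk', 'anat.nii.gz')]
with Pool(processes=2) as pool:
    all_measures = pool.map(compute_measures, bundle_args)
for (filename, _), measures in zip(bundle_args, all_measures):
    print(filename, measures['volume'], measures['avg_length'])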
Example #4
def load_data_tmp_saving(args):
    filename = args[0]
    reference = args[1]
    init_only = args[2]
    disable_centroids = args[3]

    # Since data is often re-used when comparing multiple bundles, anything
    # that can be computed once is saved temporarily and simply loaded on demand.
    hash_tmp = hashlib.md5(filename.encode()).hexdigest()
    tmp_density_filename = os.path.join('tmp_measures/',
                                        '{}_density.nii.gz'.format(hash_tmp))
    tmp_endpoints_filename = os.path.join('tmp_measures/',
                                          '{}_endpoints.nii.gz'.format(hash_tmp))
    tmp_centroids_filename = os.path.join('tmp_measures/',
                                          '{}_centroids.trk'.format(hash_tmp))

    sft = load_tractogram(filename, reference)
    sft.to_vox()
    sft.to_corner()
    streamlines = sft.get_streamlines_copy()
    if not streamlines:
        if init_only:
            logging.warning('{} is empty'.format(filename))
        return None

    if os.path.isfile(tmp_density_filename) \
            and os.path.isfile(tmp_endpoints_filename) \
            and os.path.isfile(tmp_centroids_filename):
        # If only initializing, loading the data is useless.
        if init_only:
            return None
        density = nib.load(tmp_density_filename).get_fdata(dtype=np.float32)
        endpoints_density = nib.load(tmp_endpoints_filename).get_fdata(dtype=np.float32)
        sft_centroids = load_tractogram(tmp_centroids_filename, reference)
        sft_centroids.to_vox()
        sft_centroids.to_corner()
        centroids = sft_centroids.get_streamlines_copy()
    else:
        transformation, dimensions, _, _ = sft.space_attributes
        density = compute_tract_counts_map(streamlines, dimensions)
        endpoints_density = get_endpoints_density_map(streamlines, dimensions,
                                                      point_to_select=3)
        thresholds = [32, 24, 12, 6]
        if disable_centroids:
            centroids = []
        else:
            centroids = qbx_and_merge(streamlines, thresholds,
                                      rng=RandomState(0),
                                      verbose=False).centroids

        # Saving tmp files to save on future computation
        nib.save(nib.Nifti1Image(density.astype(np.float32), transformation),
                 tmp_density_filename)
        nib.save(nib.Nifti1Image(endpoints_density.astype(np.int16),
                                 transformation),
                 tmp_endpoints_filename)

        # Saving in vox space and corner.
        centroids_sft = StatefulTractogram.from_sft(centroids, sft)
        save_tractogram(centroids_sft, tmp_centroids_filename)

    return density, endpoints_density, streamlines, centroids
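load_data_tmp_saving writes its cache into a tmp_measures/ directory without creating it, so the caller presumably prepares it first; a hedged calling sketch with hypothetical paths:

import os

os.makedirs('tmp_measures/', exist_ok=True)  # the function assumes this directory exists
result = load_data_tmp_saving(('bundle.trk', 'anat.nii.gz', False, False))
if result is not None:
    density, endpoints_density, streamlines, centroids = result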
Example #5
def compute_measures(filename_tuple):
    sft = load_tractogram(filename_tuple[0], filename_tuple[1])
    _, dimensions, voxel_size, _ = sft.space_attributes
    uniformize_bundle_sft(sft)
    nbr_streamlines = len(sft)
    if not nbr_streamlines:
        logging.warning('{} is empty'.format(filename_tuple[0]))
        return dict(
            zip([
                'volume', 'volume_endpoints', 'streamlines_count',
                'avg_length', 'std_length', 'min_length', 'max_length', 'span',
                'curl', 'diameter', 'elongation', 'surface_area',
                'end_surface_area_head', 'end_surface_area_tail',
                'radius_head', 'radius_tail', 'irregularity',
                'irregularity_of_end_surface_head',
                'irregularity_of_end_surface_tail', 'mean_curvature',
                'fractal_dimension'
            ], [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
                ]))

    streamline_cords = list(sft.streamlines)
    length_list = list(length(streamline_cords))
    length_avg = float(np.average(length_list))
    length_std = float(np.std(length_list))
    length_min = float(np.min(length_list))
    length_max = float(np.max(length_list))

    sft.to_vox()
    sft.to_corner()
    streamlines = sft.streamlines
    density = compute_tract_counts_map(streamlines, dimensions)
    endpoints_density = get_endpoints_density_map(streamlines, dimensions)

    span_list = list(map(compute_span, streamline_cords))
    span = float(np.average(span_list))
    curl = length_avg / span
    volume = np.count_nonzero(density) * np.prod(voxel_size)
    diameter = 2 * np.sqrt(volume / (np.pi * length_avg))
    elon = length_avg / diameter

    roi = np.where(density != 0, 1, density)
    surf_area = approximate_surface_node(roi) * (voxel_size[0]**2)
    irregularity = surf_area / (np.pi * diameter * length_avg)

    endpoints_map_head, endpoints_map_tail = \
        get_head_tail_density_maps(sft.streamlines, dimensions)
    endpoints_map_head_roi = \
        np.where(endpoints_map_head != 0, 1, endpoints_map_head)
    endpoints_map_tail_roi = \
        np.where(endpoints_map_tail != 0, 1, endpoints_map_tail)
    end_sur_area_head = \
        approximate_surface_node(endpoints_map_head_roi) * (voxel_size[0] ** 2)
    end_sur_area_tail = \
        approximate_surface_node(endpoints_map_tail_roi) * (voxel_size[0] ** 2)

    endpoints_coords_head = np.array(np.where(endpoints_map_head_roi)).T
    endpoints_coords_tail = np.array(np.where(endpoints_map_tail_roi)).T
    radius_head = 1.5 * np.average(
        np.sqrt(((endpoints_coords_head -
                  np.average(endpoints_coords_head, axis=0))**2).sum(axis=1)))
    radius_tail = 1.5 * np.average(
        np.sqrt(((endpoints_coords_tail -
                  np.average(endpoints_coords_tail, axis=0))**2).sum(axis=1)))
    end_irreg_head = (np.pi * radius_head**2) / end_sur_area_head
    end_irreg_tail = (np.pi * radius_tail**2) / end_sur_area_tail

    fractal_dimension = compute_fractal_dimension(density)

    curvature_list = np.zeros((nbr_streamlines, ))
    for i in range(nbr_streamlines):
        curvature_list[i] = mean_curvature(sft.streamlines[i])

    return dict(
        zip([
            'volume', 'volume_endpoints', 'streamlines_count', 'avg_length',
            'std_length', 'min_length', 'max_length', 'span', 'curl',
            'diameter', 'elongation', 'surface_area', 'end_surface_area_head',
            'end_surface_area_tail', 'radius_head', 'radius_tail',
            'irregularity', 'irregularity_of_end_surface_head',
            'irregularity_of_end_surface_tail', 'mean_curvature',
            'fractal_dimension'
        ], [
            volume,
            np.count_nonzero(endpoints_density) * np.prod(voxel_size),
            nbr_streamlines, length_avg, length_std, length_min, length_max,
            span, curl, diameter, elon, surf_area, end_sur_area_head,
            end_sur_area_tail, radius_head, radius_tail, irregularity,
            end_irreg_head, end_irreg_tail,
            float(np.mean(curvature_list)), fractal_dimension
        ]))
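The shape measures above reduce to simple closed forms (the bundle is modeled as a cylinder of equal volume and mean length); a small numeric check with made-up values:

import numpy as np

volume, length_avg, span = 12000.0, 100.0, 80.0         # hypothetical mm^3 and mm values
curl = length_avg / span                                # 1.25
diameter = 2 * np.sqrt(volume / (np.pi * length_avg))   # diameter of the equal-volume cylinder
elongation = length_avg / diameter
print(curl, diameter, elongation)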