Example #1
0
def test_find_cut_coords():
    """Check that find_xyz_cut_coords recovers the center of an activation blob."""
    data = np.zeros((100, 100, 100))
    x_map, y_map, z_map = 50, 10, 40
    # Rectangular blob of ones centered on (x_map, y_map, z_map).
    data[x_map - 30:x_map + 30, y_map - 3:y_map + 3, z_map - 10:z_map + 10] = 1

    # identity affine
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    # Builtin `bool` replaces the `np.bool` alias, which was deprecated in
    # NumPy 1.20 and removed in NumPy 1.24.
    x, y, z = find_xyz_cut_coords(img, mask=np.ones(data.shape, bool))
    np.testing.assert_allclose(
        (x, y, z),
        (x_map, y_map, z_map),
        # Need such a high tolerance for the test to
        # pass. x, y, z = [49.5, 9.5, 39.5]
        rtol=6e-2)

    # non-trivial affine
    affine = np.diag([1. / 2, 1 / 3., 1 / 4., 1.])
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, mask=np.ones(data.shape, bool))
    np.testing.assert_allclose(
        (x, y, z),
        (x_map / 2., y_map / 3., z_map / 4.),
        # Need such a high tolerance for the test to
        # pass. x, y, z = [24.75, 3.17, 9.875]
        rtol=6e-2)

    # regression test (cf. #473)
    # test case: no data exceeds the activation threshold
    data = np.ones((36, 43, 36))
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, activation_threshold=1.1)
    # Builtin `float` replaces the removed `np.float` alias (NumPy 1.24).
    np.testing.assert_array_equal(np.array([x, y, z]),
                                  0.5 * np.array(data.shape).astype(float))
Example #2
0
def test_find_cut_coords():
    """Check that find_xyz_cut_coords recovers the center of an activation blob."""
    data = np.zeros((100, 100, 100))
    x_map, y_map, z_map = 50, 10, 40
    # Rectangular blob of ones centered on (x_map, y_map, z_map).
    data[x_map - 30:x_map + 30, y_map - 3:y_map + 3, z_map - 10:z_map + 10] = 1

    # identity affine
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    # Builtin `bool` replaces the `np.bool` alias, which was deprecated in
    # NumPy 1.20 and removed in NumPy 1.24.
    x, y, z = find_xyz_cut_coords(img, mask=np.ones(data.shape, bool))
    np.testing.assert_allclose((x, y, z),
                               (x_map, y_map, z_map),
                               # Need such a high tolerance for the test to
                               # pass. x, y, z = [49.5, 9.5, 39.5]
                               rtol=6e-2)

    # non-trivial affine
    affine = np.diag([1. / 2, 1 / 3., 1 / 4., 1.])
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, mask=np.ones(data.shape, bool))
    np.testing.assert_allclose((x, y, z),
                               (x_map / 2., y_map / 3., z_map / 4.),
                               # Need such a high tolerance for the test to
                               # pass. x, y, z = [24.75, 3.17, 9.875]
                               rtol=6e-2)

    # regression test (cf. #473)
    # test case: no data exceeds the activation threshold
    data = np.ones((36, 43, 36))
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, activation_threshold=1.1)
    # Builtin `float` replaces the removed `np.float` alias (NumPy 1.24).
    np.testing.assert_array_equal(
        np.array([x, y, z]),
        0.5 * np.array(data.shape).astype(float))
Example #3
0
def test_find_cut_coords():
    """Check that find_xyz_cut_coords recovers the center of an activation blob."""
    data = np.zeros((100, 100, 100))
    x_map, y_map, z_map = 50, 10, 40
    # Rectangular blob of ones centered on (x_map, y_map, z_map).
    data[x_map - 30:x_map + 30, y_map - 3:y_map + 3, z_map - 10:z_map + 10] = 1

    # identity affine
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    # Builtin `bool` replaces the `np.bool` alias, which was deprecated in
    # NumPy 1.20 and removed in NumPy 1.24.
    x, y, z = find_xyz_cut_coords(img, mask=np.ones(data.shape, bool))
    np.testing.assert_allclose(
        (x, y, z),
        (x_map, y_map, z_map),
        # Need such a high tolerance for the test to
        # pass. x, y, z = [49.5, 9.5, 39.5]
        rtol=6e-2)

    # non-trivial affine
    affine = np.diag([1. / 2, 1 / 3., 1 / 4., 1.])
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, mask=np.ones(data.shape, bool))
    np.testing.assert_allclose(
        (x, y, z),
        (x_map / 2., y_map / 3., z_map / 4.),
        # Need such a high tolerance for the test to
        # pass. x, y, z = [24.75, 3.17, 9.875]
        rtol=6e-2)
def test_find_cut_coords():
    """Check find_xyz_cut_coords with an EPI-mask image and 4D/empty inputs."""
    data = np.zeros((100, 100, 100))
    x_map, y_map, z_map = 50, 10, 40
    # Rectangular blob of ones centered on (x_map, y_map, z_map).
    data[x_map - 30:x_map + 30, y_map - 3:y_map + 3, z_map - 10:z_map + 10] = 1

    # identity affine
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    mask_img = compute_epi_mask(img)
    x, y, z = find_xyz_cut_coords(img,
                                  mask_img=mask_img)

    np.testing.assert_allclose((x, y, z),
                               (x_map, y_map, z_map),
                               # Need such a high tolerance for the test to
                               # pass. x, y, z = [49.5, 9.5, 39.5]
                               rtol=6e-2)

    # non-trivial affine
    affine = np.diag([1. / 2, 1 / 3., 1 / 4., 1.])
    img = nibabel.Nifti1Image(data, affine)
    mask_img = compute_epi_mask(img)
    x, y, z = find_xyz_cut_coords(img, mask_img=mask_img)
    np.testing.assert_allclose((x, y, z),
                               (x_map / 2., y_map / 3., z_map / 4.),
                               # Need such a high tolerance for the test to
                               # pass. x, y, z = [24.75, 3.17, 9.875]
                               rtol=6e-2)

    # regression test (cf. #473)
    # test case: no data exceeds the activation threshold
    data = np.ones((36, 43, 36))
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, activation_threshold=1.1)
    # Builtin `float` replaces the `np.float` alias, which was deprecated in
    # NumPy 1.20 and removed in NumPy 1.24.
    np.testing.assert_array_equal(
        np.array([x, y, z]),
        0.5 * np.array(data.shape).astype(float))

    # regression test (cf. #922)
    # pseudo-4D images as input (i.e., X, Y, Z, 1)
    # previously raised "ValueError: too many values to unpack"
    rng = np.random.RandomState(42)
    data_3d = rng.randn(10, 10, 10)
    data_4d = data_3d[..., np.newaxis]
    affine = np.eye(4)
    img_3d = nibabel.Nifti1Image(data_3d, affine)
    img_4d = nibabel.Nifti1Image(data_4d, affine)
    assert_equal(find_xyz_cut_coords(img_3d), find_xyz_cut_coords(img_4d))
Example #5
0
def _get_cut_slices(stat_map_img, cut_coords=None, threshold=None):
    """For internal use. Find slice numbers for the cut.

    When ``cut_coords`` is None, world coordinates are chosen automatically
    from the statistical map; they are then mapped back to voxel slice
    indices through the inverse of the image affine.
    """
    # Pick cut coordinates automatically when the caller gave none.
    if cut_coords is None:
        cut_coords = find_xyz_cut_coords(
            stat_map_img, activation_threshold=threshold)

    # Map world coordinates to voxel slices; malformed input surfaces as
    # ValueError/IndexError from apply_affine, re-raised with a clearer hint.
    voxel_transform = np.linalg.inv(stat_map_img.affine)
    try:
        return apply_affine(voxel_transform, cut_coords)
    except ValueError:
        raise ValueError(
            "The input given for display_mode='ortho' needs to be "
            "a list of 3d world coordinates in (x, y, z). "
            "You provided cut_coords={0}".format(cut_coords))
    except IndexError:
        raise ValueError(
            "The input given for display_mode='ortho' needs to be "
            "a list of 3d world coordinates in (x, y, z). "
            "You provided single cut, cut_coords={0}".format(cut_coords))
def test_fast_abs_percentile_no_index_error_find_cuts():
    # Regression check: find_cuts must not raise on a tiny, mostly-zero volume
    # and must still yield a full (x, y, z) triplet.
    volume = np.array([[[1., 2.], [3., 4.]], [[0., 0.], [0., 0.]]])
    tiny_img = nibabel.Nifti1Image(volume, np.eye(4))
    coords = find_xyz_cut_coords(tiny_img)
    assert_equal(len(coords), 3)
Example #7
0
def get_stat_map(stat_map_img,
                 bg_img,
                 cut_coords=None,
                 colorbar=True,
                 title=None,
                 threshold=None,
                 annotate=True,
                 draw_cross=True,
                 black_bg='auto',
                 cmap=cm.cold_hot,
                 symmetric_cbar='auto',
                 dim='auto',
                 vmax=None,
                 resampling_interpolation='continuous',
                 n_colors=256,
                 opacity=1,
                 **kwargs):
    """
    Interactive viewer of a statistical map, with optional background

    Parameters
    ----------
    stat_map_img : Niimg-like object
        See http://nilearn.github.io/manipulating_images/input_output.html
        The statistical map image.
    bg_img : Niimg-like object (default='MNI152')
        See http://nilearn.github.io/manipulating_images/input_output.html
        The background image that the stat map will be plotted on top of.
        If nothing is specified, the MNI152 template will be used.
        To turn off background image, just pass "bg_img=False".
    cut_coords : None, a tuple of floats, or an integer (default None)
        The MNI coordinates of the point where the cut is performed
        as a 3-tuple: (x, y, z). If None is given, the cuts are calculated
        automatically.
        This parameter is not currently supported.
    colorbar : boolean, optional (default True)
        If True, display a colorbar next to the plots.
    title : string or None (default=None)
        The title displayed on the figure (or None: no title).
        This parameter is not currently supported.
    threshold : str, number or None  (default=None)
        If None is given, the image is not thresholded.
        If a number is given, it is used to threshold the image:
        values below the threshold (in absolute value) are plotted
        as transparent. If auto is given, the threshold is determined
        magically by analysis of the image.
    annotate : boolean (default=True)
        If annotate is True, positions and left/right annotation
        are added to the plot.
    draw_cross : boolean (default=True)
        If draw_cross is True, a cross is drawn on the plot to
        indicate the cut position.
    black_bg : boolean (default='auto')
        If True, the background of the image is set to be black.
        Otherwise, a white background is used.
        If set to auto, an educated guess is made to find if the background
        is white or black.
    cmap : matplotlib colormap, optional
        The colormap for specified image. The colormap *must* be
        symmetrical.
    symmetric_cbar : boolean or 'auto' (default='auto')
        Specifies whether the colorbar should range from -vmax to vmax
        or from vmin to vmax. Setting to 'auto' will select the latter if
        the range of the whole image is either positive or negative.
        Note: The colormap will always be set to range from -vmax to vmax.
    dim : float, 'auto' (default='auto')
        Dimming factor applied to background image. By default, automatic
        heuristics are applied based upon the background image intensity.
        Accepted float values, where a typical scan is between -2 and 2
        (-2 = increase contrast; 2 = decrease contrast), but larger values
        can be used for a more pronounced effect. 0 means no dimming.
    vmax : float, or None (default=None)
        max value for mapping colors.
    resampling_interpolation : string, optional (default='continuous')
        The interpolation method for resampling
        See nilearn.image.resample_img
    n_colors : integer (default=256)
        The number of discrete colors to use in the colormap, if it is
        generated.
    opacity : float in [0,1] (default 1)
        The level of opacity of the overlay (0: transparent, 1: opaque)

    Returns
    -------
    StatMapView : plot of the stat map.
        It can be saved as an html page or rendered (transparently) by the
        Jupyter notebook.
    """

    # Load stat map
    stat_map_img = check_niimg_3d(stat_map_img, dtype='auto')

    _, _, vmin, vmax = _get_colorbar_and_data_ranges(
        _safe_get_data(stat_map_img, ensure_finite=True), vmax, symmetric_cbar,
        kwargs)

    # load background image, and resample stat map
    if bg_img is not None and bg_img is not False:
        bg_img, black_bg, bg_min, bg_max = _load_anat(bg_img,
                                                      dim=dim,
                                                      black_bg=black_bg)
        bg_img = _resample_to_self(bg_img,
                                   interpolation=resampling_interpolation)
        stat_map_img = image.resample_to_img(
            stat_map_img, bg_img, interpolation=resampling_interpolation)

    else:
        # No background requested: plot the stat map over an all-zero volume.
        stat_map_img = _resample_to_self(
            stat_map_img, interpolation=resampling_interpolation)
        bg_img = image.new_img_like(stat_map_img, np.zeros(stat_map_img.shape),
                                    stat_map_img.affine)
        bg_min = 0
        bg_max = 0
        if black_bg == 'auto':
            black_bg = False

    # Select coordinates for the cut
    # https://github.com/nilearn/nilearn/blob/master/nilearn/plotting/displays.py#L943
    if isinstance(cut_coords, numbers.Number):
        raise ValueError(
            "The input given for display_mode='ortho' needs to be "
            "a list of 3d world coordinates in (x, y, z). "
            "You provided single cut, cut_coords={0}".format(cut_coords))
    if cut_coords is None:
        cut_coords = find_xyz_cut_coords(stat_map_img,
                                         activation_threshold=threshold)

    # Create a base64 sprite for the background
    bg_sprite = BytesIO()
    save_sprite(bg_img,
                output_sprite=bg_sprite,
                cmap='gray',
                format='jpg',
                resample=False,
                vmin=bg_min,
                vmax=bg_max)
    bg_sprite.seek(0)
    bg_base64 = encodebytes(bg_sprite.read()).decode('utf-8')
    bg_sprite.close()

    # Create a base64 sprite for the stat map
    # Possibly, also generate a file with the colormap
    stat_map_sprite = BytesIO()
    stat_map_json = StringIO()
    if colorbar:
        stat_map_cm = BytesIO()
    else:
        stat_map_cm = None
    cmap_c = _custom_cmap(cmap, vmin, vmax, threshold)
    save_sprite(stat_map_img, stat_map_sprite, stat_map_cm, stat_map_json,
                vmax, vmin, cmap_c, threshold, n_colors, 'png', False)

    # Convert the sprite and colormap to base64
    stat_map_sprite.seek(0)
    stat_map_base64 = encodebytes(stat_map_sprite.read()).decode('utf-8')
    stat_map_sprite.close()

    if colorbar:
        stat_map_cm.seek(0)
        cm_base64 = encodebytes(stat_map_cm.read()).decode('utf-8')
        stat_map_cm.close()
    else:
        cm_base64 = ''
    # Load the sprite meta-data from the json dump
    stat_map_json.seek(0)
    params = json.load(stat_map_json)
    stat_map_json.close()

    # Convert cut coordinates into cut slices
    cut_slices = np.round(
        nb.affines.apply_affine(np.linalg.inv(stat_map_img.affine),
                                cut_coords))

    # Create a json-like structure
    # with all the brain sprite parameters
    sprite_params = {
        'canvas': '3Dviewer',
        'sprite': 'spriteImg',
        'nbSlice': params['nbSlice'],
        'overlay': {
            'sprite': 'overlayImg',
            'nbSlice': params['nbSlice'],
            'opacity': opacity
        },
        'colorBackground': '#000000',
        'colorFont': '#ffffff',
        'colorCrosshair': '#de101d',
        'crosshair': draw_cross,
        'affine': params['affine'],
        'flagCoordinates': annotate,
        'title': title,
        'flagValue': annotate,
        'numSlice': {
            'X': cut_slices[0],
            'Y': cut_slices[1],
            'Z': cut_slices[2]
        },
    }
    if colorbar:
        sprite_params['colorMap'] = {
            'img': 'colorMap',
            'min': params['min'],
            'max': params['max']
        }

    return sprite_params, bg_base64, stat_map_base64, cm_base64
Example #8
0
def test_fast_abs_percentile_no_index_error_find_cuts():
    # find_cuts should stay safe on a small volume where half the
    # voxels are zero, and should always return three coordinates.
    arr = np.array([[[1., 2.], [3., 4.]], [[0., 0.], [0., 0.]]])
    small_img = nibabel.Nifti1Image(arr, np.eye(4))
    assert_equal(len(find_xyz_cut_coords(small_img)), 3)
Example #9
0
def test_find_cuts_empty_mask_no_crash():
    # With a degenerate mask, find_xyz_cut_coords must warn (not crash)
    # and fall back to the middle of the volume.
    affine = np.eye(4)
    uniform_img = nibabel.Nifti1Image(np.ones((2, 2, 2)), affine)
    mask_img = compute_epi_mask(uniform_img)
    with pytest.warns(UserWarning):
        cut_coords = find_xyz_cut_coords(uniform_img, mask_img=mask_img)
    np.testing.assert_array_equal(cut_coords, [.5, .5, .5])
Example #10
0
def test_find_cut_coords():
    """Exercise find_xyz_cut_coords: blob recovery, masking, warnings, 4D input."""
    data = np.zeros((100, 100, 100))
    x_map, y_map, z_map = 50, 10, 40
    # Rectangular blob of ones centered on (x_map, y_map, z_map).
    data[x_map - 30:x_map + 30, y_map - 3:y_map + 3, z_map - 10:z_map + 10] = 1

    # identity affine
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    mask_img = compute_epi_mask(img)
    x, y, z = find_xyz_cut_coords(img, mask_img=mask_img)

    np.testing.assert_allclose(
        (x, y, z),
        (x_map, y_map, z_map),
        # Need such a high tolerance for the test to
        # pass. x, y, z = [49.5, 9.5, 39.5]
        rtol=6e-2)

    # non-trivial affine
    affine = np.diag([1. / 2, 1 / 3., 1 / 4., 1.])
    img = nibabel.Nifti1Image(data, affine)
    mask_img = compute_epi_mask(img)
    x, y, z = find_xyz_cut_coords(img, mask_img=mask_img)
    np.testing.assert_allclose(
        (x, y, z),
        (x_map / 2., y_map / 3., z_map / 4.),
        # Need such a high tolerance for the test to
        # pass. x, y, z = [24.75, 3.17, 9.875]
        rtol=6e-2)

    # regression test (cf. #473)
    # test case: no data exceeds the activation threshold
    # Cut coords should be the center of mass rather than
    # the center of the image (10, 10, 10).
    data = np.ones((36, 43, 36))
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, activation_threshold=1.1)
    np.testing.assert_array_equal([x, y, z], [17.5, 21., 17.5])

    # Small bright cube in a scaled (2 mm) grid with an all-ones mask.
    data = np.zeros((20, 20, 20))
    data[4:6, 4:6, 4:6] = 1000
    img = nibabel.Nifti1Image(data, 2 * np.eye(4))
    mask_data = np.ones((20, 20, 20), dtype=int)
    mask_img = nibabel.Nifti1Image(mask_data, 2 * np.eye(4))
    cut_coords = find_xyz_cut_coords(img, mask_img=mask_img)
    np.testing.assert_array_equal(cut_coords, [9., 9., 9.])

    # Check that a warning is given when all values are masked
    # and that the center of mass is returned
    img = nibabel.Nifti1Image(data, np.eye(4))
    # NOTE(review): indexing a 3D array with the (N, 3) output of np.argwhere
    # zeroes whole first-axis planes, not individual voxels; boolean indexing
    # `mask_data[data == 1000] = 0` looks intended — confirm before changing,
    # since the expected coords below depend on the actual mask produced.
    mask_data[np.argwhere(data == 1000)] = 0
    mask_img = nibabel.Nifti1Image(mask_data, np.eye(4))
    with pytest.warns(UserWarning,
                      match=("Could not determine cut coords: "
                             "All values were masked.")):
        cut_coords = find_xyz_cut_coords(img, mask_img=mask_img)
    np.testing.assert_array_equal(cut_coords, [4.5, 4.5, 4.5])

    # Check that a warning is given when all values are masked
    # due to thresholding and that the center of mass is returned
    mask_data = np.ones((20, 20, 20), dtype=int)
    mask_img = nibabel.Nifti1Image(mask_data, np.eye(4))
    with pytest.warns(UserWarning,
                      match=("Could not determine cut coords: "
                             "All voxels were masked by the thresholding.")):
        cut_coords = find_xyz_cut_coords(img,
                                         mask_img=mask_img,
                                         activation_threshold=10**3)
    np.testing.assert_array_equal(cut_coords, [4.5, 4.5, 4.5])

    # regression test (cf. #922)
    # pseudo-4D images as input (i.e., X, Y, Z, 1)
    # previously raised "ValueError: too many values to unpack"
    rng = np.random.RandomState(42)
    data_3d = rng.standard_normal(size=(10, 10, 10))
    data_4d = data_3d[..., np.newaxis]
    affine = np.eye(4)
    img_3d = nibabel.Nifti1Image(data_3d, affine)
    img_4d = nibabel.Nifti1Image(data_4d, affine)
    assert find_xyz_cut_coords(img_3d) == find_xyz_cut_coords(img_4d)

    # test passing empty image returns coordinates pointing to AC-PC line
    data = np.zeros((20, 30, 40))
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    cut_coords = find_xyz_cut_coords(img)
    assert cut_coords == [0.0, 0.0, 0.0]
    # Same call again, this time asserting that the empty image warns.
    with pytest.warns(UserWarning):
        cut_coords = find_xyz_cut_coords(img)