Example No. 1
def test_smooth_img():
    # This function only checks the functionality added on top of
    # _smooth_array()
    shapes = ((10, 11, 12), (13, 14, 15))
    lengths = (17, 18)
    fwhm = (1., 2., 3.)

    img1, mask1 = testing.generate_fake_fmri(shape=shapes[0],
                                             length=lengths[0])
    img2, mask2 = testing.generate_fake_fmri(shape=shapes[1],
                                             length=lengths[1])

    for create_files in (False, True):
        with testing.write_tmp_imgs(img1, img2,
                                    create_files=create_files) as imgs:
            # List of images as input
            out = image.smooth_img(imgs, fwhm)
            assert_true(isinstance(out, list))
            assert_true(len(out) == 2)
            for o, s, l in zip(out, shapes, lengths):
                assert_true(o.shape == (s + (l, )))

            # Single image as input
            out = image.smooth_img(imgs[0], fwhm)
            assert_true(isinstance(out, nibabel.Nifti1Image))
            assert_true(out.shape == (shapes[0] + (lengths[0], )))

    # Check the corner case fwhm=0 (see issue #1537):
    # smooth_img should raise a warning when fwhm=0.
    assert_warns(UserWarning, image.smooth_img, img1, fwhm=0.)

    # Test output equal when fwhm=None and fwhm=0
    out_fwhm_none = image.smooth_img(img1, fwhm=None)
    out_fwhm_zero = image.smooth_img(img1, fwhm=0.)
    assert_array_equal(out_fwhm_none.get_data(), out_fwhm_zero.get_data())
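For context, image.smooth_img is the public API these checks exercise: it takes one or several Niimg-like inputs and an fwhm that can be a scalar, a 3-tuple, 'fast', or None. A minimal standalone sketch, assuming nilearn and nibabel are installed (toy data, not taken from the test):

import numpy as np
import nibabel
from nilearn import image

# Build a small random 3D image with an identity affine (1 mm voxels).
rng = np.random.RandomState(0)
toy_img = nibabel.Nifti1Image(rng.rand(10, 11, 12), np.eye(4))

# Apply 3 mm isotropic Gaussian smoothing; the output keeps the input shape.
smoothed = image.smooth_img(toy_img, fwhm=3.)
print(smoothed.shape)  # (10, 11, 12)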
Example No. 2
def test_resampling_nan():
    # Test that when the data has NaNs they do not propagate to the
    # whole image

    for core_shape in [(3, 5, 4), (3, 5, 4, 2)]:
        # create deterministic data, padded with one
        # voxel thickness of zeros
        core_data = np.arange(np.prod(core_shape)).reshape(core_shape).astype(
            np.float)
        # Introduce a nan
        core_data[2, 2:4, 1] = np.nan
        full_data_shape = np.array(core_shape) + 2
        full_data = np.zeros(full_data_shape)
        full_data[[slice(1, 1 + s) for s in core_shape]] = core_data

        source_img = Nifti1Image(full_data, np.eye(4))

        # Transform real data using easily checkable transformations
        # For now: axis permutations
        axis_permutation = [0, 1, 2]

        # check 3x3 transformation matrix
        target_affine = np.eye(3)[axis_permutation]
        resampled_img = testing.assert_warns(RuntimeWarning,
                                             resample_img,
                                             source_img,
                                             target_affine=target_affine)

        resampled_data = resampled_img.get_data()
        if full_data.ndim == 4:
            axis_permutation.append(3)
        what_resampled_data_should_be = full_data.transpose(axis_permutation)
        non_nan = np.isfinite(what_resampled_data_should_be)

        # Check that the input data hasn't been modified:
        assert_false(np.all(non_nan))

        # Check that for finite value resampling works without problems
        assert_array_almost_equal(resampled_data[non_nan],
                                  what_resampled_data_should_be[non_nan])

        # Check that what was not finite is still not finite
        assert_false(
            np.any(np.isfinite(resampled_data[np.logical_not(non_nan)])))

    # Test with an actual resampling, in the case of a biggish hole
    # This checks the extrapolation mechanism: if we don't do any
    # extrapolation before resampling, the hole creates big
    # artefacts
    data = 10 * np.ones((10, 10, 10))
    data[4:6, 4:6, 4:6] = np.nan
    source_img = Nifti1Image(data, 2 * np.eye(4))
    resampled_img = testing.assert_warns(RuntimeWarning,
                                         resample_img,
                                         source_img,
                                         target_affine=np.eye(4))

    resampled_data = resampled_img.get_data()
    np.testing.assert_allclose(10, resampled_data[np.isfinite(resampled_data)])
Example No. 3
def test_resampling_nan():
    # Test that when the data has NaNs they do not propagate to the
    # whole image

    for core_shape in [(3, 5, 4), (3, 5, 4, 2)]:
        # create deterministic data, padded with one
        # voxel thickness of zeros
        core_data = np.arange(np.prod(core_shape)
                              ).reshape(core_shape).astype(np.float)
        # Introduce a nan
        core_data[2, 2:4, 1] = np.nan
        full_data_shape = np.array(core_shape) + 2
        full_data = np.zeros(full_data_shape)
        full_data[[slice(1, 1 + s) for s in core_shape]] = core_data

        source_img = Nifti1Image(full_data, np.eye(4))

        # Transform real data using easily checkable transformations
        # For now: axis permutations
        axis_permutation = [0, 1, 2]

        # check 3x3 transformation matrix
        target_affine = np.eye(3)[axis_permutation]
        resampled_img = testing.assert_warns(
            RuntimeWarning, resample_img, source_img,
            target_affine=target_affine)

        resampled_data = resampled_img.get_data()
        if full_data.ndim == 4:
            axis_permutation.append(3)
        what_resampled_data_should_be = full_data.transpose(axis_permutation)
        non_nan = np.isfinite(what_resampled_data_should_be)

        # Check that the input data hasn't been modified:
        assert_false(np.all(non_nan))

        # Check that for finite value resampling works without problems
        assert_array_almost_equal(resampled_data[non_nan],
                                  what_resampled_data_should_be[non_nan])

        # Check that what was not finite is still not finite
        assert_false(np.any(np.isfinite(
                        resampled_data[np.logical_not(non_nan)])))

    # Test with an actual resampling, in the case of a biggish hole
    # This checks the extrapolation mechanism: if we don't do any
    # extrapolation before resampling, the hole creates big
    # artefacts
    data = 10 * np.ones((10, 10, 10))
    data[4:6, 4:6, 4:6] = np.nan
    source_img = Nifti1Image(data, 2 * np.eye(4))
    resampled_img = testing.assert_warns(
        RuntimeWarning, resample_img, source_img,
        target_affine=np.eye(4))

    resampled_data = resampled_img.get_data()
    np.testing.assert_allclose(
        10, resampled_data[np.isfinite(resampled_data)])
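For context, resample_img is the function under test here. The sketch below, assuming nilearn and nibabel are installed, resamples a toy image containing a single NaN onto a coarser grid (chosen for illustration; the test itself uses axis permutations and a "hole" of NaNs) and uses get_fdata, the newer nibabel accessor:

import warnings
import numpy as np
from nibabel import Nifti1Image
from nilearn.image import resample_img

data = np.zeros((5, 5, 5))
data[2, 2, 2] = np.nan                       # a single NaN voxel
img = Nifti1Image(data, np.eye(4))           # 1 mm isotropic voxels

with warnings.catch_warnings():
    # resample_img emits a RuntimeWarning when NaNs are present; silence it here.
    warnings.simplefilter("ignore", RuntimeWarning)
    resampled = resample_img(img, target_affine=2 * np.eye(4))  # 2 mm grid

values = resampled.get_fdata()
# The NaN stays confined to its neighbourhood instead of contaminating the
# whole image; the remaining voxels stay finite and close to the original zeros.
print(values.shape, np.isfinite(values).sum(), values.size)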
Example No. 4
def test_screening_space_net():
    for verbose in [0, 2]:
        screening_percentile = assert_warns(UserWarning,
                                            _adjust_screening_percentile, 10,
                                            mask, verbose)
    screening_percentile = assert_warns(UserWarning,
                                        _adjust_screening_percentile, 10, mask)
    # We passed a very small mask here, judging by the standards of brain
    # size, so the screening_percentile_ corrected for brain size should
    # be 100%
    assert_equal(screening_percentile, 100)
Example No. 5
def test_screening_space_net():
    for verbose in [0, 2]:
        screening_percentile = assert_warns(UserWarning,
                                            _adjust_screening_percentile, 10,
                                            mask, verbose)
    screening_percentile = assert_warns(UserWarning,
                                        _adjust_screening_percentile, 10, mask)
    # We passed a very small mask here, judging by the standards of brain
    # size, so the screening_percentile_ corrected for brain size should
    # be 100%
    assert_equal(screening_percentile, 100)
Example No. 6
def test_load_uniform_ball_cloud():
    for n_points in [10, 20, 40, 80, 160]:
        with warnings.catch_warnings(record=True) as w:
            points = surface._load_uniform_ball_cloud(n_points=n_points)
            assert_array_equal(points.shape, (n_points, 3))
            assert_equal(len(w), 0)
    assert_warns(surface.EfficiencyWarning,
                 surface._load_uniform_ball_cloud, n_points=3)
    for n_points in [3, 10, 20]:
        computed = surface._uniform_ball_cloud(n_points)
        loaded = surface._load_uniform_ball_cloud(n_points)
        assert_array_almost_equal(computed, loaded)
Example No. 7
def test_move_col_id():
    im_terms, col_terms = neurovault._move_col_id(
        {
            'collection_id': 1,
            'not_mni': False
        }, {})
    assert_equal(im_terms, {'not_mni': False})
    assert_equal(col_terms, {'id': 1})

    assert_warns(UserWarning, neurovault._move_col_id, {
        'collection_id': 1,
        'not_mni': False
    }, {'id': 2})
Example No. 8
def test_load_uniform_ball_cloud():
    for n_points in [10, 20, 40, 80, 160]:
        with warnings.catch_warnings(record=True) as w:
            points = surface._load_uniform_ball_cloud(n_points=n_points)
            assert_array_equal(points.shape, (n_points, 3))
            assert_equal(len(w), 0)
    assert_warns(surface.EfficiencyWarning,
                 surface._load_uniform_ball_cloud, n_points=3)
    # Before scikit-learn 0.18, k-means was computed differently, so the
    # result would differ from the stored values, computed with version 0.2
    if LooseVersion(sklearn.__version__) >= LooseVersion('0.18'):
        for n_points in [3, 10, 20]:
            computed = surface._uniform_ball_cloud(n_points)
            loaded = surface._load_uniform_ball_cloud(n_points)
            assert_array_almost_equal(computed, loaded)
Example No. 9
def test_load_uniform_ball_cloud():
    for n_points in [10, 20, 40, 80, 160]:
        with warnings.catch_warnings(record=True) as w:
            points = surface._load_uniform_ball_cloud(n_points=n_points)
            assert_array_equal(points.shape, (n_points, 3))
            assert_equal(len(w), 0)
    assert_warns(surface.EfficiencyWarning,
                 surface._load_uniform_ball_cloud,
                 n_points=3)
    # Before scikit-learn 0.18, k-means was computed differently, so the
    # result would differ from the stored values, computed with version 0.2
    if LooseVersion(sklearn.__version__) >= LooseVersion('0.18'):
        for n_points in [3, 10, 20]:
            computed = surface._uniform_ball_cloud(n_points)
            loaded = surface._load_uniform_ball_cloud(n_points)
            assert_array_almost_equal(computed, loaded)
Example No. 10
def test_find_cut_coords():
    data = np.zeros((100, 100, 100))
    x_map, y_map, z_map = 50, 10, 40
    data[x_map - 30:x_map + 30, y_map - 3:y_map + 3, z_map - 10:z_map + 10] = 1

    # identity affine
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    mask_img = compute_epi_mask(img)
    x, y, z = find_xyz_cut_coords(img,
                                  mask_img=mask_img)

    np.testing.assert_allclose((x, y, z),
                               (x_map, y_map, z_map),
                               # Need such a high tolerance for the test to
                               # pass. x, y, z = [49.5, 9.5, 39.5]
                               rtol=6e-2)

    # non-trivial affine
    affine = np.diag([1. / 2, 1 / 3., 1 / 4., 1.])
    img = nibabel.Nifti1Image(data, affine)
    mask_img = compute_epi_mask(img)
    x, y, z = find_xyz_cut_coords(img, mask_img=mask_img)
    np.testing.assert_allclose((x, y, z),
                               (x_map / 2., y_map / 3., z_map / 4.),
                               # Need such a high tolerance for the test to
                               # pass. x, y, z = [24.75, 3.17, 9.875]
                               rtol=6e-2)

    # regression test (cf. #473)
    # test case: no data exceeds the activation threshold
    data = np.ones((36, 43, 36))
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    x, y, z = find_xyz_cut_coords(img, activation_threshold=1.1)
    np.testing.assert_array_equal(
        np.array([x, y, z]),
        0.5 * np.array(data.shape).astype(np.float))

    # regression test (cf. #922)
    # pseudo-4D images as input (i.e., X, Y, Z, 1)
    # previously raised "ValueError: too many values to unpack"
    rng = np.random.RandomState(42)
    data_3d = rng.randn(10, 10, 10)
    data_4d = data_3d[..., np.newaxis]
    affine = np.eye(4)
    img_3d = nibabel.Nifti1Image(data_3d, affine)
    img_4d = nibabel.Nifti1Image(data_4d, affine)
    assert_equal(find_xyz_cut_coords(img_3d), find_xyz_cut_coords(img_4d))

    # Test that passing an empty image returns coordinates pointing to the
    # AC-PC line
    data = np.zeros((20, 30, 40))
    affine = np.eye(4)
    img = nibabel.Nifti1Image(data, affine)
    cut_coords = find_xyz_cut_coords(img)
    assert_equal(cut_coords, [0.0, 0.0, 0.0])
    cut_coords = assert_warns(UserWarning, find_xyz_cut_coords, img)
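A standalone sketch of find_xyz_cut_coords, the function exercised above, assuming nilearn and nibabel are installed; the blob size and position are arbitrary:

import numpy as np
import nibabel
from nilearn.plotting import find_xyz_cut_coords

data = np.zeros((30, 30, 30))
data[10:20, 12:18, 14:16] = 1                # a small rectangular "activation" blob
img = nibabel.Nifti1Image(data, np.eye(4))   # identity affine: voxel == world coordinates

# Returns world-space (x, y, z) coordinates near the blob's centre of mass.
print(find_xyz_cut_coords(img))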
Example No. 11
def test_check_threshold():
    matrix = np.array([[1., 2.], [2., 1.]])

    name = 'threshold'
    # few not correctly formatted strings for 'threshold'
    wrong_thresholds = ['0.1', '10', '10.2.3%', 'asdf%']
    for wrong_threshold in wrong_thresholds:
        assert_raises_regex(
            ValueError, '{0}.+should be a number followed by '
            'the percent sign'.format(name), check_threshold, wrong_threshold,
            matrix, 'fast_abs_percentile', name)

    threshold = object()
    assert_raises_regex(
        TypeError, '{0}.+should be either a number '
        'or a string'.format(name), check_threshold, threshold, matrix,
        'fast_abs_percentile', name)

    # Test threshold as an int: threshold=2 should be returned as-is
    # since it is not a string
    assert_equal(
        check_threshold(2, matrix, percentile_func=fast_abs_percentile), 2)

    # Check that a warning is raised if the given threshold is higher than expected
    assert_warns(UserWarning,
                 check_threshold,
                 3.,
                 matrix,
                 percentile_func=fast_abs_percentile)

    # test with numpy scalar as argument
    threshold = 2.
    threshold_numpy_scalar = np.float64(threshold)
    assert_equal(
        check_threshold(threshold, matrix,
                        percentile_func=fast_abs_percentile),
        check_threshold(threshold_numpy_scalar,
                        matrix,
                        percentile_func=fast_abs_percentile))

    # Test threshold provided as a percentile of the data
    # (a string ending with '%')
    assert_true(1. < check_threshold(
        "50%", matrix, percentile_func=fast_abs_percentile, name=name) <= 2.)
Example No. 12
def test_check_embedded_nifti_masker():
    owner = OwningClass()
    masker = check_embedded_nifti_masker(owner)
    assert_true(type(masker) is MultiNiftiMasker)

    for mask, multi_subject in ((MultiNiftiMasker(), True), (NiftiMasker(),
                                                             False)):
        owner = OwningClass(mask=mask)
        masker = check_embedded_nifti_masker(owner,
                                             multi_subject=multi_subject)
        assert_equal(type(masker), type(mask))
        for param_key in masker.get_params():
            if param_key not in [
                    'memory', 'memory_level', 'n_jobs', 'verbose'
            ]:
                assert_equal(getattr(masker, param_key),
                             getattr(mask, param_key))
            else:
                assert_equal(getattr(masker, param_key),
                             getattr(owner, param_key))

    # Check use of mask as mask_img
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert_true(masker.mask_img is mask)

    # Check attribute forwarding
    data = np.zeros((9, 9, 9))
    data[2:-2, 2:-2, 2:-2] = 10
    imgs = nibabel.Nifti1Image(data, np.eye(4))
    mask = MultiNiftiMasker()
    mask.fit([[imgs]])
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert_true(masker.mask_img is mask.mask_img_)

    # Check conflict warning
    mask = NiftiMasker(mask_strategy='epi')
    owner = OwningClass(mask=mask)
    assert_warns(UserWarning, check_embedded_nifti_masker, owner)
Example No. 13
def test_smooth_img():
    # This function only checks the functionality added on top of
    # _smooth_array()
    shapes = ((10, 11, 12), (13, 14, 15))
    lengths = (17, 18)
    fwhm = (1., 2., 3.)

    img1, mask1 = data_gen.generate_fake_fmri(shape=shapes[0],
                                              length=lengths[0])
    img2, mask2 = data_gen.generate_fake_fmri(shape=shapes[1],
                                              length=lengths[1])

    for create_files in (False, True):
        with testing.write_tmp_imgs(img1, img2,
                                    create_files=create_files) as imgs:
            # List of images as input
            out = image.smooth_img(imgs, fwhm)
            assert_true(isinstance(out, list))
            assert_true(len(out) == 2)
            for o, s, l in zip(out, shapes, lengths):
                assert_true(o.shape == (s + (l,)))

            # Single image as input
            out = image.smooth_img(imgs[0], fwhm)
            assert_true(isinstance(out, nibabel.Nifti1Image))
            assert_true(out.shape == (shapes[0] + (lengths[0],)))

    # Check the corner case fwhm=0 (see issue #1537):
    # smooth_img should raise a warning when fwhm=0.
    assert_warns(UserWarning, image.smooth_img, img1, fwhm=0.)

    # Test output equal when fwhm=None and fwhm=0
    out_fwhm_none = image.smooth_img(img1, fwhm=None)
    out_fwhm_zero = image.smooth_img(img1, fwhm=0.)
    assert_array_equal(out_fwhm_none.get_data(), out_fwhm_zero.get_data())

    data1 = np.zeros((10, 11, 12))
    data1[2:4, 1:5, 3:6] = 1
    data2 = np.zeros((13, 14, 15))
    data2[2:4, 1:5, 3:6] = 9
    img1_nifti2 = nibabel.Nifti2Image(data1, affine=np.eye(4))
    img2_nifti2 = nibabel.Nifti2Image(data2, affine=np.eye(4))
    out = image.smooth_img([img1_nifti2, img2_nifti2], fwhm=1.)
Example No. 14
def test_load_uniform_ball_cloud():
    # Note: computed and shipped point clouds may differ since KMeans results
    # change after
    # https://github.com/scikit-learn/scikit-learn/pull/9288
    # but the exact position of the points does not matter as long as they are
    # well spread inside the unit ball
    for n_points in [10, 20, 40, 80, 160]:
        with warnings.catch_warnings(record=True) as w:
            points = surface._load_uniform_ball_cloud(n_points=n_points)
            assert_array_equal(points.shape, (n_points, 3))
            assert_equal(len(w), 0)
    assert_warns(surface.EfficiencyWarning,
                 surface._load_uniform_ball_cloud, n_points=3)
    for n_points in [3, 7]:
        computed = surface._uniform_ball_cloud(n_points)
        loaded = surface._load_uniform_ball_cloud(n_points)
        assert_array_almost_equal(computed, loaded)
        assert (np.std(computed, axis=0) > .1).all()
        assert (np.linalg.norm(computed, axis=1) <= 1).all()
Example No. 15
def test_check_threshold():
    matrix = np.array([[1., 2.],
                       [2., 1.]])

    name = 'threshold'
    # few not correctly formatted strings for 'threshold'
    wrong_thresholds = ['0.1', '10', '10.2.3%', 'asdf%']
    for wrong_threshold in wrong_thresholds:
        assert_raises_regex(ValueError,
                            '{0}.+should be a number followed by '
                            'the percent sign'.format(name),
                            check_threshold,
                            wrong_threshold, matrix,
                            'fast_abs_percentile', name)

    threshold = object()
    assert_raises_regex(TypeError,
                        '{0}.+should be either a number '
                        'or a string'.format(name),
                        check_threshold, threshold, matrix,
                        'fast_abs_percentile', name)

    # Test threshold as an int: threshold=2 should be returned as-is
    # since it is not a string
    assert_equal(check_threshold(2, matrix, percentile_func=fast_abs_percentile), 2)

    # Check that a warning is raised if the given threshold is higher than expected
    assert_warns(UserWarning, check_threshold, 3., matrix,
                 percentile_func=fast_abs_percentile)

    # test with numpy scalar as argument
    threshold = 2.
    threshold_numpy_scalar = np.float64(threshold)
    assert_equal(
        check_threshold(threshold, matrix, percentile_func=fast_abs_percentile),
        check_threshold(threshold_numpy_scalar, matrix,
                        percentile_func=fast_abs_percentile))

    # Test threshold provided as a percentile of the data
    # (a string ending with '%')
    assert_true(1. < check_threshold("50%", matrix,
                                     percentile_func=fast_abs_percentile,
                                     name=name) <= 2.)
Example No. 16
def test_check_embedded_nifti_masker():
    owner = OwningClass()
    masker = check_embedded_nifti_masker(owner)
    assert_true(type(masker) is MultiNiftiMasker)

    for mask, multi_subject in (
            (MultiNiftiMasker(), True), (NiftiMasker(), False)):
        owner = OwningClass(mask=mask)
        masker = check_embedded_nifti_masker(owner,
                                             multi_subject=multi_subject)
        assert_equal(type(masker), type(mask))
        for param_key in masker.get_params():
            if param_key not in ['memory', 'memory_level', 'n_jobs',
                                 'verbose']:
                assert_equal(getattr(masker, param_key),
                             getattr(mask, param_key))
            else:
                assert_equal(getattr(masker, param_key),
                             getattr(owner, param_key))

    # Check use of mask as mask_img
    shape = (6, 8, 10, 5)
    affine = np.eye(4)
    mask = nibabel.Nifti1Image(np.ones(shape[:3], dtype=np.int8), affine)
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert_true(masker.mask_img is mask)

    # Check attribute forwarding
    data = np.zeros((9, 9, 9))
    data[2:-2, 2:-2, 2:-2] = 10
    imgs = nibabel.Nifti1Image(data, np.eye(4))
    mask = MultiNiftiMasker()
    mask.fit([[imgs]])
    owner = OwningClass(mask=mask)
    masker = check_embedded_nifti_masker(owner)
    assert_true(masker.mask_img is mask.mask_img_)

    # Check conflict warning
    mask = NiftiMasker(mask_strategy='epi')
    owner = OwningClass(mask=mask)
    assert_warns(UserWarning, check_embedded_nifti_masker, owner)
Example No. 17
def test_compute_gray_matter_mask():
    image = Nifti1Image(np.ones((9, 9, 9)), np.eye(4))

    mask = compute_gray_matter_mask(image, threshold=-1)
    mask1 = np.zeros((9, 9, 9))
    mask1[2:-2, 2:-2, 2:-2] = 1

    np.testing.assert_array_equal(mask1, mask.get_data())

    # Check that we get a useful warning for empty masks
    assert_warns(masking.MaskWarning, compute_gray_matter_mask, image, threshold=1)

    # Check that masks obtained from same FOV are the same
    img1 = Nifti1Image(np.full((9, 9, 9), np.random.rand()), np.eye(4))
    img2 = Nifti1Image(np.full((9, 9, 9), np.random.rand()), np.eye(4))

    mask_img1 = compute_gray_matter_mask(img1)
    mask_img2 = compute_gray_matter_mask(img2)
    np.testing.assert_array_equal(mask_img1.get_data(),
                                  mask_img2.get_data())
Example No. 18
def test_fetch_neurovault():
    with _TestTemporaryDirectory() as temp_dir:
        # check that nothing is downloaded in offline mode
        data = neurovault.fetch_neurovault(mode='offline', data_dir=temp_dir)
        assert_equal(len(data.images), 0)
        # try to download an image
        data = neurovault.fetch_neurovault(max_images=1,
                                           fetch_neurosynth_words=True,
                                           mode='overwrite',
                                           data_dir=temp_dir)
        # specifying a filter while leaving the default term
        # filters in place should raise a warning.
        assert_warns(UserWarning,
                     neurovault.fetch_neurovault,
                     image_filter=lambda x: True,
                     max_images=1,
                     mode='offline')
        # if NeuroVault was reachable, one image matching the
        # default filters should have been downloaded
        if data.images:
            assert_equal(len(data.images), 1)
            meta = data.images_meta[0]
            assert_false(meta['not_mni'])
            assert_true(meta['is_valid'])
            assert_false(meta['not_mni'])
            assert_false(meta['is_thresholded'])
            assert_false(
                meta['map_type'] in ['ROI/mask', 'anatomical', 'parcellation'])
            assert_false(meta['image_type'] == 'atlas')

        # using a data directory we can't write into should raise a
        # warning unless mode is 'offline'
        os.chmod(temp_dir, stat.S_IREAD | stat.S_IEXEC)
        os.chmod(os.path.join(temp_dir, 'neurovault'),
                 stat.S_IREAD | stat.S_IEXEC)
        if os.access(os.path.join(temp_dir, 'neurovault'), os.W_OK):
            return
        assert_warns(UserWarning,
                     neurovault.fetch_neurovault,
                     data_dir=temp_dir)
Example No. 19
def test_compute_gray_matter_mask():
    image = Nifti1Image(np.ones((9, 9, 9)), np.eye(4))

    mask = compute_gray_matter_mask(image, threshold=-1)
    mask1 = np.zeros((9, 9, 9))
    mask1[2:-2, 2:-2, 2:-2] = 1

    np.testing.assert_array_equal(mask1, get_data(mask))

    # Check that we get a useful warning for empty masks
    assert_warns(masking.MaskWarning,
                 compute_gray_matter_mask,
                 image,
                 threshold=1)

    # Check that masks obtained from same FOV are the same
    img1 = Nifti1Image(np.full((9, 9, 9), np.random.rand()), np.eye(4))
    img2 = Nifti1Image(np.full((9, 9, 9), np.random.rand()), np.eye(4))

    mask_img1 = compute_gray_matter_mask(img1)
    mask_img2 = compute_gray_matter_mask(img2)
    np.testing.assert_array_equal(get_data(mask_img1), get_data(mask_img2))
Example No. 20
def test_neurosynth_words_vectorized():
    n_im = 5
    with _TestTemporaryDirectory() as temp_dir:
        words_files = [
            os.path.join(temp_dir, 'words_for_image_{0}.json'.format(i))
            for i in range(n_im)
        ]
        words = [str(i) for i in range(n_im)]
        for i, file_name in enumerate(words_files):
            word_weights = np.zeros(n_im)
            word_weights[i] = 1
            words_dict = {
                'data': {
                    'values':
                    dict([(k, v) for k, v in zip(words, word_weights)])
                }
            }
            with open(file_name, 'wb') as words_file:
                words_file.write(json.dumps(words_dict).encode('utf-8'))
        freq, voc = neurovault.neurosynth_words_vectorized(words_files)
        assert_equal(freq.shape, (n_im, n_im))
        assert ((freq.sum(axis=0) == np.ones(n_im)).all())
        assert_warns(UserWarning, neurovault.neurosynth_words_vectorized,
                     (os.path.join(temp_dir, 'no_words_here.json'), ))
Example No. 21
def test_find_cuts_empty_mask_no_crash():
    img = nibabel.Nifti1Image(np.ones((2, 2, 2)), np.eye(4))
    mask = np.zeros((2, 2, 2)).astype(np.bool)
    cut_coords = assert_warns(UserWarning, find_xyz_cut_coords, img, mask=mask)
    np.testing.assert_array_equal(cut_coords, [.5, .5, .5])
Example No. 22
def test_find_cuts_empty_mask_no_crash():
    img = nibabel.Nifti1Image(np.ones((2, 2, 2)), np.eye(4))
    mask_img = compute_epi_mask(img)
    cut_coords = assert_warns(UserWarning, find_xyz_cut_coords, img,
                              mask_img=mask_img)
    np.testing.assert_array_equal(cut_coords, [.5, .5, .5])
Example No. 23
def test__smooth_array():
    """Test smoothing of images: _smooth_array()"""
    # Impulse in 3D
    data = np.zeros((40, 41, 42))
    data[20, 20, 20] = 1

    # fwhm divided by any voxel size in the test affines must be odd;
    # otherwise the assertion below will fail (9 / 0.6 = 15 is fine).
    fwhm = 9
    test_affines = (np.eye(4), np.diag((1, 1, -1, 1)), np.diag((.6, 1, .6, 1)))
    for affine in test_affines:
        filtered = image._smooth_array(data, affine, fwhm=fwhm, copy=True)
        assert_false(np.may_share_memory(filtered, data))

        # We are expecting a full-width at half maximum of
        # fwhm / voxel_size:
        vmax = filtered.max()
        above_half_max = filtered > .5 * vmax
        for axis in (0, 1, 2):
            proj = np.any(np.any(np.rollaxis(above_half_max, axis=axis),
                                 axis=-1),
                          axis=-1)
            np.testing.assert_equal(proj.sum(),
                                    fwhm / np.abs(affine[axis, axis]))

    # Check that NaNs in the data do not propagate
    data[10, 10, 10] = np.NaN
    filtered = image._smooth_array(data,
                                   affine,
                                   fwhm=fwhm,
                                   ensure_finite=True,
                                   copy=True)
    assert_true(np.all(np.isfinite(filtered)))

    # Check copy=False.
    for affine in test_affines:
        data = np.zeros((40, 41, 42))
        data[20, 20, 20] = 1
        image._smooth_array(data, affine, fwhm=fwhm, copy=False)

        # We are expecting a full-width at half maximum of
        # fwhm / voxel_size:
        vmax = data.max()
        above_half_max = data > .5 * vmax
        for axis in (0, 1, 2):
            proj = np.any(np.any(np.rollaxis(above_half_max, axis=axis),
                                 axis=-1),
                          axis=-1)
            np.testing.assert_equal(proj.sum(),
                                    fwhm / np.abs(affine[axis, axis]))

    # Check fwhm='fast'
    for affine in test_affines:
        np.testing.assert_equal(image._smooth_array(data, affine, fwhm='fast'),
                                image._fast_smooth_array(data))

    # Check corner case when fwhm=0. See #1537
    # Test whether function _smooth_array raises a warning when fwhm=0.
    assert_warns(UserWarning, image._smooth_array, data, affine, fwhm=0.)

    # Test output equal when fwhm=None and fwhm=0
    out_fwhm_none = image._smooth_array(data, affine, fwhm=None)
    out_fwhm_zero = image._smooth_array(data, affine, fwhm=0.)
    assert_array_equal(out_fwhm_none, out_fwhm_zero)
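_smooth_array is private, but the full-width-at-half-maximum check above can be reproduced through the public smooth_img API. A sketch assuming nilearn and nibabel are installed:

import numpy as np
import nibabel
from nilearn import image

data = np.zeros((40, 41, 42))
data[20, 20, 20] = 1                              # impulse response probe
img = nibabel.Nifti1Image(data, np.eye(4))        # 1 mm isotropic voxels

smoothed = image.smooth_img(img, fwhm=9).get_fdata()
above_half_max = smoothed > 0.5 * smoothed.max()

# Count x-slices containing at least one voxel above half maximum: with 1 mm
# voxels this width should be about fwhm / voxel size = 9, mirroring the
# assertion in the test above.
print(above_half_max.any(axis=(1, 2)).sum())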
Example No. 24
def test_find_cuts_empty_mask_no_crash():
    img = nibabel.Nifti1Image(np.ones((2, 2, 2)), np.eye(4))
    mask = np.zeros((2, 2, 2)).astype(np.bool)
    cut_coords = assert_warns(UserWarning, find_xyz_cut_coords, img,
                              mask=mask)
    np.testing.assert_array_equal(cut_coords, [.5, .5, .5])
Example No. 25
def test__smooth_array():
    """Test smoothing of images: _smooth_array()"""
    # Impulse in 3D
    data = np.zeros((40, 41, 42))
    data[20, 20, 20] = 1

    # fwhm divided by any voxel size in the test affines must be odd;
    # otherwise the assertion below will fail (9 / 0.6 = 15 is fine).
    fwhm = 9
    test_affines = (np.eye(4), np.diag((1, 1, -1, 1)),
                    np.diag((.6, 1, .6, 1)))
    for affine in test_affines:
        filtered = image._smooth_array(data, affine,
                                       fwhm=fwhm, copy=True)
        assert_false(np.may_share_memory(filtered, data))

        # We are expecting a full-width at half maximum of
        # fwhm / voxel_size:
        vmax = filtered.max()
        above_half_max = filtered > .5 * vmax
        for axis in (0, 1, 2):
            proj = np.any(np.any(np.rollaxis(above_half_max,
                          axis=axis), axis=-1), axis=-1)
            np.testing.assert_equal(proj.sum(),
                                    fwhm / np.abs(affine[axis, axis]))

    # Check that NaNs in the data do not propagate
    data[10, 10, 10] = np.NaN
    filtered = image._smooth_array(data, affine, fwhm=fwhm,
                                   ensure_finite=True, copy=True)
    assert_true(np.all(np.isfinite(filtered)))

    # Check copy=False.
    for affine in test_affines:
        data = np.zeros((40, 41, 42))
        data[20, 20, 20] = 1
        image._smooth_array(data, affine, fwhm=fwhm, copy=False)

        # We are expecting a full-width at half maximum of
        # fwhm / voxel_size:
        vmax = data.max()
        above_half_max = data > .5 * vmax
        for axis in (0, 1, 2):
            proj = np.any(np.any(np.rollaxis(above_half_max,
                          axis=axis), axis=-1), axis=-1)
            np.testing.assert_equal(proj.sum(),
                                    fwhm / np.abs(affine[axis, axis]))

    # Check fwhm='fast'
    for affine in test_affines:
        np.testing.assert_equal(image._smooth_array(data, affine, fwhm='fast'),
                                image._fast_smooth_array(data))

    # Check corner case when fwhm=0. See #1537
    # Test whether function _smooth_array raises a warning when fwhm=0.
    assert_warns(UserWarning, image._smooth_array, data, affine, fwhm=0.)

    # Test output equal when fwhm=None and fwhm=0
    out_fwhm_none = image._smooth_array(data, affine, fwhm=None)
    out_fwhm_zero = image._smooth_array(data, affine, fwhm=0.)
    assert_array_equal(out_fwhm_none, out_fwhm_zero)