### Perform massively univariate analysis with permuted OLS ###################
tested_var = np.ones((n_samples, 1), dtype=float)  # intercept
neg_log_pvals, all_scores, h0 = permuted_ols(
    tested_var, fmri_masked, model_intercept=False,
    n_perm=5000,  # 5,000 for the sake of time. 10,000 is recommended
    two_sided_test=False,  # RPBI does not perform a two-sided test
    n_jobs=1)  # can be changed to use more CPUs
neg_log_pvals_unmasked = nifti_masker.inverse_transform(
    np.ravel(neg_log_pvals))
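
# Aside (not in the original snippet): permuted_ols returns negative log10
# of family-wise corrected p-values, so the map can be converted back to
# p-values and thresholded at, e.g., a 10% corrected level.
corrected_pvals = 10 ** -np.ravel(neg_log_pvals)
n_significant_voxels = (np.ravel(neg_log_pvals) > -np.log10(0.1)).sum()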

### Randomized Parcellation Based Inference ###################################
neg_log_pvals_rpbi, _, _ = randomized_parcellation_based_inference(
    tested_var, fmri_masked,
    np.asarray(nifti_masker.mask_img_.get_data()).astype(bool),
    n_parcellations=30,  # 30 for the sake of time, 100 is recommended
    n_parcels=1000,
    threshold='auto',
    n_perm=5000,  # 5,000 for the sake of time. 10,000 is recommended
    random_state=0, memory='nilearn_cache', n_jobs=1, verbose=True)
neg_log_pvals_rpbi_unmasked = nifti_masker.inverse_transform(
    neg_log_pvals_rpbi)
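
# Aside (illustrative): RPBI scores live on the same -log10 p-value scale as
# the permuted-OLS scores, so the two methods' sensitivity can be compared
# by counting detections at the same corrected threshold.
threshold = -np.log10(0.1)  # 10% family-wise corrected
print('permuted OLS detections: %d' % (np.ravel(neg_log_pvals) > threshold).sum())
print('RPBI detections: %d' % (np.ravel(neg_log_pvals_rpbi) > threshold).sum())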

### Visualization #############################################################
import matplotlib.pyplot as plt
from nilearn.plotting import plot_stat_map

# Here, we should use a structural image as a background, when available.

# Various plotting parameters
z_slice = 39  # plotted slice
from nilearn.image.resampling import coord_transform
# (reconstructed from context: the scrape dropped the intermediate steps
# that compute the voxel slice index, the threshold and the display)
_, _, k_slice = coord_transform(
    0, 0, z_slice, np.linalg.inv(neg_log_pvals_unmasked.get_affine()))
k_slice = int(round(k_slice))
threshold = -np.log10(0.1)  # 10% corrected p-values
fig = plt.figure(figsize=(5, 7), facecolor='k')
display = plot_stat_map(neg_log_pvals_unmasked, threshold=threshold,
                        display_mode='z', cut_coords=[z_slice], figure=fig)
neg_log_pvals_slice_data = neg_log_pvals_unmasked.get_data()[..., k_slice]
n_detections = (neg_log_pvals_slice_data > threshold).sum()
title = ('Negative $\log_{10}$ p-values'
         '\n(Non-parametric + max-type correction)'
         '\n%d detections') % n_detections
display.title(title, y=1.2)
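
# Assumed closing step (the original snippet is truncated at this point):
plt.show()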

### Randomized Parcellation Based Inference ###################################
from nilearn_sandbox.mass_univariate.rpbi import randomized_parcellation_based_inference

print "Randomized Parcellation Based Inference"
neg_log_pvals_rpbi, _, _ = randomized_parcellation_based_inference(
    age,
    gm_maps_masked,  # + intercept as a covariate by default
    np.asarray(nifti_masker.mask_img_.get_data()).astype(bool),
    n_parcellations=30,  # 30 for the sake of time, 100 is recommended
    n_parcels=1000,
    threshold='auto',
    n_perm=1000,  # 1,000 for the sake of time. 10,000 is recommended
    random_state=0,
    memory='nilearn_cache',
    n_jobs=1,
    verbose=True)
neg_log_pvals_rpbi_unmasked = nifti_masker.inverse_transform(
    np.ravel(neg_log_pvals_rpbi))
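
# Aside (not in the original snippet): the unmasked map is a regular
# Nifti1Image, so it can be saved for offline inspection.
neg_log_pvals_rpbi_unmasked.to_filename('rpbi_neg_log_pvals.nii.gz')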

### Show results ##############################################################
fig = plt.figure(figsize=(5.5, 7.5), facecolor='k')

display = plot_stat_map(neg_log_pvals_rpbi_unmasked,
                        bg_img=bg_filename,
                        threshold=threshold,
                        cmap=plt.cm.RdBu_r,
                        figure=fig)  # call completed; the scrape cut it mid-list
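
# Assumed closing steps (the scrape truncated this example before the
# figure was annotated and rendered):
display.title(r'Negative $\log_{10}$ p-values (RPBI)', y=1.2)
plt.show()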
### Example 4 #################################################################
import numpy as np
from numpy.testing import assert_array_almost_equal, assert_equal
from sklearn.utils import check_random_state
# import path as used earlier in this document
from nilearn_sandbox.mass_univariate.rpbi import (
    randomized_parcellation_based_inference)


def test_randomized_parcellation_based_inference(random_state=1):
    """Test the RPBI API on toy data."""
    # check random state
    rng = check_random_state(random_state)

    # Generate toy data
    # define data structure
    shape = (5, 5, 5)
    n_voxels = np.prod(shape)
    mask = np.ones(shape, dtype=bool)
    # data generation
    data = np.zeros(shape)
    data[1:3, 1:3, 1:3] = 1.
    data = data.reshape((1, -1))
    data = np.repeat(data, 8, 0)
    # add noise to avoid constant columns
    data += 0.1 * rng.randn(data.shape[0], data.shape[1])

    # Randomized Parcellation Based Inference
    n_parcellations = 2
    n_parcels = 3
    neg_log_pvals, counting_statistic_original_data, h0 = (
        randomized_parcellation_based_inference(
            np.ones((8, 1)), data, mask, confounding_vars=None,  # 2D tested_vars
            model_intercept=True,
            n_parcellations=n_parcellations, n_parcels=n_parcels,
            threshold=0.05 / n_parcels, n_perm=9, random_state=rng,
            verbose=True))
    # check pvalues
    expected_neg_log_pvals = np.zeros(shape)
    expected_neg_log_pvals[1:3, 1:3, 1:3] = 1.
    expected_neg_log_pvals = np.ravel(expected_neg_log_pvals)
    assert_equal(neg_log_pvals.shape, (n_voxels,))
    assert_array_almost_equal(neg_log_pvals, expected_neg_log_pvals)
    # check counting statistic
    assert_equal(counting_statistic_original_data.shape, (n_voxels,))
    assert_array_almost_equal(counting_statistic_original_data,
                              2 * expected_neg_log_pvals)
    # h0
    assert_equal(h0.shape, (9,))
    assert_array_almost_equal(h0, np.zeros(9))

    ### Same test with 1-dimensional tested_vars
    # reset the random state and replay the noise draw so that the
    # permutation stream matches the first run
    rng = check_random_state(random_state)
    rng.randn(data.shape[0], data.shape[1])
    # Randomized Parcellation Based Inference
    n_parcellations = 2
    n_parcels = 3
    neg_log_pvals, counting_statistic_original_data, h0 = (
        randomized_parcellation_based_inference(
            np.ones(8), data, mask, confounding_vars=None,
            model_intercept=True,
            n_parcellations=n_parcellations, n_parcels=n_parcels,
            threshold=0.05 / n_parcels, n_perm=9, random_state=rng,
            verbose=True))
    # check pvalues
    expected_neg_log_pvals = np.zeros(shape)
    expected_neg_log_pvals[1:3, 1:3, 1:3] = 1.
    expected_neg_log_pvals = np.ravel(expected_neg_log_pvals)
    assert_equal(neg_log_pvals.shape, (n_voxels,))
    assert_array_almost_equal(neg_log_pvals, expected_neg_log_pvals)
    # check counting statistic
    assert_equal(counting_statistic_original_data.shape, (n_voxels,))
    assert_array_almost_equal(counting_statistic_original_data,
                              2 * expected_neg_log_pvals)
    # h0
    assert_equal(h0.shape, (9,))
    assert_array_almost_equal(h0, np.zeros(9))
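
# Aside (not in the original module): with the imports above, the test can
# be run standalone as a quick smoke check.
if __name__ == '__main__':
    test_randomized_parcellation_based_inference()
    print('RPBI API test passed.')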
### Example 6 #################################################################
### Perform massively univariate analysis with permuted OLS ###################
# We keep track of the effect sign so it can be added back at the end and
# thus observe the signed effect
neg_log_pvals, t_scores_original_data, _ = permuted_ols(
    grouped_conditions_encoded, grouped_fmri_masked,
    # + intercept as a covariate by default
    n_perm=5000,  # 5,000 for the sake of time. 10,000 is recommended
    two_sided_test=False,  # RPBI does not perform a two-sided test
    n_jobs=1)  # can be changed to use more CPUs
neg_log_pvals_unmasked = nifti_masker.inverse_transform(
    neg_log_pvals)
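
# Aside (sketch of the signed-effect idea from the comment above): the sign
# of the original t-scores can be folded back into the p-value map before
# unmasking, to visualize signed effects.
signed_neg_log_pvals = neg_log_pvals * np.sign(t_scores_original_data)
signed_neg_log_pvals_unmasked = nifti_masker.inverse_transform(
    signed_neg_log_pvals)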

### Randomized Parcellation Based Inference ###################################
neg_log_pvals_rpbi, _, _ = randomized_parcellation_based_inference(
    grouped_conditions_encoded, grouped_fmri_masked,
    # + intercept as a covariate by default
    nifti_masker.mask_img_.get_data().astype(bool),
    n_parcellations=30,  # 30 for the sake of time, 100 is recommended
    n_parcels=1000,
    threshold='auto',
    n_perm=5000,  # 5,000 for the sake of time. 10,000 is recommended
    random_state=0, memory='nilearn_cache',
    n_jobs=1, verbose=True)
neg_log_pvals_rpbi_unmasked = nifti_masker.inverse_transform(
    neg_log_pvals_rpbi)

### Visualization #############################################################
import matplotlib.pyplot as plt
from nilearn.plotting import plot_stat_map

# Use the fMRI mean image as a surrogate of anatomical data
from nilearn import image
mean_fmri_img = image.mean_img(haxby_dataset.func)
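
# Sketch of the intended use (the example is truncated here): the mean EPI
# image serves as the anatomical background for plotting.
display = plot_stat_map(neg_log_pvals_rpbi_unmasked, bg_img=mean_fmri_img,
                        threshold=-np.log10(0.1), display_mode='z')
plt.show()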