Example No. 1
def test_roi_to_brain():
    s1 = create_sphere([15, 10, -8], radius=10)
    s2 = create_sphere([-15, 10, -8], radius=10)
    s3 = create_sphere([0, -15, -8], radius=10)
    masks = Brain_Data([s1, s2, s3])

    d = [1, 2, 3]
    m = roi_to_brain(d, masks)
    assert np.all([np.any(m.data == x) for x in d])

    d = pd.Series([1.1, 2.1, 3.1])
    m = roi_to_brain(d, masks)
    assert np.all([np.any(m.data == x) for x in d])

    d = np.array([1, 2, 3])
    m = roi_to_brain(d, masks)
    assert np.all([np.any(m.data == x) for x in d])

    d = pd.DataFrame([np.ones(10) * x for x in [1, 2, 3]])
    m = roi_to_brain(d, masks)
    assert len(m) == d.shape[1]
    assert np.all([np.any(m[0].data == x) for x in d[0]])

    d = np.array([np.ones(10) * x for x in [1, 2, 3]])
    m = roi_to_brain(d, masks)
    assert len(m) == d.shape[1]
    assert np.all([np.any(m[0].data == x) for x in d[0]])
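# A minimal usage sketch (illustrative, assuming nltools is installed):
# roi_to_brain maps one scalar per ROI back into a single brain image.
from nltools.data import Brain_Data
from nltools.mask import create_sphere, roi_to_brain

rois = Brain_Data([create_sphere([15, 10, -8], radius=10),
                   create_sphere([-15, 10, -8], radius=10)])
roi_values = [0.5, 1.5]  # one value per ROI, in mask order
brain_map = roi_to_brain(roi_values, rois)  # returns a Brain_Data image
brain_map.to_nifti()  # convert to nibabel for plotting or saving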
Example No. 2
def test_create_sphere():
    # Test values updated to reflect that the standard Brain_Data mask has fewer voxels because the ventricles are zeroed out

    a = create_sphere(radius=10, coordinates=[0, 0, 0])
    assert np.sum(a.get_data()) >= 497  # 515
    a = create_sphere(radius=[10, 5], coordinates=[[0, 0, 0], [15, 0, 25]])
    assert np.sum(a.get_data()) >= 553  # 571
    a = create_sphere(radius=10, coordinates=[[0, 0, 0], [15, 0, 25]])
    assert np.sum(a.get_data()) >= 1013  # 1051
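# A minimal usage sketch (illustrative): create_sphere accepts a single
# coordinate with one radius, or parallel lists of coordinates and radii,
# and returns a binary nibabel Nifti1Image.
from nltools.mask import create_sphere

single = create_sphere([0, 0, 0], radius=10)
multi = create_sphere([[0, 0, 0], [15, 0, 25]], radius=[10, 5])
print(single.shape, multi.shape)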
Example No. 3
def test_groupby_aggregate(sim_brain_data):
    s1 = create_sphere([12, 10, -8], radius=10)
    s2 = create_sphere([22, -2, -22], radius=10)
    mask = Brain_Data([s1, s2])
    d = sim_brain_data.groupby(mask)
    assert isinstance(d, Groupby)
    mn = sim_brain_data.aggregate(mask, "mean")
    assert isinstance(mn, Brain_Data)
    assert len(mn.shape()) == 1
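# A minimal usage sketch (illustrative): aggregate() collapses the data within
# each ROI using the given function and recombines the result into a single
# 1-d Brain_Data image.
from nltools.simulator import Simulator
from nltools.data import Brain_Data
from nltools.mask import create_sphere

dat = Simulator().create_data([0, 1], 1, reps=3, output_dir=None)
mask = Brain_Data([create_sphere([12, 10, -8], radius=10),
                   create_sphere([22, -2, -22], radius=10)])
mean_img = dat.aggregate(mask, 'mean')  # each ROI filled with its mean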
Example No. 4
def test_bootstrap(sim_brain_data):
    masked = sim_brain_data.apply_mask(
        create_sphere(radius=10, coordinates=[0, 0, 0]))
    n_samples = 3
    b = masked.bootstrap("mean", n_samples=n_samples)
    assert isinstance(b["Z"], Brain_Data)
    b = masked.bootstrap("std", n_samples=n_samples)
    assert isinstance(b["Z"], Brain_Data)
    b = masked.bootstrap("predict", n_samples=n_samples, plot=False)
    assert isinstance(b["Z"], Brain_Data)
    b = masked.bootstrap(
        "predict",
        n_samples=n_samples,
        plot=False,
        cv_dict={
            "type": "kfolds",
            "n_folds": 3
        },
    )
    assert isinstance(b["Z"], Brain_Data)
    b = masked.bootstrap("predict",
                         n_samples=n_samples,
                         save_weights=True,
                         plot=False)
    assert len(b["samples"]) == n_samples
Example No. 5
def test_apply_mask(sim_brain_data):
    s1 = create_sphere([12, 10, -8], radius=10)
    assert isinstance(s1, nb.Nifti1Image)
    masked_dat = sim_brain_data.apply_mask(s1)
    assert masked_dat.shape()[1] == np.sum(s1.get_fdata() != 0)
    masked_dat = sim_brain_data.apply_mask(s1, resample_mask_to_brain=True)
    assert masked_dat.shape()[1] == np.sum(s1.get_fdata() != 0)
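# A minimal usage sketch (illustrative): apply_mask keeps only the voxels
# inside the sphere; resample_mask_to_brain=True resamples the mask into the
# data's space first when the two grids differ.
from nltools.simulator import Simulator
from nltools.mask import create_sphere

dat = Simulator().create_data([0, 1], 1, reps=3, output_dir=None)
sphere = create_sphere([12, 10, -8], radius=10)
masked = dat.apply_mask(sphere, resample_mask_to_brain=True)
print(masked.shape())  # (n_images, n_voxels_in_sphere)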
Example No. 6
def test_check_brain_data(sim_brain_data):
    mask = Brain_Data(create_sphere([15, 10, -8], radius=10))
    a = check_brain_data(sim_brain_data)
    assert isinstance(a, Brain_Data)
    b = check_brain_data(sim_brain_data, mask=mask)
    assert isinstance(b, Brain_Data)
    assert b.shape()[1] == np.sum(mask.data==1)
Example No. 7
def test_groupby(tmpdir):
    # Simulate Brain Data
    sim = Simulator()
    r = 10
    sigma = 1
    y = [0, 1]
    n_reps = 3
    output_dir = str(tmpdir)
    sim.create_data(y, sigma, reps=n_reps, output_dir=output_dir)

    s1 = create_sphere([12, 10, -8], radius=r)
    s2 = create_sphere([22, -2, -22], radius=r)
    mask = Brain_Data([s1, s2])

    y = pd.read_csv(os.path.join(str(tmpdir.join('y.csv'))),
                    header=None,
                    index_col=None)
    data = Brain_Data(glob.glob(str(tmpdir.join('data.nii.gz'))), Y=y)
    data.X = pd.DataFrame(
        {
            'Intercept': np.ones(len(data.Y)),
            'X1': np.array(data.Y).flatten()
        },
        index=None)

    dat = Groupby(data, mask)

    # Test length
    assert len(dat) == len(mask)

    # Test Index
    assert isinstance(dat[1], Brain_Data)

    # Test apply
    mn = dat.apply('mean')
    assert len(dat) == len(mn)
    # assert mn[0].mean() > mn[1].mean() #JC edit: it seems this check relies on chance from simulated data
    assert mn[1].shape() == np.sum(mask[1].data == 1)
    reg = dat.apply('regress')
    assert len(dat) == len(reg)
    # r = dict([(x,reg[x]['beta'][1]) for x in reg.iterkeys()])

    # Test combine
    combine_mn = dat.combine(mn)
    assert len(combine_mn.shape()) == 1
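# A minimal usage sketch (illustrative): Groupby splits the data by ROI,
# apply() runs an operation within each ROI, and combine() stitches the
# per-ROI results back into a single image.
from nltools.simulator import Simulator
from nltools.data import Brain_Data, Groupby
from nltools.mask import create_sphere

dat = Simulator().create_data([0, 1], 1, reps=3, output_dir=None)
mask = Brain_Data([create_sphere([12, 10, -8], radius=10),
                   create_sphere([22, -2, -22], radius=10)])
grouped = Groupby(dat, mask)
means = grouped.apply('mean')      # one result per ROI, keyed by ROI index
combined = grouped.combine(means)  # a single 1-d Brain_Data image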
Example No. 8
def test_extract_roi(sim_brain_data):
    mask = create_sphere([12, 10, -8], radius=10)
    assert len(sim_brain_data.extract_roi(mask, metric="mean")) == shape_2d[0]
    assert len(sim_brain_data.extract_roi(mask,
                                          metric="median")) == shape_2d[0]
    n_components = 2
    assert sim_brain_data.extract_roi(
        mask, metric="pca",
        n_components=n_components).shape == (n_components, shape_2d[0])
    with pytest.raises(NotImplementedError):
        sim_brain_data.extract_roi(mask, metric="p")

    assert isinstance(sim_brain_data[0].extract_roi(mask, metric="mean"),
                      (float, np.floating))
    assert isinstance(sim_brain_data[0].extract_roi(mask, metric="median"),
                      (float, np.floating))
    with pytest.raises(ValueError):
        sim_brain_data[0].extract_roi(mask, metric="pca")
    with pytest.raises(NotImplementedError):
        sim_brain_data[0].extract_roi(mask, metric="p")

    s1 = create_sphere([15, 10, -8], radius=10)
    s2 = create_sphere([-15, 10, -8], radius=10)
    s3 = create_sphere([0, -15, -8], radius=10)
    masks = Brain_Data([s1, s2, s3])
    mask = roi_to_brain([1, 2, 3], masks)
    assert len(sim_brain_data[0].extract_roi(mask,
                                             metric="mean")) == len(masks)
    assert len(sim_brain_data[0].extract_roi(mask,
                                             metric="median")) == len(masks)
    assert sim_brain_data.extract_roi(mask, metric="mean").shape == (
        len(masks),
        shape_2d[0],
    )
    assert sim_brain_data.extract_roi(mask, metric="median").shape == (
        len(masks),
        shape_2d[0],
    )
    assert len(
        sim_brain_data.extract_roi(mask,
                                   metric="pca",
                                   n_components=n_components)) == len(masks)
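# A minimal usage sketch (illustrative): with a binary mask, extract_roi
# returns one value per image; with an integer-labeled mask (e.g. built with
# roi_to_brain) it returns one row per ROI.
from nltools.simulator import Simulator
from nltools.mask import create_sphere

dat = Simulator().create_data([0, 1], 1, reps=3, output_dir=None)
mask = create_sphere([12, 10, -8], radius=10)
roi_means = dat.extract_roi(mask, metric='mean')  # shape: (n_images,)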
Example No. 9
def test_find_spikes():
    sim = Simulator()
    y = [0, 1]
    n_reps = 50
    s1 = create_sphere([0, 0, 0], radius=3)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)

    spikes = find_spikes(d1)
    assert isinstance(spikes, pd.DataFrame)
    assert spikes.shape[0] == len(d1)

    spikes = find_spikes(d1.to_nifti())
    assert isinstance(spikes, pd.DataFrame)
    assert spikes.shape[0] == len(d1)
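# A minimal usage sketch (illustrative): find_spikes flags volumes with
# unusual global intensity or frame-to-frame change, returning a DataFrame
# with one row per volume.
from nltools.simulator import Simulator
from nltools.stats import find_spikes

dat = Simulator().create_data([0, 1], 1, reps=50, output_dir=None)
spikes = find_spikes(dat)
print(spikes.head())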
Example No. 10
def test_threshold():
    s1 = create_sphere([12, 10, -8], radius=10)
    s2 = create_sphere([22, -2, -22], radius=10)
    mask = Brain_Data(s1) * 5
    mask = mask + Brain_Data(s2)

    m1 = mask.threshold(upper=0.5)
    m2 = mask.threshold(upper=3)
    m3 = mask.threshold(upper="98%")
    m4 = Brain_Data(s1) * 5 + Brain_Data(s2) * -0.5
    m4 = mask.threshold(upper=0.5, lower=-0.3)
    assert np.sum(m1.data > 0) > np.sum(m2.data > 0)
    assert np.sum(m1.data > 0) == np.sum(m3.data > 0)
    assert np.sum(m4.data[(m4.data > -0.3) & (m4.data < 0.5)]) == 0
    assert np.sum(m4.data[(m4.data < -0.3) | (m4.data > 0.5)]) > 0

    # Test Regions
    r = mask.regions(min_region_size=10)
    m1 = Brain_Data(s1)
    m2 = r.threshold(1, binarize=True)
    assert len(np.unique(r.to_nifti().get_fdata())) == 2
    diff = m2 - m1
    assert np.sum(diff.data) == 0
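# A minimal usage sketch (illustrative): threshold() zeroes the voxels that
# fall inside the (lower, upper) band, keeping only the extremes; bounds can
# be raw values or percentile strings such as '98%'.
from nltools.data import Brain_Data
from nltools.mask import create_sphere

img = Brain_Data(create_sphere([12, 10, -8], radius=10)) * 5
kept = img.threshold(upper=3)     # keep voxels above 3
top = img.threshold(upper='98%')  # keep roughly the top 2% of voxels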
Example No. 11
def test_bootstrap(sim_brain_data):
    masked = sim_brain_data.apply_mask(create_sphere(radius=10, coordinates=[0, 0, 0]))
    n_samples = 3
    b = masked.bootstrap('mean', n_samples=n_samples)
    assert isinstance(b['Z'], Brain_Data)
    b = masked.bootstrap('std', n_samples=n_samples)
    assert isinstance(b['Z'], Brain_Data)
    b = masked.bootstrap('predict', n_samples=n_samples, plot=False)
    assert isinstance(b['Z'], Brain_Data)
    b = masked.bootstrap('predict', n_samples=n_samples,
                         plot=False, cv_dict={'type': 'kfolds', 'n_folds': 3})
    assert isinstance(b['Z'], Brain_Data)
    b = masked.bootstrap('predict', n_samples=n_samples,
                         save_weights=True, plot=False)
    assert len(b['samples']) == n_samples
Example No. 12
def test_hyperalignment():
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=3)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1, d2, d3]

    # Test deterministic brain_data
    out = align(data, method="deterministic_srm")

    bout = d1.align(out["common_model"], method="deterministic_srm")
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[1] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed))

    # Test probabilistic brain_data
    bout = d1.align(out["common_model"], method="probabilistic_srm")
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[1] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed))

    # Test procrustes brain_data
    out = align(data, method="procrustes")
    centered = data[0].data - np.mean(data[0].data, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out["transformation_matrix"][0].data) *
                   out["scale"][0])

    bout = d1.align(out["common_model"], method="procrustes")
    assert d1.shape() == bout["transformed"].shape()
    assert d1.shape() == bout["common_model"].shape()
    assert d1.shape()[1] == bout["transformation_matrix"].shape()[0]
    centered = d1.data - np.mean(d1.data, 0)
    btransformed = (np.dot(centered / np.linalg.norm(centered),
                           bout["transformation_matrix"].data) * bout["scale"])
    np.testing.assert_almost_equal(0,
                                   np.sum(bout["transformed"].data -
                                          btransformed),
                                   decimal=5)
    np.testing.assert_almost_equal(
        0, np.sum(out["transformed"][0].data - bout["transformed"].data))

    # Test over time
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=5)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1, d2, d3]

    out = align(data, method="deterministic_srm", axis=1)
    bout = d1.align(out["common_model"], method="deterministic_srm", axis=1)
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[0] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data.T, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed.T))

    out = align(data, method="probabilistic_srm", axis=1)
    bout = d1.align(out["common_model"], method="probabilistic_srm", axis=1)
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[0] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data.T, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed.T))

    out = align(data, method="procrustes", axis=1)
    bout = d1.align(out["common_model"], method="procrustes", axis=1)
    assert d1.shape() == bout["transformed"].shape()
    assert d1.shape() == bout["common_model"].shape()
    assert d1.shape()[0] == bout["transformation_matrix"].shape()[0]
    centered = d1.data.T - np.mean(d1.data.T, 0)
    btransformed = (np.dot(centered / np.linalg.norm(centered),
                           bout["transformation_matrix"].data) * bout["scale"])
    np.testing.assert_almost_equal(0,
                                   np.sum(bout["transformed"].data -
                                          btransformed.T),
                                   decimal=5)
    np.testing.assert_almost_equal(
        0, np.sum(out["transformed"][0].data - bout["transformed"].data))
Example No. 13
    plt.legend(loc="best")
    return plt


import nibabel as nib
import glob
for wrarun in list(glob.glob('D:/FaceData/func_img/wrarun*.nii')):
    func_img = nib.load(wrarun)  # note: only the last matching run is kept

from nilearn.image import index_img

func_img = index_img(func_img, slice(10, 490, 8))

from nltools.mask import create_sphere
mask_img = create_sphere([40, -62, -20], radius=5)

# Transform the data from 4D to 2D
from nilearn.input_data import NiftiMasker

masker = NiftiMasker(mask_img=mask_img, standardize=True, memory_level=1)
X = masker.fit_transform(func_img)

import numpy as np

for run in list(glob.glob('D:/FaceData/label/run*.txt')):
    behavioral = np.recfromcsv(run, delimiter='')  # only the last run's labels are kept

y = behavioral['name']

condition_mask = np.logical_or(y == b'Male', y == b'Female')
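# A typical next step (illustrative sketch, assuming one label per retained
# volume): restrict the voxel matrix and the labels to the two conditions.
X_cond = X[condition_mask]
y_cond = y[condition_mask]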
Example No. 14
import nibabel as nib
anat_img = nib.load('D:/FaceData/wmT1.nii')

from nltools.mask import create_sphere
mask = create_sphere([42, -58, -17], radius=30)
# nibabel images have no apply_mask method; wrap the image in Brain_Data first
from nltools.data import Brain_Data
mask_data = Brain_Data(anat_img).apply_mask(mask)
Example No. 15
def simulate_data(n_observations, y, p, sigma, mask):
    '''Simulate one subject's data: the signal y is placed in each mask voxel
       with probability p/10, then Gaussian noise (sd=sigma) is added.

        Returns:
            dat: (Brain_Data) simulated data for one subject
    '''

    dat = Brain_Data(mask).apply_mask(mask)
    new_data = np.zeros((dat.shape()[0], n_observations))
    for i in np.where(dat.data == 1)[0]:
        if np.random.randint(0, high=10) < p:
            new_data[i, :] = y
    noise = np.random.randn(new_data.shape[0], n_observations) * sigma
    dat.data = (new_data + noise).T
    return dat


mask = create_sphere([0, 45, 0], radius=8)
data = [simulate_data(n_observations, y, p, sigma, mask) for x in range(n_sub)]

plt.figure(figsize=(10, 3))
plt.plot(y)
plt.title('Simulated Signal', fontsize=20)
plt.xlabel('Time', fontsize=18)
plt.ylabel('Signal', fontsize=18)
plot_glass_brain(data[0].mean().to_nifti())

#########################################################################
# Hyperalign Data
# ---------------
#
# We will now align voxels with the same signal across participants. We will
# start using hyperalignment with the procrustes transform. The align function
# takes a list of Brain_Data instances and returns the aligned data, the
# transformation matrices, and a common model (see the sketch below).
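# A minimal sketch of the step described above (illustrative; `data` is the
# list of simulated Brain_Data objects created earlier):
from nltools.stats import align
from nilearn.plotting import plot_glass_brain

out = align(data, method='procrustes')
aligned = out['transformed']               # one aligned Brain_Data per subject
common_model = out['common_model']         # the shared response space
transforms = out['transformation_matrix']  # one transform per subject
plot_glass_brain(aligned[0].mean().to_nifti())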
Example No. 16
from nilearn.plotting import plot_stat_map
# plot the thresholded image (the head of this call was truncated in the
# original; plot_stat_map is assumed)
plot_stat_map(threshold_value_img,
              bg_img=anat_img,
              cut_coords=cut_coords,
              draw_cross=False,
              title='threshold image with intensity value',
              colorbar=False)
from scipy import ndimage
from nilearn.image import new_img_like
# dilate on the voxel array, then wrap the result back into a NIfTI image
dil_data = ndimage.binary_dilation(threshold_value_img.get_fdata() != 0)
dil_mask = new_img_like(threshold_value_img, dil_data.astype(int))

from nilearn.plotting import plot_roi, show
plot_roi(dil_mask,  # plot_roi expects the ROI first, the background second
         bg_img=anat_img,
         cut_coords=cut_coords,
         annotate=False,
         title='Dilated mask')

from nltools.mask import create_sphere

mask_second = create_sphere([30, -72, -6], radius=5)

# np.logical_and is not element-wise on Nifti1Image objects; use math_img
from nilearn.image import math_img
mask_combined = math_img('np.logical_and(img1, img2).astype(int)',
                         img1=threshold_value_img, img2=mask_second)

from nilearn.plotting import plot_roi, show

plot_roi(mask_combined,
         bg_img=anat_img,
         title='Intersect mask',
         display_mode='ortho',
         cmap='hot')

show()
Example No. 17
def sim_groupby(sim_brain_data):
    r = 10
    s1 = create_sphere([12, 10, -8], radius=r)
    s2 = create_sphere([22, -2, -22], radius=r)
    mask = Brain_Data([s1, s2])
    return Groupby(sim_brain_data, mask)
Example No. 18
def roi_mask(x, y, z, label, size):
    mask = create_sphere([x, y, z], radius=size)
    # mask.to_filename("_".join([label, str(size), "mm.nii"]))
    return mask
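# A minimal usage sketch (illustrative; the label and coordinates are
# hypothetical): the returned sphere is a nibabel Nifti1Image, so the
# commented-out to_filename call would write it straight to disk.
ffa = roi_mask(40, -62, -20, 'ffa', 6)
ffa.to_filename('ffa_6_mm.nii')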
Example No. 19
import os

import numpy as np
import nitime.timeseries as ts
import nitime.analysis as nta
import nitime.viz as viz

try:
    from nibabel import load
except ImportError:
    raise ImportError(
        'You need nibabel (http://nipy.org/nibabel/) in order to run this example'
    )

data_path = os.path.join('D:/FaceData/', 'func_img')
func_img = os.path.join(data_path, 'wrarun1.nii')
func_img = load(func_img)
from nltools.mask import create_sphere
mask_img = create_sphere([-58, -41, 4], radius=3)
from nilearn.input_data import NiftiMasker

masker = NiftiMasker(mask_img=mask_img, standardize=True, memory_level=1)
func_img = masker.fit_transform(func_img)

behavioral = np.recfromcsv('D:/FaceData/label/run1.txt', delimiter='')
condition = behavioral['name']
onset = behavioral['onset']
import pandas as pd
events = pd.DataFrame({'onset': onset, 'trial_type': condition})

# from nilearn.image import index_img
# func_img=index_img(func_img, onset)
# data=func_img.get_data()
#------------------------------------------------------------------------
Example No. 20
def test_align():
    # Test hyperalignment matrix
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=3)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)

    data = [d1.data, d2.data, d3.data]
    out = align(data, method="deterministic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0], out["transformation_matrix"][0])
    np.testing.assert_almost_equal(np.sum(out["transformed"][0] -
                                          transformed.T),
                                   0,
                                   decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out = align(data, method="probabilistic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0], out["transformation_matrix"][0])
    np.testing.assert_almost_equal(np.sum(out["transformed"][0] -
                                          transformed.T),
                                   0,
                                   decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out2 = align(data, method="procrustes")
    assert len(data) == len(out2["transformed"])
    assert data[0].shape == out2["common_model"].shape
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0] - np.mean(data[0], 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2["transformation_matrix"][0]) * out2["scale"][0])
    np.testing.assert_almost_equal(np.sum(out2["transformed"][0] -
                                          transformed.T),
                                   0,
                                   decimal=3)
    assert out2["transformed"][0].shape == out2["transformed"][0].shape
    assert (out2["transformation_matrix"][0].shape ==
            out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out["transformed"][0].shape[0]

    # Test hyperalignment on Brain_Data
    data = [d1, d2, d3]
    out = align(data, method="deterministic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data, out["transformation_matrix"][0].data.T)
    np.testing.assert_almost_equal(np.sum(out["transformed"][0].data -
                                          transformed),
                                   0,
                                   decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out = align(data, method="probabilistic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data, out["transformation_matrix"][0].data.T)
    np.testing.assert_almost_equal(np.sum(out["transformed"][0].data -
                                          transformed),
                                   0,
                                   decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out2 = align(data, method="procrustes")
    assert len(data) == len(out2["transformed"])
    assert data[0].shape() == out2["common_model"].shape()
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0].data - np.mean(data[0].data, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2["transformation_matrix"][0].data) *
                   out2["scale"][0])
    np.testing.assert_almost_equal(np.sum(out2["transformed"][0].data -
                                          transformed),
                                   0,
                                   decimal=3)
    assert out2["transformed"][0].shape() == out2["transformed"][0].shape()
    assert (out2["transformation_matrix"][0].shape ==
            out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out2["transformed"][0].shape()[1]

    # Test hyperalignment on matrix over time (axis=1)
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=5)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1.data, d2.data, d3.data]

    out = align(data, method="deterministic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0].T, out["transformation_matrix"][0].data)
    np.testing.assert_almost_equal(np.sum(out["transformed"][0] - transformed),
                                   0,
                                   decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out = align(data, method="probabilistic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0].T, out["transformation_matrix"][0])
    np.testing.assert_almost_equal(np.sum(out["transformed"][0] - transformed),
                                   0,
                                   decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out2 = align(data, method="procrustes", axis=1)
    assert len(data) == len(out2["transformed"])
    assert data[0].shape == out2["common_model"].shape
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0] - np.mean(data[0], 0)
    transformed = (np.dot(
        (centered / np.linalg.norm(centered)).T,
        out2["transformation_matrix"][0].data,
    ) * out2["scale"][0])
    np.testing.assert_almost_equal(np.sum(out2["transformed"][0] -
                                          transformed),
                                   0,
                                   decimal=3)
    assert out2["transformed"][0].shape == out2["transformed"][0].shape
    assert (out2["transformation_matrix"][0].shape ==
            out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out2["transformed"][0].shape[0]

    # Test hyperalignment on Brain_Data over time (axis=1)
    data = [d1, d2, d3]
    out = align(data, method="deterministic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data.T, out["transformation_matrix"][0].data).T
    np.testing.assert_almost_equal(np.sum(out["transformed"][0].data -
                                          transformed),
                                   0,
                                   decimal=5)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out = align(data, method="probabilistic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data.T, out["transformation_matrix"][0].data).T
    np.testing.assert_almost_equal(np.sum(out["transformed"][0].data -
                                          transformed),
                                   0,
                                   decimal=5)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out2 = align(data, method="procrustes", axis=1)
    assert len(data) == len(out2["transformed"])
    assert data[0].shape() == out2["common_model"].shape()
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0].data.T - np.mean(data[0].data.T, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2["transformation_matrix"][0].data) *
                   out2["scale"][0]).T
    np.testing.assert_almost_equal(np.sum(out2["transformed"][0].data -
                                          transformed),
                                   0,
                                   decimal=5)
    assert out2["transformed"][0].shape() == out2["transformed"][0].shape()
    assert (out2["transformation_matrix"][0].shape ==
            out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out2["transformed"][0].shape()[1]
Example No. 21
from os.path import join

from gclda.model import Model
from gclda.decode import decode_roi
from gclda.utils import get_resource_path
from nilearn import plotting
from nltools.mask import create_sphere
import nibabel as nib

model_file = join(get_resource_path(), 'models/Neurosynth2015Filtered2',
                  'model_200topics_2015Filtered2_10000iters.pklz')
model = Model.load(model_file)
model.display_model_summary()

# Create mask image
mask_data = (model.dataset.mask_img.get_data() != 0).astype(int)
affine = model.dataset.mask_img.affine
mask = nib.Nifti1Image(mask_data, affine)

###############################################################################
# Temporoparietal seed
# --------------------------------------
coords = [[-52, -56, 18]]
radii = [6] * len(coords)

roi_img = create_sphere(coords, radius=radii, mask=mask)
fig = plotting.plot_roi(roi_img,
                        display_mode='ortho',
                        cut_coords=[-52, -56, 18],
                        draw_cross=False)

df, _ = decode_roi(model, roi_img)
df = df.sort_values(by='Weight', ascending=False)
print(df.head(10))

###############################################################################
# Temporoparietal, medial parietal, and dorsomedial prefrontal seeds
# ------------------------------------------------------------------
coords = [[-56, -52, 18], [0, -58, 38], [4, 54, 26]]
radii = [6] * len(coords)
Example No. 22
def test_brain_data(tmpdir):
    sim = Simulator()
    r = 10
    sigma = 1
    y = [0, 1]
    n_reps = 3
    output_dir = str(tmpdir)
    sim.create_data(y, sigma, reps=n_reps, output_dir=output_dir)

    shape_3d = (91, 109, 91)
    shape_2d = (6, 238955)
    y = pd.read_csv(os.path.join(str(tmpdir.join('y.csv'))), header=None, index_col=None).T
    holdout = pd.read_csv(os.path.join(str(tmpdir.join('rep_id.csv'))), header=None, index_col=None).T
    flist = glob.glob(str(tmpdir.join('centered*.nii.gz')))

    # Test load list
    dat = Brain_Data(data=flist,Y=y)

    # Test load file
    assert Brain_Data(flist[0])

    # Test to_nifti
    d = dat.to_nifti()
    assert d.shape[0:3] == shape_3d

    # Test load nibabel
    assert Brain_Data(d)

    # Test shape
    assert dat.shape() == shape_2d

    # Test Mean
    assert dat.mean().shape()[0] == shape_2d[1]

    # Test Std
    assert dat.std().shape()[0] == shape_2d[1]

    # Test add
    new = dat + dat
    assert new.shape() == shape_2d

    # Test subtract
    new = dat - dat
    assert new.shape() == shape_2d

    # Test multiply
    new = dat * dat
    assert new.shape() == shape_2d

    # Test Iterator
    x = [x for x in dat]
    assert len(x) == len(dat)
    assert len(x[0].data.shape) == 1

    # # Test T-test
    out = dat.ttest()
    assert out['t'].shape()[0] == shape_2d[1]

    # # # Test T-test - permutation method
    # out = dat.ttest(threshold_dict={'permutation':'tfce','n_permutations':50,'n_jobs':1})
    # assert out['t'].shape()[0]==shape_2d[1]

    # Test Regress
    dat.X = pd.DataFrame({'Intercept':np.ones(len(dat.Y)), 'X1':np.array(dat.Y).flatten()},index=None)
    out = dat.regress()
    assert out['beta'].shape() == (2,shape_2d[1])

    # Test indexing
    assert out['t'][1].shape()[0] == shape_2d[1]

    # Test threshold
    i=1
    tt = threshold(out['t'][i], out['p'][i], .05)
    assert isinstance(tt,Brain_Data)

    # Test write
    dat.write(os.path.join(str(tmpdir.join('test_write.nii'))))
    assert Brain_Data(os.path.join(str(tmpdir.join('test_write.nii'))))

    # Test append
    assert dat.append(dat).shape()[0]==shape_2d[0]*2

    # Test distance
    distance = dat.distance(method='euclidean')
    assert isinstance(distance,Adjacency)
    assert distance.square_shape()[0]==shape_2d[0]

    # Test predict
    stats = dat.predict(algorithm='svm', cv_dict={'type': 'kfolds','n_folds': 2}, plot=False,**{'kernel':"linear"})

    # Support vector classification with Platt scaling (probability=True),
    # here without cross-validation. This will output probabilities of each class
    stats = dat.predict(algorithm='svm', cv_dict=None, plot=False,**{'kernel':'linear', 'probability':True})
    assert isinstance(stats['weight_map'],Brain_Data)

    # Logistic classification, with 2 fold cross-validation.
    stats = dat.predict(algorithm='logistic', cv_dict={'type': 'kfolds', 'n_folds': 2}, plot=False)
    assert isinstance(stats['weight_map'],Brain_Data)

    # Ridge classification
    stats = dat.predict(algorithm='ridgeClassifier', cv_dict=None,plot=False)
    assert isinstance(stats['weight_map'],Brain_Data)

    # Ridge
    stats = dat.predict(algorithm='ridge', cv_dict={'type': 'kfolds', 'n_folds': 2,'subject_id':holdout}, plot=False,**{'alpha':.1})

    # Lasso
    stats = dat.predict(algorithm='lasso', cv_dict={'type': 'kfolds', 'n_folds': 2,'stratified':dat.Y}, plot=False,**{'alpha':.1})

    # PCR
    stats = dat.predict(algorithm='pcr', cv_dict=None, plot=False)

    # Test Similarity
    r = dat.similarity(stats['weight_map'])
    assert len(r) == shape_2d[0]
    r2 = dat.similarity(stats['weight_map'].to_nifti())
    assert len(r2) == shape_2d[0]

    # Test apply_mask - might move part of this to test mask suite
    s1 = create_sphere([12, 10, -8], radius=10)
    assert isinstance(s1, nb.Nifti1Image)
    s2 = Brain_Data(s1)
    masked_dat = dat.apply_mask(s1)
    assert masked_dat.shape()[1] == np.sum(s2.data != 0)

    # Test extract_roi
    mask = create_sphere([12, 10, -8], radius=10)
    assert len(dat.extract_roi(mask)) == shape_2d[0]

    # Test r_to_z
    z = dat.r_to_z()
    assert z.shape() == dat.shape()

    # Test copy
    d_copy = dat.copy()
    assert d_copy.shape() == dat.shape()

    # Test detrend
    detrend = dat.detrend()
    assert detrend.shape() == dat.shape()

    # Test standardize
    s = dat.standardize()
    assert s.shape() == dat.shape()
    assert np.isclose(np.sum(s.mean().data), 0, atol=.1)
    s = dat.standardize(method='zscore')
    assert s.shape() == dat.shape()
    assert np.isclose(np.sum(s.mean().data), 0, atol=.1)

    # Test Sum
    s = dat.sum()
    assert s.shape() == dat[1].shape()

    # Test Groupby
    s1 = create_sphere([12, 10, -8], radius=10)
    s2 = create_sphere([22, -2, -22], radius=10)
    mask = Brain_Data([s1, s2])
    d = dat.groupby(mask)
    assert isinstance(d, Groupby)

    # Test Aggregate
    mn = dat.aggregate(mask, 'mean')
    assert isinstance(mn, Brain_Data)
    assert len(mn.shape()) == 1

    # Test Threshold
    s1 = create_sphere([12, 10, -8], radius=10)
    s2 = create_sphere([22, -2, -22], radius=10)
    mask = Brain_Data(s1)*5
    mask = mask + Brain_Data(s2)

    m1 = mask.threshold(thresh=.5)
    m2 = mask.threshold(thresh=3)
    m3 = mask.threshold(thresh='98%')
    assert np.sum(m1.data > 0) > np.sum(m2.data > 0)
    assert np.sum(m1.data > 0) == np.sum(m3.data > 0)

    # Test Regions
    r = mask.regions(min_region_size=10)
    m1 = Brain_Data(s1)
    m2 = r.threshold(1, binarize=True)
    # assert len(r)==2
    assert len(np.unique(r.to_nifti().get_data())) == 2 # JC edit: I think this is what you were trying to do
    diff = m2-m1
    assert np.sum(diff.data) == 0
Example No. 23
def test_align():
    # Test hyperalignment matrix
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=3)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)

    data = [d1.data.T, d2.data.T, d3.data.T]
    out = align(data, method='deterministic_srm')
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape == out['common_model'].shape
    transformed = np.dot(data[0].T, out['transformation_matrix'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out['transformed'][0] - transformed.T))

    out = align(data, method='probabilistic_srm')
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape == out['common_model'].shape
    transformed = np.dot(data[0].T, out['transformation_matrix'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out['transformed'][0] - transformed.T))

    out2 = align(data, method='procrustes')
    assert len(data) == len(out2['transformed'])
    assert data[0].shape == out2['common_model'].shape
    assert len(data) == len(out2['transformation_matrix'])
    assert len(data) == len(out2['disparity'])
    centered = data[0].T - np.mean(data[0].T, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2['transformation_matrix'][0]) * out2['scale'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out2['transformed'][0] - transformed.T))
    assert out['transformed'][0].shape == out2['transformed'][0].shape
    assert out['transformation_matrix'][0].shape == out2[
        'transformation_matrix'][0].shape

    # Test hyperalignment on Brain_Data
    data = [d1, d2, d3]
    out = align(data, method='deterministic_srm')
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape() == out['common_model'].shape()
    transformed = np.dot(d1.data, out['transformation_matrix'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out['transformed'][0].data - transformed))

    out = align(data, method='probabilistic_srm')
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape() == out['common_model'].shape()
    transformed = np.dot(d1.data, out['transformation_matrix'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out['transformed'][0].data - transformed))

    out2 = align(data, method='procrustes')
    assert len(data) == len(out2['transformed'])
    assert data[0].shape() == out2['common_model'].shape()
    assert len(data) == len(out2['transformation_matrix'])
    assert len(data) == len(out2['disparity'])
    centered = data[0].data - np.mean(data[0].data, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2['transformation_matrix'][0]) * out2['scale'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out2['transformed'][0].data - transformed))
    assert out['transformed'][0].shape() == out2['transformed'][0].shape()
    assert out['transformation_matrix'][0].shape == out2[
        'transformation_matrix'][0].shape

    # Test hyperalignment on matrix over time (axis=1)
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=5)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1.data.T, d2.data.T, d3.data.T]
    out = align(data, method='deterministic_srm', axis=1)
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape == out['common_model'].shape
    transformed = np.dot(data[0], out['transformation_matrix'][0])
    np.testing.assert_almost_equal(0,
                                   np.sum(out['transformed'][0] - transformed))

    out = align(data, method='probabilistic_srm', axis=1)
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape == out['common_model'].shape
    transformed = np.dot(data[0], out['transformation_matrix'][0])
    np.testing.assert_almost_equal(0,
                                   np.sum(out['transformed'][0] - transformed))

    out2 = align(data, method='procrustes', axis=1)
    assert len(data) == len(out2['transformed'])
    assert data[0].shape == out2['common_model'].shape
    assert len(data) == len(out2['transformation_matrix'])
    assert len(data) == len(out2['disparity'])
    centered = data[0] - np.mean(data[0], 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2['transformation_matrix'][0]) * out2['scale'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out2['transformed'][0] - transformed))
    assert out['transformed'][0].shape == out2['transformed'][0].shape
    assert out['transformation_matrix'][0].shape == out2[
        'transformation_matrix'][0].shape

    # Test hyperalignment on Brain_Data over time (axis=1)
    data = [d1, d2, d3]
    out = align(data, method='deterministic_srm', axis=1)
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape() == out['common_model'].shape()
    transformed = np.dot(d1.data.T, out['transformation_matrix'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out['transformed'][0].data - transformed.T))

    out = align(data, method='probabilistic_srm', axis=1)
    assert len(data) == len(out['transformed'])
    assert len(data) == len(out['transformation_matrix'])
    assert data[0].shape() == out['common_model'].shape()
    transformed = np.dot(d1.data.T, out['transformation_matrix'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out['transformed'][0].data - transformed.T))

    out2 = align(data, method='procrustes', axis=1)
    assert len(data) == len(out2['transformed'])
    assert data[0].shape() == out2['common_model'].shape()
    assert len(data) == len(out2['transformation_matrix'])
    assert len(data) == len(out2['disparity'])
    centered = data[0].data.T - np.mean(data[0].data.T, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2['transformation_matrix'][0]) * out2['scale'][0])
    np.testing.assert_almost_equal(
        0, np.sum(out2['transformed'][0].data - transformed.T))
    assert out['transformed'][0].shape() == out2['transformed'][0].shape()
    assert out['transformation_matrix'][0].shape == out2[
        'transformation_matrix'][0].shape
Example No. 24
# First, let's load the pain data for this example.

from nltools.datasets import fetch_pain

data = fetch_pain()

#########################################################################
# Apply_Mask
# ----------
#
# Spherical masks can be created using the create_sphere function.
# It requires specifying a center voxel and the radius of the sphere.

from nltools.mask import create_sphere

mask = create_sphere([0, 0, 0], radius=30)
masked_data = data.apply_mask(mask)
masked_data.mean().plot()

#########################################################################
# Extract Mean Within ROI
# -----------------------
#
# We can easily calculate the mean within an ROI for each image within a 
# Brain_Data() instance using the extract_roi() method. 

import matplotlib.pyplot as plt

mean = data.extract_roi(mask)
plt.plot(mean)
Example No. 25
def test_brain_data(tmpdir):

    # Add '3mm' to the list to test that resolution as well
    for resolution in ['2mm']:

        MNI_Template["resolution"] = resolution

        sim = Simulator()
        r = 10
        sigma = 1
        y = [0, 1]
        n_reps = 3
        output_dir = str(tmpdir)
        dat = sim.create_data(y, sigma, reps=n_reps, output_dir=output_dir)

        if MNI_Template["resolution"] == '2mm':
            shape_3d = (91, 109, 91)
            shape_2d = (6, 238955)
        elif MNI_Template["resolution"] == '3mm':
            shape_3d = (60, 72, 60)
            shape_2d = (6, 71020)

        y = pd.read_csv(os.path.join(str(tmpdir.join('y.csv'))), header=None, index_col=None)
        holdout = pd.read_csv(os.path.join(str(tmpdir.join('rep_id.csv'))), header=None, index_col=None)

        # Test load list of 4D images
        file_list = [str(tmpdir.join('data.nii.gz')), str(tmpdir.join('data.nii.gz'))]
        dat = Brain_Data(file_list)
        dat = Brain_Data([nb.load(x) for x in file_list])

        # Test load list
        dat = Brain_Data(data=str(tmpdir.join('data.nii.gz')), Y=y)

        # Test concatenate
        out = Brain_Data([x for x in dat])
        assert isinstance(out, Brain_Data)
        assert len(out)==len(dat)

        # Test to_nifti
        d = dat.to_nifti()
        assert d.shape[0:3] == shape_3d

        # Test load nibabel
        assert Brain_Data(d)

        # Test shape
        assert dat.shape() == shape_2d

        # Test Mean
        assert dat.mean().shape()[0] == shape_2d[1]

        # Test Std
        assert dat.std().shape()[0] == shape_2d[1]

        # Test add
        new = dat + dat
        assert new.shape() == shape_2d

        # Test subtract
        new = dat - dat
        assert new.shape() == shape_2d

        # Test multiply
        new = dat * dat
        assert new.shape() == shape_2d

        # Test Indexing
        index = [0, 3, 1]
        assert len(dat[index]) == len(index)
        index = range(4)
        assert len(dat[index]) == len(index)
        index = dat.Y == 1

        assert len(dat[index.values.flatten()]) == index.values.sum()

        assert len(dat[index]) == index.values.sum()
        assert len(dat[:3]) == 3

        # Test Iterator
        x = [x for x in dat]
        assert len(x) == len(dat)
        assert len(x[0].data.shape) == 1

        # # Test T-test
        out = dat.ttest()
        assert out['t'].shape()[0] == shape_2d[1]

        # # # Test T-test - permutation method
        # out = dat.ttest(threshold_dict={'permutation':'tfce','n_permutations':50,'n_jobs':1})
        # assert out['t'].shape()[0]==shape_2d[1]

        # Test Regress
        dat.X = pd.DataFrame({'Intercept':np.ones(len(dat.Y)),
                            'X1':np.array(dat.Y).flatten()}, index=None)

        # Standard OLS
        out = dat.regress()

        assert type(out['beta'].data) == np.ndarray
        assert type(out['t'].data) == np.ndarray
        assert type(out['p'].data) == np.ndarray
        assert type(out['residual'].data) == np.ndarray
        assert type(out['df'].data) == np.ndarray
        assert out['beta'].shape() == (2, shape_2d[1])
        assert out['t'][1].shape()[0] == shape_2d[1]

        # Robust OLS
        out = dat.regress(mode='robust')

        assert type(out['beta'].data) == np.ndarray
        assert type(out['t'].data) == np.ndarray
        assert type(out['p'].data) == np.ndarray
        assert type(out['residual'].data) == np.ndarray
        assert type(out['df'].data) == np.ndarray
        assert out['beta'].shape() == (2, shape_2d[1])
        assert out['t'][1].shape()[0] == shape_2d[1]

        # Test threshold
        i=1
        tt = threshold(out['t'][i], out['p'][i], .05)
        assert isinstance(tt, Brain_Data)

        # Test write
        dat.write(os.path.join(str(tmpdir.join('test_write.nii'))))
        assert Brain_Data(os.path.join(str(tmpdir.join('test_write.nii'))))

        # Test append
        assert dat.append(dat).shape()[0] == shape_2d[0]*2

        # Test distance
        distance = dat.distance(method='euclidean')
        assert isinstance(distance, Adjacency)
        assert distance.square_shape()[0] == shape_2d[0]

        # Test predict
        stats = dat.predict(algorithm='svm',
                            cv_dict={'type': 'kfolds', 'n_folds': 2},
                            plot=False, **{'kernel':"linear"})

        # Support vector classification with Platt scaling (probability=True),
        # here without cross-validation. This will output probabilities of each class
        stats = dat.predict(algorithm='svm',
                            cv_dict=None, plot=False,
                            **{'kernel':'linear', 'probability':True})
        assert isinstance(stats['weight_map'], Brain_Data)

        # Logistic classification, with 2 fold cross-validation.
        stats = dat.predict(algorithm='logistic',
                            cv_dict={'type': 'kfolds', 'n_folds': 2},
                            plot=False)
        assert isinstance(stats['weight_map'], Brain_Data)

        # Ridge classification
        stats = dat.predict(algorithm='ridgeClassifier', cv_dict=None, plot=False)
        assert isinstance(stats['weight_map'], Brain_Data)

        # Ridge
        stats = dat.predict(algorithm='ridge',
                            cv_dict={'type': 'kfolds', 'n_folds': 2,
                            'subject_id':holdout}, plot=False, **{'alpha':.1})

        # Lasso
        stats = dat.predict(algorithm='lasso',
                            cv_dict={'type': 'kfolds', 'n_folds': 2,
                            'stratified':dat.Y}, plot=False, **{'alpha':.1})

        # PCR
        stats = dat.predict(algorithm='pcr', cv_dict=None, plot=False)

        # Test Similarity
        r = dat.similarity(stats['weight_map'])
        assert len(r) == shape_2d[0]
        r2 = dat.similarity(stats['weight_map'].to_nifti())
        assert len(r2) == shape_2d[0]
        r = dat.similarity(stats['weight_map'], method='dot_product')
        assert len(r) == shape_2d[0]
        r = dat.similarity(stats['weight_map'], method='cosine')
        assert len(r) == shape_2d[0]
        r = dat.similarity(dat, method='correlation')
        assert r.shape == (dat.shape()[0],dat.shape()[0])
        r = dat.similarity(dat, method='dot_product')
        assert r.shape == (dat.shape()[0],dat.shape()[0])
        r = dat.similarity(dat, method='cosine')
        assert r.shape == (dat.shape()[0],dat.shape()[0])

        # Test apply_mask - might move part of this to test mask suite
        s1 = create_sphere([12, 10, -8], radius=10)
        assert isinstance(s1, nb.Nifti1Image)
        masked_dat = dat.apply_mask(s1)
        assert masked_dat.shape()[1] == np.sum(s1.get_data() != 0)

        # Test extract_roi
        mask = create_sphere([12, 10, -8], radius=10)
        assert len(dat.extract_roi(mask)) == shape_2d[0]

        # Test r_to_z
        z = dat.r_to_z()
        assert z.shape() == dat.shape()

        # Test copy
        d_copy = dat.copy()
        assert d_copy.shape() == dat.shape()

        # Test detrend
        detrend = dat.detrend()
        assert detrend.shape() == dat.shape()

        # Test standardize
        s = dat.standardize()
        assert s.shape() == dat.shape()
        assert np.isclose(np.sum(s.mean().data), 0, atol=.1)
        s = dat.standardize(method='zscore')
        assert s.shape() == dat.shape()
        assert np.isclose(np.sum(s.mean().data), 0, atol=.1)

        # Test Sum
        s = dat.sum()
        assert s.shape() == dat[1].shape()

        # Test Groupby
        s1 = create_sphere([12, 10, -8], radius=10)
        s2 = create_sphere([22, -2, -22], radius=10)
        mask = Brain_Data([s1, s2])
        d = dat.groupby(mask)
        assert isinstance(d, Groupby)

        # Test Aggregate
        mn = dat.aggregate(mask, 'mean')
        assert isinstance(mn, Brain_Data)
        assert len(mn.shape()) == 1

        # Test Threshold
        s1 = create_sphere([12, 10, -8], radius=10)
        s2 = create_sphere([22, -2, -22], radius=10)
        mask = Brain_Data(s1)*5
        mask = mask + Brain_Data(s2)

        m1 = mask.threshold(upper=.5)
        m2 = mask.threshold(upper=3)
        m3 = mask.threshold(upper='98%')
        m4 = Brain_Data(s1)*5 + Brain_Data(s2)*-.5
        m4 = mask.threshold(upper=.5,lower=-.3)
        assert np.sum(m1.data > 0) > np.sum(m2.data > 0)
        assert np.sum(m1.data > 0) == np.sum(m3.data > 0)
        assert np.sum(m4.data[(m4.data > -.3) & (m4.data <.5)]) == 0
        assert np.sum(m4.data[(m4.data < -.3) | (m4.data >.5)]) > 0

        # Test Regions
        r = mask.regions(min_region_size=10)
        m1 = Brain_Data(s1)
        m2 = r.threshold(1, binarize=True)
        # assert len(r)==2
        assert len(np.unique(r.to_nifti().get_data())) == 2
        diff = m2-m1
        assert np.sum(diff.data) == 0

        # Test Bootstrap
        masked = dat.apply_mask(create_sphere(radius=10, coordinates=[0, 0, 0]))
        n_samples = 3
        b = masked.bootstrap('mean', n_samples=n_samples)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('std', n_samples=n_samples)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples, plot=False)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples,
                        plot=False, cv_dict={'type':'kfolds','n_folds':3})
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples,
                        save_weights=True, plot=False)
        assert len(b['samples'])==n_samples

        # Test decompose
        n_components = 3
        stats = dat.decompose(algorithm='pca', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='ica', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        dat.data = dat.data + 2
        dat.data[dat.data<0] = 0
        stats = dat.decompose(algorithm='nnmf', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='fa', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='pca', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='ica', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        dat.data = dat.data + 2
        dat.data[dat.data<0] = 0
        stats = dat.decompose(algorithm='nnmf', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='fa', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        # Test Hyperalignment Method
        sim = Simulator()
        y = [0, 1]
        n_reps = 10
        s1 = create_sphere([0, 0, 0], radius=3)
        d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
        d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
        d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)

        # Test procrustes using align
        data = [d1, d2, d3]
        out = align(data, method='procrustes')
        assert len(data) == len(out['transformed'])
        assert len(data) == len(out['transformation_matrix'])
        assert data[0].shape() == out['common_model'].shape()
        transformed = np.dot(d1.data, out['transformation_matrix'][0])
        centered = d1.data - np.mean(d1.data, 0)
        transformed = (np.dot(centered/np.linalg.norm(centered), out['transformation_matrix'][0])*out['scale'][0])
        np.testing.assert_almost_equal(0, np.sum(out['transformed'][0].data - transformed), decimal=5)

        # Test deterministic brain_data
        bout = d1.align(out['common_model'], method='deterministic_srm')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data, bout['transformation_matrix'])
        np.testing.assert_almost_equal(0, np.sum(bout['transformed'].data - btransformed))

        # Test probabilistic brain_data
        bout = d1.align(out['common_model'], method='probabilistic_srm')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data, bout['transformation_matrix'])
        np.testing.assert_almost_equal(0, np.sum(bout['transformed'].data-btransformed))

        # Test procrustes brain_data
        bout = d1.align(out['common_model'], method='procrustes')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        centered = d1.data - np.mean(d1.data, 0)
        btransformed = (np.dot(centered/np.linalg.norm(centered), bout['transformation_matrix'])*bout['scale'])
        np.testing.assert_almost_equal(0, np.sum(bout['transformed'].data-btransformed), decimal=5)
        np.testing.assert_almost_equal(0, np.sum(out['transformed'][0].data - bout['transformed'].data))

        # Test hyperalignment on Brain_Data over time (axis=1)
        sim = Simulator()
        y = [0, 1]
        n_reps = 10
        s1 = create_sphere([0, 0, 0], radius=5)
        d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
        d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
        d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
        data = [d1, d2, d3]

        out = align(data, method='procrustes', axis=1)
        assert len(data) == len(out['transformed'])
        assert len(data) == len(out['transformation_matrix'])
        assert data[0].shape() == out['common_model'].shape()
        centered = data[0].data.T-np.mean(data[0].data.T, 0)
        transformed = (np.dot(centered/np.linalg.norm(centered), out['transformation_matrix'][0])*out['scale'][0])
        np.testing.assert_almost_equal(0,np.sum(out['transformed'][0].data-transformed.T), decimal=5)

        bout = d1.align(out['common_model'], method='deterministic_srm', axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data.T, bout['transformation_matrix'])
        np.testing.assert_almost_equal(0, np.sum(bout['transformed'].data - btransformed.T))

        bout = d1.align(out['common_model'], method='probabilistic_srm', axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data.T, bout['transformation_matrix'])
        np.testing.assert_almost_equal(0, np.sum(bout['transformed'].data - btransformed.T))

        bout = d1.align(out['common_model'], method='procrustes', axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        centered = d1.data.T - np.mean(d1.data.T, 0)
        btransformed = np.dot(centered / np.linalg.norm(centered), bout['transformation_matrix']) * bout['scale']
        np.testing.assert_almost_equal(0, np.sum(bout['transformed'].data - btransformed.T), decimal=5)
        np.testing.assert_almost_equal(0, np.sum(out['transformed'][0].data - bout['transformed'].data))
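Distilled from the test above, the hyperalignment call pattern is only a few lines. A minimal standalone sketch (assuming `align` is importable from nltools.stats, as in current nltools releases; `data` and `held_out` are placeholder names for a list of same-shaped Brain_Data objects and a further subject):

from nltools.stats import align

out = align(data, method='procrustes')             # group alignment across subjects
common = out['common_model']                       # shared template
res = held_out.align(common, method='procrustes')  # or 'deterministic_srm' / 'probabilistic_srm'
aligned = res['transformed']                       # held-out data in the common space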
Example No. 26
0
def test_extract_roi(sim_brain_data):
    mask = create_sphere([12, 10, -8], radius=10)
    assert len(sim_brain_data.extract_roi(mask)) == shape_2d[0]
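A note on the assertion above: `extract_roi` collapses each image to a single summary statistic within the mask (the mean by default), so the result has one value per image. A sketch of the same call with the summary made explicit (the `metric` keyword is assumed available, as in recent nltools releases):

values = sim_brain_data.extract_roi(mask, metric='mean')  # shape: (n_images,)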
Example No. 27
0
from os.path import join

from nilearn import plotting

from gclda.model import Model
from gclda.decode import decode_roi
from gclda.utils import get_resource_path

from nltools.mask import create_sphere

###############################################################################
# Load model and initialize decoder
# ----------------------------------
model_file = join(get_resource_path(), 'models/Neurosynth2015Filtered2',
                  'model_200topics_2015Filtered2_10000iters.pklz')
model = Model.load(model_file)

###############################################################################
# Create region of interest (ROI) image
# --------------------------------------
coords = [[-40, -52, -20]]
radii = [6] * len(coords)

roi_img = create_sphere(coords, radius=radii, mask=model.dataset.mask_img)
fig = plotting.plot_roi(roi_img,
                        display_mode='ortho',
                        cut_coords=[-40, -52, -20],
                        draw_cross=False)

###############################################################################
# Decode ROI
# -----------
df, topic_weights = decode_roi(model, roi_img)

###############################################################################
# Get associated terms
# ---------------------
df = df.sort_values(by='Weight', ascending=False)
print(df.head(10))
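Since the weights are plain numbers, a quick bar chart of the strongest associations is often more readable than the printed table. A sketch assuming the DataFrame's index holds the term strings, as in the gclda examples:

import matplotlib.pyplot as plt

top = df.head(10)[::-1]  # reverse so the strongest term plots on top
plt.barh(range(len(top)), top['Weight'])
plt.yticks(range(len(top)), top.index)
plt.xlabel('Weight')
plt.title('Top gclda terms for the ROI')
plt.tight_layout()
plt.show()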
Example No. 28
0
import glob

import nibabel as nib
import numpy as np
from nilearn.image import index_img

# Load the functional runs (each iteration overwrites func_img,
# so only the last matching run is kept)
for i in list(glob.glob('D:/FaceData/func_img/wrarun*.nii')):
    func_img = nib.load(i)

func_img = index_img(func_img, slice(10, 490, 8))  # every 8th volume, indices 10-490

anat_img = nib.load('D:/FaceData/anat_img/anat.nii')

# Create the mask image

from nltools.mask import create_sphere
mask_img = create_sphere([-29, -71, -4], radius=5)
# from nilearn.masking import compute_epi_mask
# mask_img = compute_epi_mask(func_icamcg)

# Transform the data from 4D to 2D (samples x voxels)
from nilearn.input_data import NiftiMasker

masker = NiftiMasker(mask_img=mask_img, standardize=True,
                     memory_level=1)
fmri_masked = masker.fit_transform(func_img)
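# The 2-D matrix can be projected back into brain space for inspection;
# NiftiMasker keeps the mapping learned during fit:
# fmri_unmasked = masker.inverse_transform(fmri_masked)  # back to a 4D image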

# Load behavioral data (again, only the last matching file is kept).
# np.recfromcsv is deprecated in recent NumPy; np.genfromtxt(..., names=True)
# is the modern equivalent.
for j in list(glob.glob('D:/FaceData/label/run*.txt')):
    behavioral = np.recfromcsv(j, delimiter='')

conditions = behavioral['name']
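With `fmri_masked` (samples x voxels) and `conditions` in hand, the natural next step is within-ROI decoding. A minimal sketch with scikit-learn, assuming the number of labels matches the number of retained volumes:

from sklearn.model_selection import cross_val_score
from sklearn.svm import SVC

clf = SVC(kernel='linear')
scores = cross_val_score(clf, fmri_masked, conditions, cv=5)
print('Mean decoding accuracy: %.2f' % scores.mean())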
Example No. 29
0
def test_data(tmpdir):
    sim = Simulator()
    r = 10
    sigma = 1
    y = [0, 1]
    n_reps = 3
    output_dir = str(tmpdir)
    sim.create_data(y, sigma, reps=n_reps, output_dir=output_dir)

    shape_3d = (91, 109, 91)
    shape_2d = (6, 238955)
    y = pd.read_csv(str(tmpdir.join('y.csv')), header=None, index_col=None).T
    flist = glob.glob(str(tmpdir.join('centered*.nii.gz')))

    # Test load list
    dat = Brain_Data(data=flist, Y=y)

    # Test load file
    assert Brain_Data(flist[0])

    # Test to_nifti
    d = dat.to_nifti()
    assert d.shape[0:3] == shape_3d

    # Test load nibabel
    assert Brain_Data(d)

    # Test shape
    assert dat.shape() == shape_2d

    # Test Mean
    assert dat.mean().shape()[0] == shape_2d[1]

    # Test Std
    assert dat.std().shape()[0] == shape_2d[1]

    # Test add
    new = dat + dat
    assert new.shape() == shape_2d

    # Test subtract
    new = dat - dat
    assert new.shape() == shape_2d

    # Test multiply
    new = dat * dat
    assert new.shape() == shape_2d

    # Test Iterator
    x = [x for x in dat]
    assert len(x) == len(dat)
    assert len(x[0].data.shape) == 1

    # Test T-test
    out = dat.ttest()
    assert out['t'].shape()[0] == shape_2d[1]

    # # # Test T-test - permutation method
    # out = dat.ttest(threshold_dict={'permutation':'tfce','n_permutations':50,'n_jobs':1})
    # assert out['t'].shape()[0]==shape_2d[1]

    # Test Regress
    dat.X = pd.DataFrame(
        {
            'Intercept': np.ones(len(dat.Y)),
            'X1': np.array(dat.Y).flatten()
        },
        index=None)
    out = dat.regress()
    assert out['beta'].shape() == (2, shape_2d[1])

    # Test indexing
    assert out['t'][1].shape()[0] == shape_2d[1]

    # Test threshold
    i = 1
    tt = threshold(out['t'][i], out['p'][i], .05)
    assert isinstance(tt, Brain_Data)

    # Test write
    dat.write(str(tmpdir.join('test_write.nii')))
    assert Brain_Data(str(tmpdir.join('test_write.nii')))

    # Test append
    assert dat.append(dat).shape()[0] == shape_2d[0] * 2

    # Test distance
    distance = dat.distance(method='euclidean')
    assert distance.shape == (shape_2d[0], shape_2d[0])
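    # The symmetric matrix can feed standard clustering directly, e.g.
    # (an aside, assuming scipy and a plain square matrix as asserted above):
    # from scipy.spatial.distance import squareform
    # from scipy.cluster.hierarchy import linkage
    # linkage(squareform(distance, checks=False), method='average')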

    # Test predict
    stats = dat.predict(algorithm='svm',
                        cv_dict={
                            'type': 'kfolds',
                            'n_folds': 2,
                            'n': len(dat.Y)
                        },
                        plot=False,
                        kernel='linear')

    # SVM classification with Platt scaling (probability=True) and no
    # cross-validation; this also computes class probabilities
    stats = dat.predict(algorithm='svm',
                        cv_dict=None,
                        plot=False,
                        kernel='linear',
                        probability=True)

    assert isinstance(stats['weight_map'], Brain_Data)

    # Logistic classification with 5-fold cross-validation
    stats = dat.predict(algorithm='logistic',
                        cv_dict={
                            'type': 'kfolds',
                            'n_folds': 5,
                            'n': len(dat.Y)
                        },
                        plot=False)
    assert isinstance(stats['weight_map'], Brain_Data)

    # Ridge classification without cross-validation
    stats = dat.predict(algorithm='ridgeClassifier', cv_dict=None, plot=False)
    assert isinstance(stats['weight_map'], Brain_Data)

    # Test Similarity
    r = dat.similarity(stats['weight_map'])
    assert len(r) == shape_2d[0]
    r2 = dat.similarity(stats['weight_map'].to_nifti())
    assert len(r2) == shape_2d[0]

    # Test apply_mask - might move part of this to test mask suite
    s1 = create_sphere([41, 64, 55], radius=10)
    assert isinstance(s1, nb.Nifti1Image)
    s2 = Brain_Data(s1)
    masked_dat = dat.apply_mask(s1)
    assert masked_dat.shape()[1] == np.sum(s2.data != 0)

    # Test extract_roi
    mask = create_sphere([41, 64, 55], radius=10)
    assert len(dat.extract_roi(mask)) == shape_2d[0]

    # Test r_to_z
    z = dat.r_to_z()
    assert z.shape() == dat.shape()

    # Test copy
    d_copy = dat.copy()
    assert d_copy.shape() == dat.shape()

    # Test detrend
    detrend = dat.detrend()
    assert detrend.shape() == dat.shape()
Example No. 30
0
import os
import tempfile
from time import time

import pandas as pd

from nltools.simulator import Simulator
from nltools.mask import create_sphere

tmp_dir = os.path.join(tempfile.gettempdir(), str(os.times()[-1]))

###############################################################################
# Create data

tic = time()  # Start timer

sim = Simulator()
r = 10
sigma = .5
cor = .8
cov = .6
n_trials = 10
n_subs = 5
s1 = create_sphere([41, 64, 55], radius=r)
sim.create_cov_data(cor,
                    cov,
                    sigma,
                    mask=s1,
                    reps=n_trials,
                    n_sub=n_subs,
                    output_dir=tmp_dir)
print('Simulate Data: Elapsed: %.2f seconds' % (time() - tic))  # Stop timer

###############################################################################
# Load data

tic = time()  # Start timer

y = pd.read_csv(os.path.join(tmp_dir, 'y.csv'), header=None, index_col=None).T