import os

import nibabel as nb
import numpy as np
import pandas as pd

from nltools.data import Adjacency, Brain_Data, Groupby
from nltools.mask import create_sphere
from nltools.prefs import MNI_Template
from nltools.simulator import Simulator
from nltools.stats import align, threshold


def test_brain_data(tmpdir):
    # Add '3mm' to this list to test that resolution as well
    for resolution in ['2mm']:
        MNI_Template["resolution"] = resolution
        sim = Simulator()
        r = 10
        sigma = 1
        y = [0, 1]
        n_reps = 3
        output_dir = str(tmpdir)
        dat = sim.create_data(y, sigma, reps=n_reps, output_dir=output_dir)

        if MNI_Template["resolution"] == '2mm':
            shape_3d = (91, 109, 91)
            shape_2d = (6, 238955)
        elif MNI_Template["resolution"] == '3mm':
            shape_3d = (60, 72, 60)
            shape_2d = (6, 71020)

        y = pd.read_csv(os.path.join(str(tmpdir.join('y.csv'))),
                        header=None, index_col=None)
        holdout = pd.read_csv(os.path.join(str(tmpdir.join('rep_id.csv'))),
                              header=None, index_col=None)

        # Test load list of 4D images
        file_list = [str(tmpdir.join('data.nii.gz')),
                     str(tmpdir.join('data.nii.gz'))]
        dat = Brain_Data(file_list)
        dat = Brain_Data([nb.load(x) for x in file_list])

        # Test load single file with Y
        dat = Brain_Data(data=str(tmpdir.join('data.nii.gz')), Y=y)

        # Test concatenate
        out = Brain_Data([x for x in dat])
        assert isinstance(out, Brain_Data)
        assert len(out) == len(dat)

        # Test to_nifti
        d = dat.to_nifti()
        assert d.shape[0:3] == shape_3d

        # Test load nibabel
        assert Brain_Data(d)

        # Test shape
        assert dat.shape() == shape_2d

        # Test Mean
        assert dat.mean().shape()[0] == shape_2d[1]

        # Test Std
        assert dat.std().shape()[0] == shape_2d[1]

        # Test add
        new = dat + dat
        assert new.shape() == shape_2d

        # Test subtract
        new = dat - dat
        assert new.shape() == shape_2d

        # Test multiply
        new = dat * dat
        assert new.shape() == shape_2d

        # Test Indexing
        index = [0, 3, 1]
        assert len(dat[index]) == len(index)
        index = range(4)
        assert len(dat[index]) == len(index)
        index = dat.Y == 1
        assert len(dat[index.values.flatten()]) == index.values.sum()
        assert len(dat[index]) == index.values.sum()
        assert len(dat[:3]) == 3

        # Test Iterator
        x = [x for x in dat]
        assert len(x) == len(dat)
        assert len(x[0].data.shape) == 1

        # Test T-test
        out = dat.ttest()
        assert out['t'].shape()[0] == shape_2d[1]

        # Test T-test - permutation method
        # out = dat.ttest(threshold_dict={'permutation': 'tfce',
        #                                 'n_permutations': 50, 'n_jobs': 1})
        # assert out['t'].shape()[0] == shape_2d[1]

        # Test Regress
        dat.X = pd.DataFrame({'Intercept': np.ones(len(dat.Y)),
                              'X1': np.array(dat.Y).flatten()}, index=None)

        # Standard OLS
        out = dat.regress()
        assert type(out['beta'].data) == np.ndarray
        assert type(out['t'].data) == np.ndarray
        assert type(out['p'].data) == np.ndarray
        assert type(out['residual'].data) == np.ndarray
        assert type(out['df'].data) == np.ndarray
        assert out['beta'].shape() == (2, shape_2d[1])
        assert out['t'][1].shape()[0] == shape_2d[1]

        # Robust OLS
        out = dat.regress(mode='robust')
        assert type(out['beta'].data) == np.ndarray
        assert type(out['t'].data) == np.ndarray
        assert type(out['p'].data) == np.ndarray
        assert type(out['residual'].data) == np.ndarray
        assert type(out['df'].data) == np.ndarray
        assert out['beta'].shape() == (2, shape_2d[1])
        assert out['t'][1].shape()[0] == shape_2d[1]

        # Test threshold
        i = 1
        tt = threshold(out['t'][i], out['p'][i], .05)
        assert isinstance(tt, Brain_Data)

        # Test write
        dat.write(os.path.join(str(tmpdir.join('test_write.nii'))))
        assert Brain_Data(os.path.join(str(tmpdir.join('test_write.nii'))))

        # Test append
        assert dat.append(dat).shape()[0] == shape_2d[0] * 2

        # Test distance
        distance = dat.distance(method='euclidean')
        assert isinstance(distance, Adjacency)
        assert distance.square_shape()[0] == shape_2d[0]

        # Test predict
        # Support vector classification with 2-fold cross-validation
        stats = dat.predict(algorithm='svm',
                            cv_dict={'type': 'kfolds', 'n_folds': 2},
                            plot=False, **{'kernel': 'linear'})

        # SVM with Platt scaling; probability=True outputs class probabilities
        stats = dat.predict(algorithm='svm', cv_dict=None, plot=False,
                            **{'kernel': 'linear', 'probability': True})
        assert isinstance(stats['weight_map'], Brain_Data)

        # Logistic classification, with 2-fold cross-validation
        stats = dat.predict(algorithm='logistic',
                            cv_dict={'type': 'kfolds', 'n_folds': 2},
                            plot=False)
        assert isinstance(stats['weight_map'], Brain_Data)

        # Ridge classification
        stats = dat.predict(algorithm='ridgeClassifier', cv_dict=None,
                            plot=False)
        assert isinstance(stats['weight_map'], Brain_Data)

        # Ridge regression with subject-holdout cross-validation
        stats = dat.predict(algorithm='ridge',
                            cv_dict={'type': 'kfolds', 'n_folds': 2,
                                     'subject_id': holdout},
                            plot=False, **{'alpha': .1})

        # Lasso regression with stratified cross-validation
        stats = dat.predict(algorithm='lasso',
                            cv_dict={'type': 'kfolds', 'n_folds': 2,
                                     'stratified': dat.Y},
                            plot=False, **{'alpha': .1})

        # Principal components regression
        stats = dat.predict(algorithm='pcr', cv_dict=None, plot=False)

        # Test Similarity
        r = dat.similarity(stats['weight_map'])
        assert len(r) == shape_2d[0]
        r2 = dat.similarity(stats['weight_map'].to_nifti())
        assert len(r2) == shape_2d[0]
        r = dat.similarity(stats['weight_map'], method='dot_product')
        assert len(r) == shape_2d[0]
        r = dat.similarity(stats['weight_map'], method='cosine')
        assert len(r) == shape_2d[0]
        r = dat.similarity(dat, method='correlation')
        assert r.shape == (dat.shape()[0], dat.shape()[0])
        r = dat.similarity(dat, method='dot_product')
        assert r.shape == (dat.shape()[0], dat.shape()[0])
        r = dat.similarity(dat, method='cosine')
        assert r.shape == (dat.shape()[0], dat.shape()[0])

        # Test apply_mask - might move part of this to the mask test suite
        s1 = create_sphere([12, 10, -8], radius=10)
        assert isinstance(s1, nb.Nifti1Image)
        masked_dat = dat.apply_mask(s1)
        assert masked_dat.shape()[1] == np.sum(s1.get_data() != 0)

        # Test extract_roi
        mask = create_sphere([12, 10, -8], radius=10)
        assert len(dat.extract_roi(mask)) == shape_2d[0]

        # Test r_to_z
        z = dat.r_to_z()
        assert z.shape() == dat.shape()

        # Test copy
        d_copy = dat.copy()
        assert d_copy.shape() == dat.shape()

        # Test detrend
        detrend = dat.detrend()
        assert detrend.shape() == dat.shape()

        # Test standardize
        s = dat.standardize()
        assert s.shape() == dat.shape()
        assert np.isclose(np.sum(s.mean().data), 0, atol=.1)
        s = dat.standardize(method='zscore')
        assert s.shape() == dat.shape()
        assert np.isclose(np.sum(s.mean().data), 0, atol=.1)

        # Test Sum
        s = dat.sum()
        assert s.shape() == dat[1].shape()

        # Test Groupby
        s1 = create_sphere([12, 10, -8], radius=10)
        s2 = create_sphere([22, -2, -22], radius=10)
        mask = Brain_Data([s1, s2])
        d = dat.groupby(mask)
        assert isinstance(d, Groupby)

        # Test Aggregate
        mn = dat.aggregate(mask, 'mean')
        assert isinstance(mn, Brain_Data)
        assert len(mn.shape()) == 1

        # Test Threshold
        s1 = create_sphere([12, 10, -8], radius=10)
        s2 = create_sphere([22, -2, -22], radius=10)
        mask = Brain_Data(s1) * 5
        mask = mask + Brain_Data(s2)
        m1 = mask.threshold(upper=.5)
        m2 = mask.threshold(upper=3)
        m3 = mask.threshold(upper='98%')
        m4 = Brain_Data(s1) * 5 + Brain_Data(s2) * -.5
        m4 = mask.threshold(upper=.5, lower=-.3)
        assert np.sum(m1.data > 0) > np.sum(m2.data > 0)
        assert np.sum(m1.data > 0) == np.sum(m3.data > 0)
        assert np.sum(m4.data[(m4.data > -.3) & (m4.data < .5)]) == 0
        assert np.sum(m4.data[(m4.data < -.3) | (m4.data > .5)]) > 0

        # Test Regions
        r = mask.regions(min_region_size=10)
        m1 = Brain_Data(s1)
        m2 = r.threshold(1, binarize=True)
        # assert len(r) == 2
        assert len(np.unique(r.to_nifti().get_data())) == 2
        diff = m2 - m1
        assert np.sum(diff.data) == 0

        # Test Bootstrap
        masked = dat.apply_mask(create_sphere(radius=10,
                                              coordinates=[0, 0, 0]))
        n_samples = 3
        b = masked.bootstrap('mean', n_samples=n_samples)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('std', n_samples=n_samples)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples, plot=False)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples, plot=False,
                             cv_dict={'type': 'kfolds', 'n_folds': 3})
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples,
                             save_weights=True, plot=False)
        assert len(b['samples']) == n_samples

        # Test decompose
        n_components = 3
        stats = dat.decompose(algorithm='pca', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='ica', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        # nnmf requires non-negative data
        dat.data = dat.data + 2
        dat.data[dat.data < 0] = 0
        stats = dat.decompose(algorithm='nnmf', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='fa', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='pca', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='ica', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        dat.data = dat.data + 2
        dat.data[dat.data < 0] = 0
        stats = dat.decompose(algorithm='nnmf', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='fa', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        # Test Hyperalignment Method
        sim = Simulator()
        y = [0, 1]
        n_reps = 10
        s1 = create_sphere([0, 0, 0], radius=3)
        d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
        d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
        d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)

        # Test procrustes using align
        data = [d1, d2, d3]
        out = align(data, method='procrustes')
        assert len(data) == len(out['transformed'])
        assert len(data) == len(out['transformation_matrix'])
        assert data[0].shape() == out['common_model'].shape()
        centered = d1.data - np.mean(d1.data, 0)
        transformed = (np.dot(centered / np.linalg.norm(centered),
                              out['transformation_matrix'][0])
                       * out['scale'][0])
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - transformed), decimal=5)

        # Test deterministic srm on Brain_Data
        bout = d1.align(out['common_model'], method='deterministic_srm')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed))

        # Test probabilistic srm on Brain_Data
        bout = d1.align(out['common_model'], method='probabilistic_srm')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed))

        # Test procrustes on Brain_Data
        bout = d1.align(out['common_model'], method='procrustes')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        centered = d1.data - np.mean(d1.data, 0)
        btransformed = (np.dot(centered / np.linalg.norm(centered),
                               bout['transformation_matrix'])
                        * bout['scale'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed), decimal=5)
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - bout['transformed'].data))

        # Test hyperalignment on Brain_Data over time (axis=1)
        sim = Simulator()
        y = [0, 1]
        n_reps = 10
        s1 = create_sphere([0, 0, 0], radius=5)
        d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
        d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
        d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
        data = [d1, d2, d3]
        out = align(data, method='procrustes', axis=1)
        assert len(data) == len(out['transformed'])
        assert len(data) == len(out['transformation_matrix'])
        assert data[0].shape() == out['common_model'].shape()
        centered = data[0].data.T - np.mean(data[0].data.T, 0)
        transformed = (np.dot(centered / np.linalg.norm(centered),
                              out['transformation_matrix'][0])
                       * out['scale'][0])
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - transformed.T), decimal=5)

        bout = d1.align(out['common_model'], method='deterministic_srm',
                        axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data.T, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed.T))

        bout = d1.align(out['common_model'], method='probabilistic_srm',
                        axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data.T, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed.T))

        bout = d1.align(out['common_model'], method='procrustes', axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        centered = d1.data.T - np.mean(d1.data.T, 0)
        btransformed = (np.dot(centered / np.linalg.norm(centered),
                               bout['transformation_matrix'])
                        * bout['scale'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed.T), decimal=5)
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - bout['transformed'].data))
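
# The procrustes assertions above repeatedly reconstruct the aligned data by
# centering the source, scaling it to unit Frobenius norm, rotating it through
# the learned transformation matrix, and applying the scale term. A minimal
# sketch of that identity as a hypothetical helper (not part of nltools; the
# name and signature are assumptions for illustration):
def _procrustes_reconstruct(source, rotation, scale):
    """Reproduce align()'s procrustes output from its returned pieces.

    source: (n_samples, n_features) array; rotation: the transformation
    matrix returned by align(); scale: the scalar scale term returned by
    align().
    """
    # center each feature, normalize the whole matrix, then rotate and scale
    centered = source - np.mean(source, 0)
    return np.dot(centered / np.linalg.norm(centered), rotation) * scale
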
def test_align():
    # Test hyperalignment on matrices
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=3)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1.data, d2.data, d3.data]

    out = align(data, method="deterministic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0], out["transformation_matrix"][0])
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0] - transformed.T), 0, decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out = align(data, method="probabilistic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0], out["transformation_matrix"][0])
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0] - transformed.T), 0, decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out2 = align(data, method="procrustes")
    assert len(data) == len(out2["transformed"])
    assert data[0].shape == out2["common_model"].shape
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0] - np.mean(data[0], 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2["transformation_matrix"][0])
                   * out2["scale"][0])
    np.testing.assert_almost_equal(
        np.sum(out2["transformed"][0] - transformed.T), 0, decimal=3)
    assert out["transformed"][0].shape == out2["transformed"][0].shape
    assert (out["transformation_matrix"][0].shape
            == out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out["transformed"][0].shape[0]

    # Test hyperalignment on Brain_Data
    data = [d1, d2, d3]
    out = align(data, method="deterministic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data, out["transformation_matrix"][0].data.T)
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0].data - transformed), 0, decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out = align(data, method="probabilistic_srm")
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data, out["transformation_matrix"][0].data.T)
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0].data - transformed), 0, decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out2 = align(data, method="procrustes")
    assert len(data) == len(out2["transformed"])
    assert data[0].shape() == out2["common_model"].shape()
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0].data - np.mean(data[0].data, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2["transformation_matrix"][0].data)
                   * out2["scale"][0])
    np.testing.assert_almost_equal(
        np.sum(out2["transformed"][0].data - transformed), 0, decimal=3)
    assert out2["transformed"][0].shape() == out2["transformed"][0].shape()
    assert (out2["transformation_matrix"][0].shape
            == out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out2["transformed"][0].shape()[1]

    # Test hyperalignment on matrices over time (axis=1)
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=5)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1.data, d2.data, d3.data]

    out = align(data, method="deterministic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0].T, out["transformation_matrix"][0].data)
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0] - transformed), 0, decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out = align(data, method="probabilistic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape == out["common_model"].shape
    transformed = np.dot(data[0].T, out["transformation_matrix"][0])
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0] - transformed), 0, decimal=3)
    assert len(out["isc"]) == out["transformed"][0].shape[1]

    out2 = align(data, method="procrustes", axis=1)
    assert len(data) == len(out2["transformed"])
    assert data[0].shape == out2["common_model"].shape
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0] - np.mean(data[0], 0)
    transformed = (np.dot((centered / np.linalg.norm(centered)).T,
                          out2["transformation_matrix"][0].data)
                   * out2["scale"][0])
    np.testing.assert_almost_equal(
        np.sum(out2["transformed"][0] - transformed), 0, decimal=3)
    assert out["transformed"][0].shape == out2["transformed"][0].shape
    assert (out["transformation_matrix"][0].shape
            == out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out2["transformed"][0].shape[0]

    # Test hyperalignment on Brain_Data over time (axis=1)
    data = [d1, d2, d3]
    out = align(data, method="deterministic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data.T, out["transformation_matrix"][0].data).T
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0].data - transformed), 0, decimal=5)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out = align(data, method="probabilistic_srm", axis=1)
    assert len(data) == len(out["transformed"])
    assert len(data) == len(out["transformation_matrix"])
    assert data[0].shape() == out["common_model"].shape
    transformed = np.dot(d1.data.T, out["transformation_matrix"][0].data).T
    np.testing.assert_almost_equal(
        np.sum(out["transformed"][0].data - transformed), 0, decimal=5)
    assert len(out["isc"]) == out["transformed"][0].shape[0]

    out2 = align(data, method="procrustes", axis=1)
    assert len(data) == len(out2["transformed"])
    assert data[0].shape() == out2["common_model"].shape()
    assert len(data) == len(out2["transformation_matrix"])
    assert len(data) == len(out2["disparity"])
    centered = data[0].data.T - np.mean(data[0].data.T, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out2["transformation_matrix"][0].data)
                   * out2["scale"][0]).T
    np.testing.assert_almost_equal(
        np.sum(out2["transformed"][0].data - transformed), 0, decimal=5)
    assert out2["transformed"][0].shape() == out2["transformed"][0].shape()
    assert (out2["transformation_matrix"][0].shape
            == out2["transformation_matrix"][0].shape)
    assert len(out2["isc"]) == out2["transformed"][0].shape()[1]
def test_hyperalignment():
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=3)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1, d2, d3]

    # Test deterministic srm on Brain_Data
    out = align(data, method="deterministic_srm")
    bout = d1.align(out["common_model"], method="deterministic_srm")
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[1] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed))

    # Test probabilistic srm on Brain_Data
    bout = d1.align(out["common_model"], method="probabilistic_srm")
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[1] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed))

    # Test procrustes on Brain_Data
    out = align(data, method="procrustes")
    centered = data[0].data - np.mean(data[0].data, 0)
    transformed = (np.dot(centered / np.linalg.norm(centered),
                          out["transformation_matrix"][0].data)
                   * out["scale"][0])
    bout = d1.align(out["common_model"], method="procrustes")
    assert d1.shape() == bout["transformed"].shape()
    assert d1.shape() == bout["common_model"].shape()
    assert d1.shape()[1] == bout["transformation_matrix"].shape()[0]
    centered = d1.data - np.mean(d1.data, 0)
    btransformed = (np.dot(centered / np.linalg.norm(centered),
                           bout["transformation_matrix"].data)
                    * bout["scale"])
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed), decimal=5)
    np.testing.assert_almost_equal(
        0, np.sum(out["transformed"][0].data - bout["transformed"].data))

    # Test over time (axis=1)
    sim = Simulator()
    y = [0, 1]
    n_reps = 10
    s1 = create_sphere([0, 0, 0], radius=5)
    d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
    d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
    d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
    data = [d1, d2, d3]

    out = align(data, method="deterministic_srm", axis=1)
    bout = d1.align(out["common_model"], method="deterministic_srm", axis=1)
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[0] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data.T, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed.T))

    out = align(data, method="probabilistic_srm", axis=1)
    bout = d1.align(out["common_model"], method="probabilistic_srm", axis=1)
    assert d1.shape() == bout["transformed"].shape
    assert d1.shape() == bout["common_model"].shape
    assert d1.shape()[0] == bout["transformation_matrix"].shape()[0]
    btransformed = np.dot(d1.data.T, bout["transformation_matrix"].data.T)
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed.T))

    out = align(data, method="procrustes", axis=1)
    bout = d1.align(out["common_model"], method="procrustes", axis=1)
    assert d1.shape() == bout["transformed"].shape()
    assert d1.shape() == bout["common_model"].shape()
    assert d1.shape()[0] == bout["transformation_matrix"].shape()[0]
    centered = d1.data.T - np.mean(d1.data.T, 0)
    btransformed = (np.dot(centered / np.linalg.norm(centered),
                           bout["transformation_matrix"].data)
                    * bout["scale"])
    np.testing.assert_almost_equal(
        0, np.sum(bout["transformed"].data - btransformed.T), decimal=5)
    np.testing.assert_almost_equal(
        0, np.sum(out["transformed"][0].data - bout["transformed"].data))
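
# A usage sketch of the pattern these tests exercise (hypothetical helper, not
# part of nltools; it only combines the align() and Brain_Data.align() calls
# tested above): learn a common model from a training group, then project a
# held-out subject into that space.
def _example_align_held_out(train_subjects, held_out, method="procrustes"):
    # fit the common model on the training subjects
    group = align(train_subjects, method=method)
    # project the held-out subject into the learned common space
    projected = held_out.align(group["common_model"], method=method)
    return group["common_model"], projected["transformed"]
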
#########################################################################
# Hyperalign Data
# ---------------
#
# We will now align voxels with the same signal across participants. We will
# start using hyperalignment with the procrustes transform. The align function
# takes a list of Brain_Data objects (or numpy matrices) and aligns voxels based
# on similar responses over time. The function outputs a dictionary with keys
# for a list of the transformed data, the corresponding transformation matrices
# and scaling terms. In addition, it returns the "common model" into which all
# subjects are projected. The disparity values correspond to the multivariate
# distance of each subject to the common space.

from nltools.stats import align

out = align(data, method='procrustes')

print(out.keys())

#########################################################################
# Plot Transformed Data
# ---------------------
#
# To make it clearer what is happening, we plot the voxel by time matrices
# separately for each subject. It is clear that there is a consistent signal
# across voxels, but that the signal is distributed across 'different' voxels.
# The transformed data show the voxels for each subject aligned to the common
# space, which now permits inferences across the voxels. As an example, we
# plot the matrices of the original data above the aligned data for each
# subject.

import matplotlib.pyplot as plt

f, a = plt.subplots(nrows=2, ncols=len(data), figsize=(15, 5),
                    sharex=True, sharey=True)
# top row: original voxel-by-time matrices; bottom row: aligned matrices
for i, (orig, aligned) in enumerate(zip(data, out['transformed'])):
    a[0, i].imshow(orig.data.T, aspect='auto')
    a[1, i].imshow(aligned.data.T, aspect='auto')
a[0, 0].set_ylabel('Original Voxels')
a[1, 0].set_ylabel('Aligned Voxels')
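
#########################################################################
# Inspect Alignment Quality
# -------------------------
#
# The disparity values mentioned above quantify how far each subject remains
# from the common space after alignment. A minimal sketch for inspecting them
# (this cell is illustrative and assumes only the output dictionary described
# above):

# one disparity value per subject; smaller values indicate a closer fit
for i, disparity in enumerate(out['disparity']):
    print(f"Subject {i}: disparity = {disparity}")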