def test_regress(sim_brain_data):
    sim_brain_data.X = pd.DataFrame(
        {
            "Intercept": np.ones(len(sim_brain_data.Y)),
            "X1": np.array(sim_brain_data.Y).flatten(),
        },
        index=None,
    )

    # OLS
    out = sim_brain_data.regress()
    assert type(out["beta"].data) == np.ndarray
    assert type(out["t"].data) == np.ndarray
    assert type(out["p"].data) == np.ndarray
    assert type(out["residual"].data) == np.ndarray
    assert out["beta"].shape() == (2, shape_2d[1])
    assert out["t"][1].shape()[0] == shape_2d[1]

    # Robust OLS
    out = sim_brain_data.regress(mode="robust")
    assert type(out["beta"].data) == np.ndarray
    assert type(out["t"].data) == np.ndarray
    assert type(out["p"].data) == np.ndarray
    assert type(out["residual"].data) == np.ndarray
    assert out["beta"].shape() == (2, shape_2d[1])
    assert out["t"][1].shape()[0] == shape_2d[1]

    # Test threshold
    i = 1
    tt = threshold(out["t"][i], out["p"][i], 0.05)
    assert isinstance(tt, Brain_Data)
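# test_regress above depends on a `sim_brain_data` fixture and a module-level
# `shape_2d` constant that live outside this snippet (typically in
# conftest.py). A minimal sketch of what they could look like, assuming
# Simulator.create_data returns a Brain_Data whose .Y holds the simulated
# condition labels; the project's actual fixture may differ:

import pytest
from nltools.simulator import Simulator

shape_2d = (6, 238955)  # (images, voxels) at the 2mm MNI resolution


@pytest.fixture(scope="module")
def sim_brain_data():
    # Simulate a small dataset: 3 reps of two conditions with sigma=1.
    sim = Simulator()
    return sim.create_data([0, 1], 1, reps=3, output_dir=None)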
for i in all_sub_motor_rsa:
    rsa_stats.append(one_sample_permutation(fisher_r_to_z(all_sub_motor_rsa[i])))

# We can plot a thresholded map using fdr correction as the threshold

# In[117]:

fdr_p = fdr(np.array([x['p'] for x in rsa_stats]), q=0.05)
print(fdr_p)

rsa_motor_r = Brain_Data([x * y['mean'] for x, y in zip(mask_x, rsa_stats)]).sum()
rsa_motor_p = Brain_Data([x * y['p'] for x, y in zip(mask_x, rsa_stats)]).sum()

thresholded = threshold(rsa_motor_r, rsa_motor_p, thr=fdr_p)
plot_glass_brain(thresholded.to_nifti(), cmap='coolwarm')

# Looks like nothing survives FDR correction. Let's try a more liberal
# uncorrected threshold.

# In[124]:

thresholded = threshold(rsa_motor_r, rsa_motor_p, thr=0.01)
plot_glass_brain(thresholded.to_nifti(), cmap='coolwarm')

# In[125]:

view_img(thresholded.to_nifti())
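# For reference, the `fdr` call above implements the Benjamini-Hochberg
# procedure: sort the p-values and return the largest one that stays at or
# below its rank-scaled cutoff q*k/m. A minimal illustrative
# reimplementation (a sketch; nltools' own `fdr` is what is actually used
# above, and its edge-case behavior may differ):

def fdr_bh(p_values, q=0.05):
    """Return the Benjamini-Hochberg p threshold, or -1 if nothing passes."""
    p_sorted = np.sort(np.asarray(p_values).flatten())
    m = len(p_sorted)
    below = p_sorted <= q * np.arange(1, m + 1) / m
    return p_sorted[below].max() if below.any() else -1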
def ttest(self, threshold_dict=None):
    """ Calculate one sample t-test across each voxel (two-sided)

    Args:
        threshold_dict: a dictionary of threshold parameters
            {'unc': .001} or {'fdr': .05} or
            {'permutation': 'tfce', 'n_permutations': 5000};
            the permutation mode also accepts optional
            'connectivity', 'n_jobs', and 'stat_fun' keys.

    Returns:
        out: dictionary of t-test statistics in Brain_Data instances
            {'t', 'p'}

    """
    t = deepcopy(self)
    p = deepcopy(self)

    if threshold_dict is not None:
        if 'permutation' in threshold_dict:
            # Convert data to correct shape (subjects, time, space)
            data_convert_shape = deepcopy(self.data)
            data_convert_shape = np.expand_dims(data_convert_shape, axis=1)
            if 'n_permutations' in threshold_dict:
                n_permutations = threshold_dict['n_permutations']
            else:
                n_permutations = 1000
                warnings.warn("n_permutations not set: running with 1000 permutations")

            if 'connectivity' in threshold_dict:
                connectivity = threshold_dict['connectivity']
            else:
                connectivity = None

            if 'n_jobs' in threshold_dict:
                n_jobs = threshold_dict['n_jobs']
            else:
                n_jobs = 1

            if threshold_dict['permutation'] == 'tfce':
                perm_threshold = dict(start=0, step=0.2)
            else:
                perm_threshold = None

            if 'stat_fun' in threshold_dict:
                stat_fun = threshold_dict['stat_fun']
            else:
                stat_fun = ttest_1samp_no_p

            t.data, clusters, p_values, h0 = spatio_temporal_cluster_1samp_test(
                data_convert_shape,
                tail=0,
                threshold=perm_threshold,
                stat_fun=stat_fun,
                connectivity=connectivity,
                n_permutations=n_permutations,
                n_jobs=n_jobs)

            t.data = t.data.squeeze()

            p = deepcopy(t)
            for cl, pval in zip(clusters, p_values):
                p.data[cl[1][0]] = pval
        else:
            t.data, p.data = ttest_1samp(self.data, 0, 0)
    else:
        t.data, p.data = ttest_1samp(self.data, 0, 0)

    if threshold_dict is not None:
        if isinstance(threshold_dict, dict):
            if 'unc' in threshold_dict:
                thr = threshold_dict['unc']
            elif 'fdr' in threshold_dict:
                thr = fdr(p.data, q=threshold_dict['fdr'])
            elif 'permutation' in threshold_dict:
                thr = .05
            thr_t = threshold(t, p, thr)
            out = {'t': t, 'p': p, 'thr_t': thr_t}
        else:
            raise ValueError(
                "threshold_dict is not a dictionary. Make sure it is in the "
                "form of {'unc': .001} or {'fdr': .05}")
    else:
        out = {'t': t, 'p': p}
    return out
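# Usage sketch for the method above, with `dat` standing in for any
# Brain_Data instance (option values taken from the docstring):
#
#     out = dat.ttest()                              # unthresholded {'t', 'p'}
#     out = dat.ttest(threshold_dict={'unc': .001})  # uncorrected p < .001
#     out = dat.ttest(threshold_dict={'fdr': .05})   # FDR-corrected at q = .05
#     out = dat.ttest(threshold_dict={'permutation': 'tfce',
#                                     'n_permutations': 5000})
#     out['thr_t']  # thresholded t-map, present only when thresholding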
def test_brain_data(tmpdir):
    # Add '3mm' to the list to test that resolution as well
    for resolution in ['2mm']:
        MNI_Template["resolution"] = resolution

        sim = Simulator()
        r = 10
        sigma = 1
        y = [0, 1]
        n_reps = 3
        output_dir = str(tmpdir)
        dat = sim.create_data(y, sigma, reps=n_reps, output_dir=output_dir)

        if MNI_Template["resolution"] == '2mm':
            shape_3d = (91, 109, 91)
            shape_2d = (6, 238955)
        elif MNI_Template["resolution"] == '3mm':
            shape_3d = (60, 72, 60)
            shape_2d = (6, 71020)

        y = pd.read_csv(os.path.join(str(tmpdir.join('y.csv'))),
                        header=None, index_col=None)
        holdout = pd.read_csv(os.path.join(str(tmpdir.join('rep_id.csv'))),
                              header=None, index_col=None)

        # Test load list of 4D images
        file_list = [str(tmpdir.join('data.nii.gz')), str(tmpdir.join('data.nii.gz'))]
        dat = Brain_Data(file_list)
        dat = Brain_Data([nb.load(x) for x in file_list])

        # Test load single file with Y labels
        dat = Brain_Data(data=str(tmpdir.join('data.nii.gz')), Y=y)

        # Test concatenate
        out = Brain_Data([x for x in dat])
        assert isinstance(out, Brain_Data)
        assert len(out) == len(dat)

        # Test to_nifti
        d = dat.to_nifti()
        assert d.shape[0:3] == shape_3d

        # Test load nibabel
        assert Brain_Data(d)

        # Test shape
        assert dat.shape() == shape_2d

        # Test Mean
        assert dat.mean().shape()[0] == shape_2d[1]

        # Test Std
        assert dat.std().shape()[0] == shape_2d[1]

        # Test add
        new = dat + dat
        assert new.shape() == shape_2d

        # Test subtract
        new = dat - dat
        assert new.shape() == shape_2d

        # Test multiply
        new = dat * dat
        assert new.shape() == shape_2d

        # Test Indexing
        index = [0, 3, 1]
        assert len(dat[index]) == len(index)
        index = range(4)
        assert len(dat[index]) == len(index)
        index = dat.Y == 1
        assert len(dat[index.values.flatten()]) == index.values.sum()
        assert len(dat[index]) == index.values.sum()
        assert len(dat[:3]) == 3

        # Test Iterator
        x = [x for x in dat]
        assert len(x) == len(dat)
        assert len(x[0].data.shape) == 1

        # Test T-test
        out = dat.ttest()
        assert out['t'].shape()[0] == shape_2d[1]

        # # Test T-test - permutation method
        # out = dat.ttest(threshold_dict={'permutation': 'tfce',
        #                                 'n_permutations': 50, 'n_jobs': 1})
        # assert out['t'].shape()[0] == shape_2d[1]

        # Test Regress
        dat.X = pd.DataFrame({'Intercept': np.ones(len(dat.Y)),
                              'X1': np.array(dat.Y).flatten()}, index=None)

        # Standard OLS
        out = dat.regress()
        assert type(out['beta'].data) == np.ndarray
        assert type(out['t'].data) == np.ndarray
        assert type(out['p'].data) == np.ndarray
        assert type(out['residual'].data) == np.ndarray
        assert type(out['df'].data) == np.ndarray
        assert out['beta'].shape() == (2, shape_2d[1])
        assert out['t'][1].shape()[0] == shape_2d[1]

        # Robust OLS
        out = dat.regress(mode='robust')
        assert type(out['beta'].data) == np.ndarray
        assert type(out['t'].data) == np.ndarray
        assert type(out['p'].data) == np.ndarray
        assert type(out['residual'].data) == np.ndarray
        assert type(out['df'].data) == np.ndarray
        assert out['beta'].shape() == (2, shape_2d[1])
        assert out['t'][1].shape()[0] == shape_2d[1]

        # Test threshold
        i = 1
        tt = threshold(out['t'][i], out['p'][i], .05)
        assert isinstance(tt, Brain_Data)

        # Test write
        dat.write(os.path.join(str(tmpdir.join('test_write.nii'))))
        assert Brain_Data(os.path.join(str(tmpdir.join('test_write.nii'))))

        # Test append
        assert dat.append(dat).shape()[0] == shape_2d[0] * 2

        # Test distance
        distance = dat.distance(method='euclidean')
        assert isinstance(distance, Adjacency)
        assert distance.square_shape()[0] == shape_2d[0]

        # Test predict: SVM with 2-fold cross-validation
        stats = dat.predict(algorithm='svm',
                            cv_dict={'type': 'kfolds', 'n_folds': 2},
                            plot=False, **{'kernel': "linear"})

        # SVM with Platt scaling; this will output probabilities of each class
        stats = dat.predict(algorithm='svm', cv_dict=None, plot=False,
                            **{'kernel': 'linear', 'probability': True})
        assert isinstance(stats['weight_map'], Brain_Data)

        # Logistic classification, with 2-fold cross-validation
        stats = dat.predict(algorithm='logistic',
                            cv_dict={'type': 'kfolds', 'n_folds': 2},
                            plot=False)
        assert isinstance(stats['weight_map'], Brain_Data)

        # Ridge classification
        stats = dat.predict(algorithm='ridgeClassifier', cv_dict=None, plot=False)
        assert isinstance(stats['weight_map'], Brain_Data)

        # Ridge
        stats = dat.predict(algorithm='ridge',
                            cv_dict={'type': 'kfolds', 'n_folds': 2,
                                     'subject_id': holdout},
                            plot=False, **{'alpha': .1})

        # Lasso
        stats = dat.predict(algorithm='lasso',
                            cv_dict={'type': 'kfolds', 'n_folds': 2,
                                     'stratified': dat.Y},
                            plot=False, **{'alpha': .1})

        # PCR
        stats = dat.predict(algorithm='pcr', cv_dict=None, plot=False)

        # Test Similarity
        r = dat.similarity(stats['weight_map'])
        assert len(r) == shape_2d[0]
        r2 = dat.similarity(stats['weight_map'].to_nifti())
        assert len(r2) == shape_2d[0]
        r = dat.similarity(stats['weight_map'], method='dot_product')
        assert len(r) == shape_2d[0]
        r = dat.similarity(stats['weight_map'], method='cosine')
        assert len(r) == shape_2d[0]
        r = dat.similarity(dat, method='correlation')
        assert r.shape == (dat.shape()[0], dat.shape()[0])
        r = dat.similarity(dat, method='dot_product')
        assert r.shape == (dat.shape()[0], dat.shape()[0])
        r = dat.similarity(dat, method='cosine')
        assert r.shape == (dat.shape()[0], dat.shape()[0])

        # Test apply_mask - might move part of this to the mask test suite
        s1 = create_sphere([12, 10, -8], radius=10)
        assert isinstance(s1, nb.Nifti1Image)
        masked_dat = dat.apply_mask(s1)
        assert masked_dat.shape()[1] == np.sum(s1.get_data() != 0)

        # Test extract_roi
        mask = create_sphere([12, 10, -8], radius=10)
        assert len(dat.extract_roi(mask)) == shape_2d[0]

        # Test r_to_z
        z = dat.r_to_z()
        assert z.shape() == dat.shape()

        # Test copy
        d_copy = dat.copy()
        assert d_copy.shape() == dat.shape()

        # Test detrend
        detrend = dat.detrend()
        assert detrend.shape() == dat.shape()

        # Test standardize
        s = dat.standardize()
        assert s.shape() == dat.shape()
        assert np.isclose(np.sum(s.mean().data), 0, atol=.1)
        s = dat.standardize(method='zscore')
        assert s.shape() == dat.shape()
        assert np.isclose(np.sum(s.mean().data), 0, atol=.1)

        # Test Sum
        s = dat.sum()
        assert s.shape() == dat[1].shape()

        # Test Groupby
        s1 = create_sphere([12, 10, -8], radius=10)
        s2 = create_sphere([22, -2, -22], radius=10)
        mask = Brain_Data([s1, s2])
        d = dat.groupby(mask)
        assert isinstance(d, Groupby)

        # Test Aggregate
        mn = dat.aggregate(mask, 'mean')
        assert isinstance(mn, Brain_Data)
        assert len(mn.shape()) == 1

        # Test Threshold
        s1 = create_sphere([12, 10, -8], radius=10)
        s2 = create_sphere([22, -2, -22], radius=10)
        mask = Brain_Data(s1) * 5
        mask = mask + Brain_Data(s2)
        m1 = mask.threshold(upper=.5)
        m2 = mask.threshold(upper=3)
        m3 = mask.threshold(upper='98%')
        m4 = Brain_Data(s1) * 5 + Brain_Data(s2) * -.5
        m4 = mask.threshold(upper=.5, lower=-.3)
        assert np.sum(m1.data > 0) > np.sum(m2.data > 0)
        assert np.sum(m1.data > 0) == np.sum(m3.data > 0)
        assert np.sum(m4.data[(m4.data > -.3) & (m4.data < .5)]) == 0
        assert np.sum(m4.data[(m4.data < -.3) | (m4.data > .5)]) > 0

        # Test Regions
        r = mask.regions(min_region_size=10)
        m1 = Brain_Data(s1)
        m2 = r.threshold(1, binarize=True)
        # assert len(r) == 2
        assert len(np.unique(r.to_nifti().get_data())) == 2
        diff = m2 - m1
        assert np.sum(diff.data) == 0

        # Test Bootstrap
        masked = dat.apply_mask(create_sphere(radius=10, coordinates=[0, 0, 0]))
        n_samples = 3
        b = masked.bootstrap('mean', n_samples=n_samples)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('std', n_samples=n_samples)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples, plot=False)
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples, plot=False,
                             cv_dict={'type': 'kfolds', 'n_folds': 3})
        assert isinstance(b['Z'], Brain_Data)
        b = masked.bootstrap('predict', n_samples=n_samples,
                             save_weights=True, plot=False)
        assert len(b['samples']) == n_samples

        # Test decompose
        n_components = 3
        stats = dat.decompose(algorithm='pca', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='ica', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        # Shift data to be non-negative for NNMF
        dat.data = dat.data + 2
        dat.data[dat.data < 0] = 0
        stats = dat.decompose(algorithm='nnmf', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='fa', axis='voxels',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='pca', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='ica', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        # Shift data to be non-negative for NNMF
        dat.data = dat.data + 2
        dat.data[dat.data < 0] = 0
        stats = dat.decompose(algorithm='nnmf', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        stats = dat.decompose(algorithm='fa', axis='images',
                              n_components=n_components)
        assert n_components == len(stats['components'])
        assert stats['weights'].shape == (len(dat), n_components)

        # Test Hyperalignment Method
        sim = Simulator()
        y = [0, 1]
        n_reps = 10
        s1 = create_sphere([0, 0, 0], radius=3)
        d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
        d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
        d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)

        # Test procrustes using align
        data = [d1, d2, d3]
        out = align(data, method='procrustes')
        assert len(data) == len(out['transformed'])
        assert len(data) == len(out['transformation_matrix'])
        assert data[0].shape() == out['common_model'].shape()
        centered = d1.data - np.mean(d1.data, 0)
        transformed = (np.dot(centered / np.linalg.norm(centered),
                              out['transformation_matrix'][0]) * out['scale'][0])
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - transformed), decimal=5)

        # Test deterministic SRM on Brain_Data
        bout = d1.align(out['common_model'], method='deterministic_srm')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed))

        # Test probabilistic SRM on Brain_Data
        bout = d1.align(out['common_model'], method='probabilistic_srm')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed))

        # Test procrustes on Brain_Data
        bout = d1.align(out['common_model'], method='procrustes')
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[1] == bout['transformation_matrix'].shape[0]
        centered = d1.data - np.mean(d1.data, 0)
        btransformed = (np.dot(centered / np.linalg.norm(centered),
                               bout['transformation_matrix']) * bout['scale'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed), decimal=5)
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - bout['transformed'].data))

        # Test hyperalignment on Brain_Data over time (axis=1)
        sim = Simulator()
        y = [0, 1]
        n_reps = 10
        s1 = create_sphere([0, 0, 0], radius=5)
        d1 = sim.create_data(y, 1, reps=n_reps, output_dir=None).apply_mask(s1)
        d2 = sim.create_data(y, 2, reps=n_reps, output_dir=None).apply_mask(s1)
        d3 = sim.create_data(y, 3, reps=n_reps, output_dir=None).apply_mask(s1)
        data = [d1, d2, d3]

        out = align(data, method='procrustes', axis=1)
        assert len(data) == len(out['transformed'])
        assert len(data) == len(out['transformation_matrix'])
        assert data[0].shape() == out['common_model'].shape()
        centered = data[0].data.T - np.mean(data[0].data.T, 0)
        transformed = (np.dot(centered / np.linalg.norm(centered),
                              out['transformation_matrix'][0]) * out['scale'][0])
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - transformed.T), decimal=5)

        bout = d1.align(out['common_model'], method='deterministic_srm', axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data.T, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed.T))

        bout = d1.align(out['common_model'], method='probabilistic_srm', axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        btransformed = np.dot(d1.data.T, bout['transformation_matrix'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed.T))

        bout = d1.align(out['common_model'], method='procrustes', axis=1)
        assert d1.shape() == bout['transformed'].shape()
        assert d1.shape() == bout['common_model'].shape()
        assert d1.shape()[0] == bout['transformation_matrix'].shape[0]
        centered = d1.data.T - np.mean(d1.data.T, 0)
        btransformed = (np.dot(centered / np.linalg.norm(centered),
                               bout['transformation_matrix']) * bout['scale'])
        np.testing.assert_almost_equal(
            0, np.sum(bout['transformed'].data - btransformed.T), decimal=5)
        np.testing.assert_almost_equal(
            0, np.sum(out['transformed'][0].data - bout['transformed'].data))
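# Usage sketch for the hyperalignment API exercised above. `d1`-`d3` are
# Brain_Data objects over a common mask, and `d_new` is a hypothetical
# held-out subject:
#
#     from nltools.stats import align
#
#     out = align([d1, d2, d3], method='procrustes')
#     common = out['common_model']   # shared space estimated from the group
#     aligned = out['transformed']   # each subject projected into that space
#     new_out = d_new.align(common, method='procrustes')  # project a new subject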
def test_brain_data(tmpdir):
    sim = Simulator()
    r = 10
    sigma = 1
    y = [0, 1]
    n_reps = 3
    output_dir = str(tmpdir)
    sim.create_data(y, sigma, reps=n_reps, output_dir=output_dir)

    shape_3d = (91, 109, 91)
    shape_2d = (6, 238955)
    y = pd.read_csv(os.path.join(str(tmpdir.join('y.csv'))),
                    header=None, index_col=None).T
    holdout = pd.read_csv(os.path.join(str(tmpdir.join('rep_id.csv'))),
                          header=None, index_col=None).T

    flist = glob.glob(str(tmpdir.join('centered*.nii.gz')))

    # Test load list
    dat = Brain_Data(data=flist, Y=y)

    # Test load file
    assert Brain_Data(flist[0])

    # Test to_nifti
    d = dat.to_nifti()
    assert d.shape[0:3] == shape_3d

    # Test load nibabel
    assert Brain_Data(d)

    # Test shape
    assert dat.shape() == shape_2d

    # Test Mean
    assert dat.mean().shape()[0] == shape_2d[1]

    # Test Std
    assert dat.std().shape()[0] == shape_2d[1]

    # Test add
    new = dat + dat
    assert new.shape() == shape_2d

    # Test subtract
    new = dat - dat
    assert new.shape() == shape_2d

    # Test multiply
    new = dat * dat
    assert new.shape() == shape_2d

    # Test Iterator
    x = [x for x in dat]
    assert len(x) == len(dat)
    assert len(x[0].data.shape) == 1

    # Test T-test
    out = dat.ttest()
    assert out['t'].shape()[0] == shape_2d[1]

    # # Test T-test - permutation method
    # out = dat.ttest(threshold_dict={'permutation': 'tfce',
    #                                 'n_permutations': 50, 'n_jobs': 1})
    # assert out['t'].shape()[0] == shape_2d[1]

    # Test Regress
    dat.X = pd.DataFrame({'Intercept': np.ones(len(dat.Y)),
                          'X1': np.array(dat.Y).flatten()}, index=None)
    out = dat.regress()
    assert out['beta'].shape() == (2, shape_2d[1])

    # Test indexing
    assert out['t'][1].shape()[0] == shape_2d[1]

    # Test threshold
    i = 1
    tt = threshold(out['t'][i], out['p'][i], .05)
    assert isinstance(tt, Brain_Data)

    # Test write
    dat.write(os.path.join(str(tmpdir.join('test_write.nii'))))
    assert Brain_Data(os.path.join(str(tmpdir.join('test_write.nii'))))

    # Test append
    assert dat.append(dat).shape()[0] == shape_2d[0] * 2

    # Test distance
    distance = dat.distance(method='euclidean')
    assert isinstance(distance, Adjacency)
    assert distance.square_shape()[0] == shape_2d[0]

    # Test predict: SVM with 2-fold cross-validation
    stats = dat.predict(algorithm='svm',
                        cv_dict={'type': 'kfolds', 'n_folds': 2},
                        plot=False, **{'kernel': "linear"})

    # SVM with Platt scaling; this will output probabilities of each class
    stats = dat.predict(algorithm='svm', cv_dict=None, plot=False,
                        **{'kernel': 'linear', 'probability': True})
    assert isinstance(stats['weight_map'], Brain_Data)

    # Logistic classification, with 2-fold cross-validation
    stats = dat.predict(algorithm='logistic',
                        cv_dict={'type': 'kfolds', 'n_folds': 2},
                        plot=False)
    assert isinstance(stats['weight_map'], Brain_Data)

    # Ridge classification
    stats = dat.predict(algorithm='ridgeClassifier', cv_dict=None, plot=False)
    assert isinstance(stats['weight_map'], Brain_Data)

    # Ridge
    stats = dat.predict(algorithm='ridge',
                        cv_dict={'type': 'kfolds', 'n_folds': 2,
                                 'subject_id': holdout},
                        plot=False, **{'alpha': .1})

    # Lasso
    stats = dat.predict(algorithm='lasso',
                        cv_dict={'type': 'kfolds', 'n_folds': 2,
                                 'stratified': dat.Y},
                        plot=False, **{'alpha': .1})

    # PCR
    stats = dat.predict(algorithm='pcr', cv_dict=None, plot=False)

    # Test Similarity
    r = dat.similarity(stats['weight_map'])
    assert len(r) == shape_2d[0]
    r2 = dat.similarity(stats['weight_map'].to_nifti())
    assert len(r2) == shape_2d[0]

    # Test apply_mask - might move part of this to the mask test suite
    s1 = create_sphere([12, 10, -8], radius=10)
    assert isinstance(s1, nb.Nifti1Image)
    s2 = Brain_Data(s1)
    masked_dat = dat.apply_mask(s1)
    assert masked_dat.shape()[1] == np.sum(s2.data != 0)

    # Test extract_roi
    mask = create_sphere([12, 10, -8], radius=10)
    assert len(dat.extract_roi(mask)) == shape_2d[0]

    # Test r_to_z
    z = dat.r_to_z()
    assert z.shape() == dat.shape()

    # Test copy
    d_copy = dat.copy()
    assert d_copy.shape() == dat.shape()

    # Test detrend
    detrend = dat.detrend()
    assert detrend.shape() == dat.shape()

    # Test standardize
    s = dat.standardize()
    assert s.shape() == dat.shape()
    assert np.isclose(np.sum(s.mean().data), 0, atol=.1)
    s = dat.standardize(method='zscore')
    assert s.shape() == dat.shape()
    assert np.isclose(np.sum(s.mean().data), 0, atol=.1)

    # Test Sum
    s = dat.sum()
    assert s.shape() == dat[1].shape()

    # Test Groupby
    s1 = create_sphere([12, 10, -8], radius=10)
    s2 = create_sphere([22, -2, -22], radius=10)
    mask = Brain_Data([s1, s2])
    d = dat.groupby(mask)
    assert isinstance(d, Groupby)

    # Test Aggregate
    mn = dat.aggregate(mask, 'mean')
    assert isinstance(mn, Brain_Data)
    assert len(mn.shape()) == 1

    # Test Threshold
    s1 = create_sphere([12, 10, -8], radius=10)
    s2 = create_sphere([22, -2, -22], radius=10)
    mask = Brain_Data(s1) * 5
    mask = mask + Brain_Data(s2)
    m1 = mask.threshold(thresh=.5)
    m2 = mask.threshold(thresh=3)
    m3 = mask.threshold(thresh='98%')
    assert np.sum(m1.data > 0) > np.sum(m2.data > 0)
    assert np.sum(m1.data > 0) == np.sum(m3.data > 0)

    # Test Regions
    r = mask.regions(min_region_size=10)
    m1 = Brain_Data(s1)
    m2 = r.threshold(1, binarize=True)
    # assert len(r) == 2
    assert len(np.unique(r.to_nifti().get_data())) == 2
    diff = m2 - m1
    assert np.sum(diff.data) == 0