def test_combine():
    "Test combine()"
    ds1 = datasets.get_rand()
    ds2 = datasets.get_rand()
    ds = combine((ds1, ds2))
    assert_array_equal(ds2['Y'].x, ds['Y'].x[ds1.n_cases:], "Basic combine")

    # combining should tolerate items missing from one of the Datasets
    del ds1['Y']
    del ds2['YCat']
    ds = combine((ds1, ds2))
    assert_array_equal(ds2['Y'].x, ds['Y'].x[ds1.n_cases:],
                       "Combine with missing Var")
    assert_true(np.all(ds1['YCat'] == ds['YCat'][:ds1.n_cases]),
                "Combine with missing Factor")

    # combine NDVar with unequal dimensions: only the shared sensors survive
    ds = datasets.get_rand(utsnd=True)
    y = ds['utsnd']
    y1 = y.sub(sensor=['0', '1', '2', '3'])
    y2 = y.sub(sensor=['1', '2', '3', '4'])
    ds1 = Dataset(y1)
    ds2 = Dataset(y2)
    dsc = combine((ds1, ds2))
    y = dsc['utsnd']
    assert_equal(y.sensor.names, ['1', '2', '3'],
                 "Sensor dimension intersection failed.")
    dims = ('case', 'sensor', 'time')
    ref = np.concatenate((y1.get_data(dims)[:, 1:], y2.get_data(dims)[:, :3]))
    assert_array_equal(y.get_data(dims), ref, "combine utsnd")
def test_timeplot():
    "Test plot.uv.timeplot"
    plot.configure_backend(False, False)
    ds = datasets.get_rand()
    # a two-level sequence variable to plot over
    ds['seq'] = Var(np.arange(2).repeat(30))
    plot.uv.Timeplot('Y', 'B', 'seq', match='rm', ds=ds)
    plt.close('all')
def test_clusters():
    "test plot.uts cluster plotting functions"
    plot.configure_backend(False, False)
    ds = datasets.get_rand()
    A = ds['A']
    B = ds['B']
    Y = ds['uts']

    # fixed effects model
    res = testnd.anova(Y, A * B)
    fig = plot.UTSClusters(res, title="Fixed Effects Model")
    fig.close()

    # random effects model:
    subject = Factor(range(15), tile=4, random=True, name='subject')
    res = testnd.anova(Y, A * B * subject, samples=2)
    fig = plot.UTSClusters(res, title="Random Effects Model")
    fig.close()

    # plot UTSStat
    fig = plot.UTSStat(Y, A % B, match=subject)
    fig.set_clusters(res.clusters)
    fig.close()
    fig = plot.UTSStat(Y, A, Xax=B, match=subject)
    fig.close()
def test_histogram():
    "Test plot.uv.histogram"
    plot.configure_backend(False, False)
    ds = datasets.get_rand()
    # without and with within-subject matching
    plot.uv.Histogram('Y', 'A%B', ds=ds)
    plot.uv.Histogram('Y', 'A%B', match='rm', ds=ds)
    plt.close('all')
def test_lmfitter():
    "Test the _nd_anova class"
    ds = datasets.get_rand()

    # independent, residuals vs. Hopkins
    y = ds['uts'].x
    x = ds.eval("A * B")
    lm = _nd_anova(x)
    f_maps = lm.map(y)
    p_maps = lm.p_maps(f_maps)

    x_full = ds.eval("A * B + ind(A%B)")
    lm_full = _nd_anova(x_full)
    f_maps_full = lm_full.map(y)
    # BUG FIX: was lm_full.p_maps(f_maps) — the full model's p-maps must be
    # derived from its own F-maps, not from the reduced model's. (The result
    # was the same only because f_maps == f_maps_full is asserted below.)
    p_maps_full = lm_full.p_maps(f_maps_full)

    for f, f_full in izip(f_maps, f_maps_full):
        assert_allclose(f, f_full)
    for p, p_full in izip(p_maps, p_maps_full):
        assert_allclose(p, p_full)

    # repeated measures: compare with univariate anova at the first sample
    x = ds.eval("A * B * rm")
    lm = _nd_anova(x)
    f_maps = lm.map(y)
    p_maps = lm.p_maps(f_maps)
    aov = test.anova(y[:, 0], x)
    for f_test, f_map, p_map in izip(aov.f_tests, f_maps, p_maps):
        assert_almost_equal(f_map[0], f_test.F)
        assert_almost_equal(p_map[0], f_test.p)
def test_corr():
    "Test testnd.corr()"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(True)

    # inject a correlation between Y and a time/sensor window of utsnd
    Y = ds['Y']
    utsnd = ds['utsnd']
    # (removed a stray no-op statement `utsnd.x.shape` here)
    utsnd.x[:, 3:5, 50:65] += Y.x[:, None, None]

    res = testnd.corr('utsnd', 'Y', 'rm', ds=ds)
    repr(res)
    p = plot.Array(res)
    p.close()

    res = testnd.corr('utsnd', 'Y', 'rm', ds=ds, samples=10, pmin=0.05)
    p = plot.Array(res)
    p.close()

    # persistence
    string = pickle.dumps(res, protocol=pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)
    assert_dataobj_equal(res.p, res_.p)

    # NaN: correlation with a constant vector should come out as 0
    r = _testnd._corr(np.arange(10), np.zeros(10))
    assert_equal(r, 0)
def test_plot_array():
    "Test plot.Array"
    # NOTE(review): a second `def test_plot_array` appears later in this file
    # and shadows this one at import time — consider renaming one of them.
    plot.configure_backend(False, False)
    ds = datasets.get_rand(utsnd=True)
    fig = plot.Array('utsnd', 'A%B', ds=ds)
    fig.close()
    fig = plot.Array('utsnd', ds=ds)
    fig.close()
def test_uts():
    "test plot.UTS plotting function"
    plot.configure_backend(False, False)
    ds = datasets.get_rand()
    fig = plot.UTS('uts', ds=ds)
    fig.close()
    fig = plot.UTS('uts', 'A%B', ds=ds)
    fig.close()
def test_frequencies(): "test table.frequencies" ds = datasets.get_rand() A = ds['A'] B = ds['B'] Cat = ds['YCat'] print table.frequencies(Cat, A) print table.frequencies(Cat, A % B) print table.frequencies(Cat % A, B)
def test_plot_array():
    "Test plot.TopoArray"
    # NOTE(review): duplicates the name of an earlier `test_plot_array` in
    # this file (which tests plot.Array); this later definition shadows it.
    plot.configure_backend(False, False)
    ds = datasets.get_rand(utsnd=True)
    fig = plot.TopoArray('utsnd', ds=ds)
    fig.close()
    fig = plot.TopoArray('utsnd', ds=ds, vmax=0.2, w=2)
    fig.close()
    fig = plot.TopoArray('utsnd', 'A%B', ds=ds, axw=4)
    fig.close()
def test_plot_topomap():
    "Test plot.Topomap"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(utsnd=True)
    # summarize a time window into a single topography
    topo = ds.eval('utsnd.summary(time=(0.075, 0.125))')
    fig = plot.Topomap(topo, ds=ds)
    fig.close()
    fig = plot.Topomap(topo, ds=ds, vmax=0.2, w=2)
    fig.close()
    fig = plot.Topomap(topo, 'A%B', ds=ds, axw=2)
    fig.close()
def test_ols():
    "Test NDVar.ols() method"
    from rpy2.robjects import r

    # simulate data: add a Hanning-windowed effect of Y to the uts signal
    ds = datasets.get_rand(True)
    n_times = len(ds['uts'].time)
    x = np.zeros(n_times)
    x[20:40] = np.hanning(20)
    utsc = ds.eval("uts.copy()")
    utsc.x += ds['Y'].x[:, None] * x[None, :]
    ds_ = Dataset()
    ds_['x'] = Var(ds['Y'].x)
    ds_['x2'] = ds_['x'] + np.random.normal(0, 1, ds.n_cases)

    # ols regression with one and two predictors
    m1 = ds_['x']
    b1 = utsc.ols(m1)
    res1 = utsc.residuals(m1)
    m2 = ds_.eval("x + x2")
    b2 = utsc.ols(m2)
    res2 = utsc.residuals(m2)

    # compare betas and residuals with R's lm(), one time point at a time
    for i in xrange(n_times):
        ds_['y'] = Var(utsc.x[:, i])
        ds_.to_r('ds')
        # 1 predictor
        r('lm1 <- lm(y ~ x, ds)')
        beta = r('coef(lm1)')[1]
        assert_almost_equal(b1.x[0, i], beta)
        res = r('residuals(lm1)')
        assert_array_almost_equal(res1.x[:, i], res)
        # 2 predictors
        r('lm2 <- lm(y ~ x + x2, ds)')
        beta = r('coef(lm2)')[1:]
        assert_array_almost_equal(b2.x[:, i], beta)
        res = r('residuals(lm2)')
        assert_array_almost_equal(res2.x[:, i], res)

    # 3d: same comparison on a single sensor of the 3-d NDVar
    utsnd = ds['utsnd']
    ds_['utsnd'] = utsnd
    b1 = ds_.eval("utsnd.ols(x)")
    res1 = ds_.eval("utsnd.residuals(x)")
    for i in xrange(len(b1.time)):
        ds_['y'] = Var(utsnd.x[:, 1, i])
        ds_.to_r('ds')
        # 1 predictor
        r('lm1 <- lm(y ~ x, ds)')
        beta = r('coef(lm1)')[1]
        assert_almost_equal(b1.x[0, 1, i], beta)
        res = r('residuals(lm1)')
        assert_array_almost_equal(res1.x[:, 1, i], res)
def test_scatterplot():
    "Test plot.uv.corrplot and plot.uv.regplot"
    # FIX: docstring typo ("lot.uv.regplot" -> "plot.uv.regplot").
    # NOTE(review): a nearly identical `test_scatterplot` appears later in
    # this file and shadows this one — consider removing one of them.
    plot.configure_backend(False, False)
    ds = datasets.get_rand()
    # a covariate correlated with Y plus noise
    ds['cov'] = ds['Y'] + np.random.normal(0, 1, (60, ))
    plot.uv.Correlation('Y', 'cov', ds=ds)
    plot.uv.Correlation('Y', 'cov', 'A%B', ds=ds)
    plot.uv.Regression('Y', 'cov', ds=ds)
    plot.uv.Regression('Y', 'cov', 'A%B', ds=ds)
    plt.close('all')
def test_scatterplot():
    "Test plot.uv.corrplot and plot.uv.regplot"
    # FIX: docstring typo ("lot.uv.regplot" -> "plot.uv.regplot").
    # NOTE(review): duplicates an earlier `test_scatterplot` in this file
    # (bodies differ only in whitespace); the earlier one never runs.
    plot.configure_backend(False, False)
    ds = datasets.get_rand()
    # a covariate correlated with Y plus noise
    ds['cov'] = ds['Y'] + np.random.normal(0, 1, (60,))
    plot.uv.Correlation('Y', 'cov', ds=ds)
    plot.uv.Correlation('Y', 'cov', 'A%B', ds=ds)
    plot.uv.Regression('Y', 'cov', ds=ds)
    plot.uv.Regression('Y', 'cov', 'A%B', ds=ds)
    plt.close('all')
def test_plot_butterfly():
    "Test plot.TopoButterfly"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(utsnd=True)
    fig = plot.TopoButterfly('utsnd', ds=ds)
    fig.close()
    fig = plot.TopoButterfly('utsnd', ds=ds, vmax=0.2, w=2)
    fig.close()
    fig = plot.TopoButterfly('utsnd', 'A%B', ds=ds, axw=2)
    fig.close()
    # marking sensors by index and by name
    fig = plot.TopoButterfly('utsnd', mark=[1, 2], ds=ds)
    fig.close()
    fig = plot.TopoButterfly('utsnd', mark=['1', '2'], ds=ds)
    fig.close()
def test_print(): "Run the string representation methods" ds = datasets.get_rand() print ds print repr(ds) A = ds['A'] print A print repr(A) Y = ds['Y'] print Y print repr(Y) Ynd = ds['uts'] print Ynd print repr(Ynd)
def test_io_pickle():
    "Test io by pickling"
    ds = datasets.get_rand()
    ds.info['info'] = "Some very useful information about the Dataset"
    tempdir = tempfile.mkdtemp()
    try:
        # round-trip the Dataset through a pickle file on disk
        dest = os.path.join(tempdir, 'test.pickled')
        with open(dest, 'wb') as fid:
            pickle.dump(ds, fid, protocol=pickle.HIGHEST_PROTOCOL)
        with open(dest, 'rb') as fid:
            ds2 = pickle.load(fid)
    finally:
        shutil.rmtree(tempdir)

    assert_dataset_equal(ds, ds2)
def test_multi():
    "Test plot.SensorMaps"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(utsnd=True)
    Y = ds['utsnd']
    fig = plot.SensorMaps(Y)

    # selection should survive a set/get round-trip
    roi = [1, 2]
    fig.set_selection(roi)
    roi2 = fig.get_selection()
    test_range = np.arange(3)
    assert_array_equal(test_range[roi2], test_range[roi],
                       "ROI changed after set/get")
    fig.close()
def test_ttest_ind():
    "Test testnd.ttest_ind()"
    ds = datasets.get_rand()

    # basic
    res = testnd.ttest_ind('uts', 'A', 'a1', 'a0', ds=ds)
    repr(res)
    assert_less(res.p_uncorrected.min(), 0.05)
    # persistence
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    repr(res_)
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)

    # cluster-based variant
    res = testnd.ttest_ind('uts', 'A', 'a1', 'a0', ds=ds, tail=1, samples=1)
    # persistence
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)
def test_map2d():
    "Test plot.SensorMap2d"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(utsnd=True)
    Y = ds['utsnd']
    fig = plot.SensorMap2d(Y)

    # plot attributes
    fig.set_label_color('g')
    fig.set_label_text('idx')

    # connectivity on and off
    fig.show_connectivity()
    fig.show_connectivity(None)

    # mark sensors, then clear the markers
    fig.mark_sensors([1, 2])
    fig.mark_sensors([0])
    fig.remove_markers()

    fig.close()
def test_ndvar():
    "Test the NDVar class"
    ds = datasets.get_rand(utsnd=True)
    x = ds['utsnd']

    # slicing: dimensionality depends on how the sensor index is given
    assert_raises(KeyError, x.sub, sensor='5')
    assert_equal(x.sub(sensor='4').ndim, 2)
    assert_equal(x.sub(sensor=['4']).ndim, 3)
    assert_equal(x.sub(case=1, sensor='4').ndim, 1)

    # baseline correction
    x_bl = x - x.summary(time=(None, 0))
    # assert that the baseline is 0
    bl = x_bl.summary('case', 'sensor', time=(None, 0))
    ok_(abs(bl) < 1e-10, "Baseline correction")

    # NDVar as index
    sens_mean = x.mean(('case', 'time'))
    idx = sens_mean > 0
    pos = sens_mean[idx]
    assert_array_equal(pos.x > 0, True)
def test_ttest_rel():
    "Test testnd.ttest_rel()"
    ds = datasets.get_rand()

    # basic
    res = testnd.ttest_rel('uts', 'A%B', ('a1', 'b1'), ('a0', 'b0'), 'rm',
                           ds=ds)
    repr(res)

    # persistence
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    repr(res_)
    assert_equal(repr(res_), repr(res))
    assert_dataobj_equal(res.p_uncorrected, res_.p_uncorrected)

    # collapsing cells
    res2 = testnd.ttest_rel('uts', 'A', 'a1', 'a0', 'rm', ds=ds)
    assert_less(res2.p_uncorrected.min(), 0.05)
    assert_equal(res2.n, res.n)
def test_aggregate():
    "Test aggregation methods"
    ds = datasets.get_rand()

    # don't handle inconsistencies silently
    assert_raises(ValueError, ds.aggregate, 'A%B')

    dsa = ds.aggregate('A%B', drop_bad=True)
    assert_array_equal(dsa['n'], [15, 15, 15, 15])
    idx1 = ds.eval("logical_and(A=='a0', B=='b0')")
    assert_equal(dsa['Y', 0], ds['Y', idx1].mean())

    # unequal cell counts
    ds = ds[:-3]
    dsa = ds.aggregate('A%B', drop_bad=True)
    assert_array_equal(dsa['n'], [15, 15, 15, 12])
    idx1 = ds.eval("logical_and(A=='a0', B=='b0')")
    assert_equal(dsa['Y', 0], ds['Y', idx1].mean())

    # equal_count should trim all cells to the smallest count
    dsa = ds.aggregate('A%B', drop_bad=True, equal_count=True)
    assert_array_equal(dsa['n'], [12, 12, 12, 12])
    idx1_12 = np.logical_and(idx1, idx1.cumsum() <= 12)
    assert_equal(dsa['Y', 0], ds['Y', idx1_12].mean())
def test_stat():
    "test plot.UTSStat plotting function"
    plot.configure_backend(False, False)
    ds = datasets.get_rand()
    fig = plot.UTSStat('uts', ds=ds)
    fig.close()
    fig = plot.UTSStat('uts', 'A%B', ds=ds)
    fig.close()
    fig = plot.UTSStat('uts', 'A', Xax='B', ds=ds)
    fig.close()
    fig = plot.UTSStat('uts', 'A%B', 'rm', sub="rm.isin(('R00', 'R01'))",
                       ds=ds)
    fig.close()

    # clusters: overlay with and without permutation samples
    sds = ds.sub("B == 'b0'")
    res = testnd.ttest_rel('uts', 'A', 'a1', 'a0', match='rm', ds=sds,
                           samples=0, pmin=0.05, mintime=0.02)
    fig = plot.UTSStat('uts', 'A', clusters=res.clusters, ds=ds)
    fig.close()
    res = testnd.ttest_rel('uts', 'A', 'a1', 'a0', match='rm', ds=sds,
                           samples=100, pmin=0.05, mintime=0.02)
    fig = plot.UTSStat('uts', 'A', clusters=res.clusters, ds=ds)
    fig.close()
def test_anova_incremental():
    "Test testnd.anova() with incremental f-tests"
    ds = datasets.get_rand()
    # dropping a few cases makes the design unbalanced, forcing the
    # incremental code path
    testnd.anova('uts', 'A*B', ds=ds[3:], pmin=0.05, samples=10)
def test_t_contrast():
    "Test testnd.t_contrast_rel() and its contrast-parsing helpers"
    ds = datasets.get_rand()

    # test aux functions
    y = np.arange(9.).reshape((3, 3))
    indexes = {'a': 0, 'b': 1, 'c': 2}

    contrast = "+sum(a>c, b>c)"
    contrast_ = _testnd._parse_t_contrast(contrast)
    assert_equal(contrast_, ('func', '+', np.sum,
                             [('comp', None, 'a', 'c'),
                              ('comp', None, 'b', 'c')]))

    contrast = "+sum(a>*, b>*)"
    contrast_ = _testnd._parse_t_contrast(contrast)
    assert_equal(contrast_, ('func', '+', np.sum,
                             [('comp', None, 'a', '*'),
                              ('comp', None, 'b', '*')]))
    _, cells = _testnd._t_contrast_rel_properties(contrast_)
    pc, mc = _testnd._t_contrast_rel_expand_cells(cells, ('a', 'b', 'c'))
    data = _testnd._t_contrast_rel_data(y, indexes, pc, mc)
    assert_array_equal(data['a'], np.arange(3.))
    assert_array_equal(data['*'], y.mean(0))

    assert_raises(ValueError, _testnd._t_contrast_rel_expand_cells, cells,
                  ('a|c', 'b|c', 'c|c'))

    # simple contrast: equivalent to a paired t-test
    res = testnd.t_contrast_rel('uts', 'A', 'a1>a0', 'rm', ds=ds, samples=10,
                                pmin=0.05)
    repr(res)
    res_ = testnd.ttest_rel('uts', 'A', 'a1', 'a0', 'rm', ds=ds)
    assert_array_equal(res.t.x, res_.t.x)
    assert_in('samples', repr(res))

    # complex contrast: min over two paired comparisons
    res = testnd.t_contrast_rel('uts', 'A%B', 'min(a0|b0>a1|b0, a0|b1>a1|b1)',
                                'rm', ds=ds, samples=10, pmin=0.05)
    res_b0 = testnd.ttest_rel('uts', 'A%B', ('a0', 'b0'), ('a1', 'b0'), 'rm',
                              ds=ds)
    res_b1 = testnd.ttest_rel('uts', 'A%B', ('a0', 'b1'), ('a1', 'b1'), 'rm',
                              ds=ds)
    assert_array_equal(res.t.x, np.min([res_b0.t.x, res_b1.t.x], axis=0))

    # persistence
    string = pickle.dumps(res, protocol=pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))
    assert_dataobj_equal(res.p, res_.p)

    # contrast with "*"
    contrast_star = '+min(a1|b0>a0|*, a1|b1>a0|*)'
    res = testnd.t_contrast_rel('uts', 'A%B', contrast_star, 'rm', ds=ds)
def test_ttest_1samp():
    "Test testnd.ttest_1samp()"
    ds = datasets.get_rand(True)

    # no clusters
    res0 = testnd.ttest_1samp('uts', sub="A == 'a0'", ds=ds)
    assert_less(res0.p_uncorrected.min(), 0.05)
    repr0 = repr(res0)
    assert_in("'uts'", repr0)
    assert_not_in('clusters', repr0)
    assert_not_in('mintime', repr0)

    # clusters without resampling
    res1 = testnd.ttest_1samp('uts', sub="A == 'a0'", ds=ds, samples=0,
                              pmin=0.05, tstart=0, tstop=0.6, mintime=0.05)
    assert_equal(res1.clusters.n_cases, 2)
    assert_not_in('p', res1.clusters)
    repr1 = repr(res1)
    assert_in('clusters', repr1)
    assert_in('samples', repr1)
    assert_in('mintime', repr1)

    # persistence
    string = pickle.dumps(res1, pickle.HIGHEST_PROTOCOL)
    res1_ = pickle.loads(string)
    assert_equal(repr(res1_), repr1)
    assert_dataobj_equal(res1.p_uncorrected, res1_.p_uncorrected)

    # clusters with resampling
    res2 = testnd.ttest_1samp('uts', sub="A == 'a0'", ds=ds, samples=10,
                              pmin=0.05, tstart=0, tstop=0.6, mintime=0.05)
    assert_equal(res2.clusters.n_cases, 2)
    assert_equal(res2.samples, 10)
    assert_in('p', res2.clusters)
    repr2 = repr(res2)
    assert_in('samples', repr2)

    # clusters with permutations (few enough cases for a full permutation
    # test, indicated by samples == -1)
    dss = ds.sub("logical_and(A=='a0', B=='b0')")[:8]
    res3 = testnd.ttest_1samp('uts', sub="A == 'a0'", ds=dss, samples=10000,
                              pmin=0.05, tstart=0, tstop=0.6, mintime=0.05)
    assert_equal(res3.clusters.n_cases, 2)
    assert_equal(res3.samples, -1)
    assert_less(res3.clusters['p'].x.min(), 0.05)
    repr3 = repr(res3)
    assert_in('samples', repr3)

    # TFCE properties
    res = testnd.ttest_1samp('utsnd', sub="A == 'a0'", ds=ds, samples=1)
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res = pickle.loads(string)
    tfce_clusters = res._clusters(pmin=0.05)
    peaks = res.find_peaks()
    assert_equal(tfce_clusters.eval("p.min()"), peaks.eval("p.min()"))
    masked = res.masked_parameter_map(pmin=0.05)
    assert_array_equal(masked.abs().x <= res.t.abs().x, True)
def test_ndvar_summary_methods():
    "Test NDVar methods for summarizing data over axes"
    ds = datasets.get_rand(utsnd=True)
    x = ds['utsnd']

    # axis specifications and index NDVars used below
    dim = 'sensor'
    axis = x.get_axis(dim)
    dims = ('case', 'sensor')
    axes = tuple(x.get_axis(d) for d in dims)
    idx = x > 0
    x0 = x[0]
    idx0 = idx[0]
    xsub = x.sub(time=(0, 0.5))
    idxsub = xsub > 0
    idx1d = x.mean(('case', 'time')) > 0

    # numpy functions
    assert_equal(x.any(), x.x.any())
    assert_array_equal(x.any(dim), x.x.any(axis))
    assert_array_equal(x.any(dims), x.x.any(axes))
    assert_array_equal(x.any(idx0), [x_[idx0.x].any() for x_ in x.x])
    assert_array_equal(x.any(idx), [x_[i].any() for x_, i in izip(x.x, idx.x)])
    assert_array_equal(x0.any(idx0), x0.x[idx0.x].any())
    assert_array_equal(x.any(idxsub), xsub.any(idxsub))
    assert_array_equal(x.any(idx1d), x.x[:, idx1d.x].any(1))

    assert_equal(x.max(), x.x.max())
    assert_array_equal(x.max(dim), x.x.max(axis))
    assert_array_equal(x.max(dims), x.x.max(axes))
    assert_array_equal(x.max(idx0), [x_[idx0.x].max() for x_ in x.x])
    assert_array_equal(x.max(idx), [x_[i].max() for x_, i in izip(x.x, idx.x)])
    assert_array_equal(x0.max(idx0), x0.x[idx0.x].max())
    assert_array_equal(x.max(idxsub), xsub.max(idxsub))
    assert_array_equal(x.max(idx1d), x.x[:, idx1d.x].max(1))

    assert_equal(x.mean(), x.x.mean())
    assert_array_equal(x.mean(dim), x.x.mean(axis))
    assert_array_equal(x.mean(dims), x.x.mean(axes))
    assert_array_equal(x.mean(idx0), [x_[idx0.x].mean() for x_ in x.x])
    assert_array_equal(x.mean(idx),
                       [x_[i].mean() for x_, i in izip(x.x, idx.x)])
    assert_array_equal(x0.mean(idx0), x0.x[idx0.x].mean())
    assert_array_equal(x.mean(idxsub), xsub.mean(idxsub))
    assert_array_equal(x.mean(idx1d), x.x[:, idx1d.x].mean(1))

    assert_equal(x.min(), x.x.min())
    assert_array_equal(x.min(dim), x.x.min(axis))
    assert_array_equal(x.min(dims), x.x.min(axes))
    assert_array_equal(x.min(idx0), [x_[idx0.x].min() for x_ in x.x])
    assert_array_equal(x.min(idx), [x_[i].min() for x_, i in izip(x.x, idx.x)])
    assert_array_equal(x0.min(idx0), x0.x[idx0.x].min())
    assert_array_equal(x.min(idxsub), xsub.min(idxsub))
    assert_array_equal(x.min(idx1d), x.x[:, idx1d.x].min(1))

    assert_equal(x.std(), x.x.std())
    assert_array_equal(x.std(dim), x.x.std(axis))
    assert_array_equal(x.std(dims), x.x.std(axes))
    assert_array_equal(x.std(idx0), [x_[idx0.x].std() for x_ in x.x])
    assert_array_equal(x.std(idx), [x_[i].std() for x_, i in izip(x.x, idx.x)])
    assert_array_equal(x0.std(idx0), x0.x[idx0.x].std())
    assert_array_equal(x.std(idxsub), xsub.std(idxsub))
    assert_array_equal(x.std(idx1d), x.x[:, idx1d.x].std(1))

    # non-numpy
    assert_equal(x.rms(), rms(x.x))
    assert_array_equal(x.rms(dim), rms(x.x, axis))
    assert_array_equal(x.rms(dims), rms(x.x, axes))
    assert_array_equal(x.rms(idx0), [rms(x_[idx0.x]) for x_ in x.x])
    assert_array_equal(x.rms(idx), [rms(x_[i]) for x_, i in izip(x.x, idx.x)])
    assert_array_equal(x0.rms(idx0), rms(x0.x[idx0.x]))
    assert_array_equal(x.rms(idxsub), xsub.rms(idxsub))
    assert_array_equal(x.rms(idx1d), rms(x.x[:, idx1d.x], 1))
def test_celltable():
    "Test the Celltable class."
    ds = datasets.get_rand()
    ds['cat'] = Factor('abcd', rep=15)

    ct = Celltable('Y', 'A', ds=ds)
    eq_(ct.n_cases, 60)
    eq_(ct.n_cells, 2)

    ct = Celltable('Y', 'A', match='rm', ds=ds)
    eq_(ct.n_cases, 30)
    eq_(ct.n_cells, 2)

    # cat argument restricts and orders the cells
    ct = Celltable('Y', 'cat', cat=('c', 'b'), ds=ds)
    eq_(ct.n_cases, 30)
    eq_(ct.X[0], 'c')
    eq_(ct.X[-1], 'b')

    ct = Celltable('Y', 'A', match='rm', ds=ds)
    eq_(ct.n_cases, 30)
    assert np.all(ct.groups['a0'] == ct.groups['a1'])

    ct = Celltable('Y', 'cat', match='rm', cat=('c', 'b'), ds=ds)
    eq_(ct.n_cases, 30)
    eq_(ct.X[0], 'c')
    eq_(ct.X[-1], 'b')

    # coercion of numerical X
    X = ds.eval("A == 'a0'")
    ct = Celltable('Y', X, cat=(None, None), ds=ds)
    assert_equal(('False', 'True'), ct.cat)
    assert_array_equal(ct.data['True'], ds['Y', X])

    ct = Celltable('Y', X, cat=(True, False), ds=ds)
    assert_equal(('True', 'False'), ct.cat)
    assert_array_equal(ct.data['True'], ds['Y', X])

    # test coercion of Y
    ct = Celltable(ds['Y'].x, 'A', ds=ds)
    assert_is_instance(ct.Y, np.ndarray)
    ct = Celltable(ds['Y'].x, 'A', ds=ds, coercion=asvar)
    assert_is_instance(ct.Y, Var)

    # test sub
    ds_sub = ds.sub("A == 'a0'")
    ct_sub = Celltable('Y', 'B', ds=ds_sub)
    ct = Celltable('Y', 'B', sub="A == 'a0'", ds=ds)
    assert_dataobj_equal(ct_sub.Y, ct.Y)

    # test sub with rm
    ct_sub = Celltable('Y', 'B', match='rm', ds=ds_sub)
    ct = Celltable('Y', 'B', match='rm', sub="A == 'a0'", ds=ds)
    assert_dataobj_equal(ct_sub.Y, ct.Y)

    # test rm sorting: cases are re-sorted by match factor within cells
    ds = Dataset()
    ds['rm'] = Factor('abc', rep=4)
    ds['Y'] = Var(np.arange(3.).repeat(4))
    ds['X'] = Factor('ab', rep=2, tile=3)
    idx = np.arange(12)
    np.random.shuffle(idx)
    ds = ds[idx]
    ct = Celltable('Y', 'X', 'rm', ds=ds)
    assert_array_equal(ct.match, Factor('abc', tile=2))
    assert_array_equal(ct.Y, np.tile(np.arange(3.), 2))
    assert_array_equal(ct.X, Factor('ab', rep=3))
def test_anova():
    "Test testnd.anova()"
    plot.configure_backend(False, False)
    ds = datasets.get_rand(True)

    # smoke-test the main parameter combinations
    testnd.anova('utsnd', 'A*B', ds=ds)
    for samples in (0, 2):
        logger.info("TEST: samples=%r" % samples)
        testnd.anova('utsnd', 'A*B', ds=ds, samples=samples)
        testnd.anova('utsnd', 'A*B', ds=ds, samples=samples, pmin=0.05)
        testnd.anova('utsnd', 'A*B', ds=ds, samples=samples, tfce=True)

    res = testnd.anova('utsnd', 'A*B*rm', ds=ds, samples=0, pmin=0.05)
    repr(res)
    fig = plot.Array(res)
    fig.close()

    res = testnd.anova('utsnd', 'A*B*rm', ds=ds, samples=2, pmin=0.05)
    repr(res)
    fig = plot.Array(res)
    fig.close()

    # persistence
    string = pickle.dumps(res, protocol=pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_equal(repr(res_), repr(res))

    # threshold-free
    res = testnd.anova('utsnd', 'A*B*rm', ds=ds, samples=10)
    repr(res)
    assert_in('A clusters', res.clusters.info)
    assert_in('B clusters', res.clusters.info)
    assert_in('A x B clusters', res.clusters.info)

    # no clusters
    res = testnd.anova('uts', 'B', sub="A=='a1'", ds=ds, samples=5,
                       pmin=0.05, mintime=0.02)
    repr(res)
    assert_in('v', res.clusters)
    assert_in('p', res.clusters)

    # all effects with clusters
    res = testnd.anova('uts', 'A*B*rm', ds=ds, samples=5, pmin=0.05,
                       tstart=0.1, mintime=0.02)
    assert_equal(set(res.clusters['effect'].cells), set(res.effects))

    # some effects with clusters, some without
    res = testnd.anova('uts', 'A*B*rm', ds=ds, samples=5, pmin=0.05,
                       tstart=0.37, mintime=0.02)
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res_ = pickle.loads(string)
    assert_dataobj_equal(res.clusters, res_.clusters)

    # test multi-effect results (with persistence)
    # UTS
    res = testnd.anova('uts', 'A*B*rm', ds=ds, samples=5)
    repr(res)
    string = pickle.dumps(res, pickle.HIGHEST_PROTOCOL)
    res = pickle.loads(string)
    tfce_clusters = res._clusters(pmin=0.05)
    peaks = res.find_peaks()
    assert_equal(tfce_clusters.eval("p.min()"), peaks.eval("p.min()"))
    unmasked = res.f[0]
    masked = res.masked_parameter_map(effect=0, pmin=0.05)
    assert_array_equal(masked.x <= unmasked.x, True)