def test_pickle():
    ds = datasets.get_uts()

    ds_2 = load.unpickle(file_path('uts-py2.pickle'))
    assert_dataobj_equal(ds_2, ds)
    ds_3 = load.unpickle(file_path('uts-py3.pickle'))
    assert_dataobj_equal(ds_3, ds)

def test_pickle():
    ds = datasets.get_uts()
    decimal = None if IS_OSX else 15

    ds_2 = load.unpickle(file_path('uts-py2.pickle'))
    assert_dataobj_equal(ds_2, ds, decimal)
    ds_3 = load.unpickle(file_path('uts-py3.pickle'))
    assert_dataobj_equal(ds_3, ds, decimal)

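# A minimal round-trip sketch of the API these tests exercise, assuming
# eelbrain's save.pickle/load.unpickle pair; the 'uts.pickle' file name is
# illustrative, not taken from the test suite.
ds = datasets.get_uts()
save.pickle(ds, 'uts.pickle')
ds_restored = load.unpickle('uts.pickle')
assert_dataobj_equal(ds_restored, ds)
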
def _load(self, tstep: float, filename: str, directory: Path):
    path = directory / f'{filename}.pickle'
    x = load.unpickle(path)
    # allow for pre-computed resampled versions
    if isinstance(x, list):
        xs = x
        for x in xs:
            if x.time.tstep == tstep:
                break
        else:
            raise IOError(f"{path.name} does not contain tstep={tstep!r}")
    elif isinstance(x, NDVar):
        if x.time.tstep == tstep:
            pass
        elif x.time.tstep > tstep:
            raise ValueError(f"Requested sampling rate is higher than in file ({1/tstep:g} > {1/x.time.tstep:g})")
        elif self.resample == 'bin':
            x = x.bin(tstep, label='start')
        elif self.resample == 'resample':
            srate = 1 / tstep
            int_srate = int(round(srate))
            srate = int_srate if abs(int_srate - srate) < .001 else srate
            x = resample(x, srate)
        elif self.resample is None:
            raise RuntimeError(f"{path.name} has tstep={x.time.tstep}, not {tstep}")
        else:
            raise RuntimeError(f"resample={self.resample!r}")
    elif not isinstance(x, Dataset):
        raise TypeError(f'{x!r} at {path}')
    return x

def _load(self, path, tmin, tstep, n_samples, code, seed):
    x = load.unpickle(path)
    # allow for pre-computed resampled versions
    if isinstance(x, list):
        xs = x
        for x in xs:
            if x.time.tstep == tstep:
                break
        else:
            raise IOError(f"{os.path.basename(path)} does not contain tstep={tstep!r}")
    # continuous UTS
    if isinstance(x, NDVar):
        if x.time.tstep == tstep:
            pass
        elif self.resample == 'bin':
            x = x.bin(tstep, label='start')
        elif self.resample == 'resample':
            srate = 1 / tstep
            int_srate = int(round(srate))
            srate = int_srate if abs(int_srate - srate) < .001 else srate
            x = resample(x, srate)
        elif self.resample is None:
            raise RuntimeError(f"{os.path.basename(path)} has tstep={x.time.tstep}, not {tstep}")
        else:
            raise RuntimeError(f"resample={self.resample!r}")
        x = pad(x, tmin, nsamples=n_samples)
    # NUTS
    elif isinstance(x, Dataset):
        ds = x
        if code.shuffle in ('permute', 'relocate'):
            rng = numpy.random.RandomState(seed)
            if code.shuffle == 'permute':
                index = ds['permute'].x
                assert index.dtype.kind == 'b'
                values = ds[index, 'value'].x
                rng.shuffle(values)
                ds[index, 'value'] = values
            else:
                rng.shuffle(ds['value'].x)
            code.register_shuffle()
        x = NDVar(numpy.zeros(n_samples), UTS(tmin, tstep, n_samples), name=code.code_with_rand)
        ds = ds[ds['time'] < x.time.tstop]
        for t, v in ds.zip('time', 'value'):
            x[t] = v
    else:
        raise TypeError(f'{x!r} at {path}')
    if code.shuffle in NDVAR_SHUFFLE_METHODS:
        x = shuffle(x, code.shuffle, code.shuffle_band, code.shuffle_angle)
        code.register_shuffle()
    return x

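# Sketch of how the pre-computed list format accepted by both loaders above
# could be produced: the same predictor pickled at several sampling rates.
# The variable names and the 100 Hz / 50 Hz values are illustrative
# assumptions; only the list-of-NDVar layout follows from the
# isinstance(x, list) branch.
predictor = NDVar(numpy.zeros(1000), UTS(0, 0.01, 1000), name='predictor')  # 100 Hz
versions = [predictor, predictor.bin(0.02, label='start')]  # add a 50 Hz version
save.pickle(versions, 'predictor.pickle')
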
def rename(old, new):
    """Renames epoch rejection files."""
    meg_path = os.path.join('/Volumes', 'Backup', 'sufAmb', 'meg')
    subjects = os.listdir(meg_path)
    if '.DS_Store' in subjects:
        subjects.remove('.DS_Store')

    for s in subjects:
        rejection_path = os.path.join(meg_path, s, 'epoch selection')
        if os.path.exists(os.path.join(rejection_path, old)):
            rejection_file = load.unpickle(os.path.join(rejection_path, old))
            rejection_file.save_pickled(os.path.join(rejection_path, new))

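# Hypothetical call; the file names are placeholders, not names from the
# original project.
rename('old-rej.pickled', 'new-rej.pickled')
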
def sig_clusters(dirc=_dirc, prefix='corr', suffix='', pmin=0.05):
    pikld = os.listdir(dirc)
    pikld = [p for p in pikld if p.startswith(prefix)]
    pikld = [p for p in pikld if p.endswith(suffix)]
    pikld = [p.replace('.pickled', '') for p in pikld]
    data = {p: load.unpickle(dirc + p + '.pickled') for p in pikld}
    # only keep results of cluster tests
    data = {d: data[d] for d in data if hasattr(data[d], 'clusters')}

    try:
        # tests with at least one significant cluster
        sig = {d: data[d] for d in data if data[d].clusters['p'].min() <= pmin}
        # retrieve the significant clusters from those tests
        # creates this structure: {v1: [c1, c2, ...], v2: [c1, c2, ...], ...}
        # where v is a regressor and c is a cluster NDVar (with info included)
        sig = {d: [sig[d].clusters[c] for c in range(sig[d].clusters.n_cases)
                   if sig[d].clusters[c]['p'] <= pmin]
               for d in sig}
    except ValueError:  # no sig clusters
        print('No significant clusters.')
        sig = {}

    return sig

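# Hypothetical call: collect the significant clusters from all pickled
# correlation tests in _dirc (the keyword values shown are the defaults).
corr_clusters = sig_clusters(prefix='corr', pmin=0.05)
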
def reg_clusters_t_src(mod, IV, factor=None, c1=None, c0=None):
    """
    Cluster permutation (src x time) t-tests on regression coefficients.

    Example: mod='main-VerbGivenWord', IV='LogFre'
    """
    dr = '/Volumes/Backup/sufAmb/regression/'
    f = dr + 'ols_' + mod + '.pickled'
    data = load.unpickle(f)
    cond = "predictor=='%s'" % IV
    data = data.sub(cond)  # smaller ds with only the predictor of interest

    # cluster permutation parameters
    Y = 'beta'
    match = 'subject'
    samples = 100
    pmin = 0.05
    tstart = 0.13
    tstop = 0.45
    mintime = 0.03
    minsource = 10

    if factor is None:
        test = testnd.ttest_1samp(Y=Y, ds=data, match=match, samples=samples, pmin=pmin,
                                  tstart=tstart, tstop=tstop, mintime=mintime, minsource=minsource)
    elif factor == 'main':
        test = testnd.ttest_1samp(Y=Y, ds=data.sub("condition=='main'"), match=match, samples=samples,
                                  pmin=pmin, tstart=tstart, tstop=tstop, mintime=mintime, minsource=minsource)
    else:
        test = testnd.ttest_rel(Y=Y, ds=data, X=factor, c1=c1, c0=c0, match=match, samples=samples,
                                pmin=pmin, tstart=tstart, tstop=tstop, mintime=mintime, minsource=minsource)

    print("Finished cluster test: mod=%s, IV=%s, factor=%s, c1=%s, c0=%s" % (mod, IV, factor, c1, c0))

    path = "/Volumes/BackUp/sufAmb/reg-cluster_time_src/ols_mod-%s_IV-%s_factor-%s_c1-%s_c0-%s.pickled" \
           % (mod, IV, factor, c1, c0)
    save.pickle(test, path)

    return test

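# Hypothetical calls matching the docstring example: a one-sample test across
# all conditions, and (as an assumption) a paired contrast on the 'Type'
# factor created by the reshaping script below.
reg_clusters_t_src('main-VerbGivenWord', 'LogFre')
reg_clusters_t_src('main-VerbGivenWord', 'LogFre', factor='Type', c1='stemS', c0='stem')
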
#===============================================================================
# current format: [subject1, stem, stemS, stemEd],
#                 [subject2, stem, stemS, stemEd],
#                 ...
#
# new format: [subject1, stem],
#             [subject1, stemS],
#             ...
#             [subject2, stem],
#             ...
#===============================================================================

dr = '/Volumes/Backup/sufAmb/pickled/'
IVs = ['VerbGivenWord_nocov', 'VerbGivenWord', 'Ambiguity', 'WrdVerbyWeighted', 'WrdBiasWeighted']

for v in IVs:
    fil = dr + 'ols_' + v + '.pickled'
    data = load.unpickle(fil)
    c1, c3 = [], []
    c2 = ['all', 'stem', 'stemS', 'stemEd'] * data.n_cases
    for s in range(data.n_cases):
        c1.extend([data['subject'][s]] * 4)
        c3.extend([data['all'][s], data['stem'][s], data['stemS'][s], data['stemEd'][s]])
    c1 = Factor(c1)
    c2 = Factor(c2)
    c3 = combine(c3)
    newds = Dataset(('subject', c1), ('Type', c2), ('beta', c3), info=data.info)
    save.pickle(newds, fil)

def test_select_epochs():
    "Test Select-Epochs GUI Document"
    set_log_level('warning', 'mne')

    ds = datasets.get_mne_sample(sns=True)
    tempdir = TempDir()
    path = os.path.join(tempdir, 'rej.pickled')

    # Test Document
    # =============
    # create a file
    doc = Document(ds, 'meg')
    doc.set_path(path)
    doc.set_case(1, False, 'tag', None)
    doc.set_case(2, None, None, ['2'])
    doc.set_bad_channels([1])
    # check modifications
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)
    # save
    doc.save()

    # check the file
    ds_ = load.unpickle(path)
    eq_(doc.epochs.sensor._array_index(ds_.info['bad_channels']), [1])

    # load the file
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg', path=path)
    # modification checks
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)

    # Test Model
    # ==========
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg')
    model = Model(doc)

    # accept
    model.set_case(0, False, None, None)
    eq_(doc.accept[0], False)
    model.history.undo()
    eq_(doc.accept[0], True)
    model.history.redo()
    eq_(doc.accept[0], False)

    # interpolate
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['3'])
    model.toggle_interpolation(2, '4')
    eq_(doc.interpolate[2], ['3', '4'])
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['4'])
    model.toggle_interpolation(3, '3')
    eq_(doc.interpolate[2], ['4'])
    eq_(doc.interpolate[3], ['3'])
    model.history.undo()
    model.history.undo()
    eq_(doc.interpolate[2], ['3', '4'])
    eq_(doc.interpolate[3], [])
    model.history.redo()
    eq_(doc.interpolate[2], ['4'])

    # bad channels
    model.set_bad_channels([1])
    model.set_bad_channels([1, 10])
    eq_(doc.bad_channels, [1, 10])
    model.history.undo()
    eq_(doc.bad_channels, [1])
    model.history.redo()
    eq_(doc.bad_channels, [1, 10])

    # reload to reset
    model.load(path)
    # tests
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)

    # Test GUI
    # ========
    frame = gui.select_epochs(ds)
    assert_false(frame.CanBackward())
    ok_(frame.CanForward())
    frame.OnForward(None)

def test_select_epochs():
    "Test Select-Epochs GUI Document"
    set_log_level('warning', 'mne')

    ds = datasets.get_mne_sample(sns=True)
    tempdir = TempDir()
    path = os.path.join(tempdir, 'rej.pickled')

    # create a file
    doc = Document(ds, 'meg')
    doc.set_path(path)
    doc.set_case(1, False, 'tag', None)
    doc.set_case(2, None, None, ['2'])
    doc.set_bad_channels([1])
    # check modifications
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)
    # save
    doc.save()

    # check the file
    ds_ = load.unpickle(path)
    eq_(doc.epochs.sensor.dimindex(ds_.info['bad_channels']), [1])

    # load the file
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg', path=path)
    # modification checks
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)

    # Test model
    # ==========
    ds = datasets.get_mne_sample(sns=True)
    doc = Document(ds, 'meg')
    model = Model(doc)

    # accept
    model.set_case(0, False, None, None)
    eq_(doc.accept[0], False)
    model.history.undo()
    eq_(doc.accept[0], True)
    model.history.redo()
    eq_(doc.accept[0], False)

    # interpolate
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['3'])
    model.toggle_interpolation(2, '4')
    eq_(doc.interpolate[2], ['3', '4'])
    model.toggle_interpolation(2, '3')
    eq_(doc.interpolate[2], ['4'])
    model.toggle_interpolation(3, '3')
    eq_(doc.interpolate[2], ['4'])
    eq_(doc.interpolate[3], ['3'])
    model.history.undo()
    model.history.undo()
    eq_(doc.interpolate[2], ['3', '4'])
    eq_(doc.interpolate[3], [])
    model.history.redo()
    eq_(doc.interpolate[2], ['4'])

    # bad channels
    model.set_bad_channels([1])
    model.set_bad_channels([1, 10])
    eq_(doc.bad_channels, [1, 10])
    model.history.undo()
    eq_(doc.bad_channels, [1])
    model.history.redo()
    eq_(doc.bad_channels, [1, 10])

    # reload to reset
    model.load(path)
    # tests
    eq_(doc.accept[1], False)
    eq_(doc.tag[1], 'tag')
    eq_(doc.interpolate[1], [])
    eq_(doc.interpolate[2], ['2'])
    eq_(doc.bad_channels, [1])
    assert_array_equal(doc.accept[2:], True)

def test_select_epochs():
    "Test Select-Epochs GUI Document"
    set_log_level('warning', 'mne')

    data_path = mne.datasets.testing.data_path()
    raw_path = join(data_path, 'MEG', 'sample', 'sample_audvis_trunc_raw.fif')
    raw = mne.io.Raw(raw_path, preload=True).pick_types('mag', stim=True)
    ds = load.fiff.events(raw)
    ds['meg'] = load.fiff.epochs(ds, tmax=0.1)

    tempdir = TempDir()
    path = join(tempdir, 'rej.pickled')

    # Test Document
    # =============
    # create a file
    doc = Document(ds, 'meg')
    doc.set_path(path)
    doc.set_case(1, False, 'tag', None)
    doc.set_case(2, None, None, ['2'])
    doc.set_bad_channels([1])
    # check modifications
    assert doc.accept[1] == False
    assert doc.tag[1] == 'tag'
    assert doc.interpolate[1] == []
    assert doc.interpolate[2] == ['2']
    assert doc.bad_channels == [1]
    assert_array_equal(doc.accept[2:], True)
    # save
    doc.save()

    # check the file
    ds_ = load.unpickle(path)
    assert doc.epochs.sensor._array_index(ds_.info['bad_channels']) == [1]

    # load the file
    doc = Document(ds, 'meg', path=path)
    # modification checks
    assert doc.accept[1] == False
    assert doc.tag[1] == 'tag'
    assert doc.interpolate[1] == []
    assert doc.interpolate[2] == ['2']
    assert doc.bad_channels == [1]
    assert_array_equal(doc.accept[2:], True)

    # Test Model
    # ==========
    doc = Document(ds, 'meg')
    model = Model(doc)

    # accept
    model.set_case(0, False, None, None)
    assert doc.accept[0] == False
    model.history.undo()
    assert doc.accept[0] == True
    model.history.redo()
    assert doc.accept[0] == False

    # interpolate
    model.toggle_interpolation(2, '3')
    assert doc.interpolate[2] == ['3']
    model.toggle_interpolation(2, '4')
    assert doc.interpolate[2] == ['3', '4']
    model.toggle_interpolation(2, '3')
    assert doc.interpolate[2] == ['4']
    model.toggle_interpolation(3, '3')
    assert doc.interpolate[2] == ['4']
    assert doc.interpolate[3] == ['3']
    model.history.undo()
    model.history.undo()
    assert doc.interpolate[2] == ['3', '4']
    assert doc.interpolate[3] == []
    model.history.redo()
    assert doc.interpolate[2] == ['4']

    # bad channels
    model.set_bad_channels([1])
    model.set_bad_channels([1, 10])
    assert doc.bad_channels == [1, 10]
    model.history.undo()
    assert doc.bad_channels == [1]
    model.history.redo()
    assert doc.bad_channels == [1, 10]

    # reload to reset
    model.load(path)
    # tests
    assert doc.accept[1] == False
    assert doc.tag[1] == 'tag'
    assert doc.interpolate[1] == []
    assert doc.interpolate[2] == ['2']
    assert doc.bad_channels == [1]
    assert_array_equal(doc.accept[2:], True)

    # Test GUI
    # ========
    frame = gui.select_epochs(ds, nplots=9)
    assert not frame.CanBackward()
    assert frame.CanForward()
    frame.OnForward(None)
    frame.SetVLim(1e-12)