def test_irb_calibrate_mkdata_use_file():
    """test using cals from dblocks in another file"""
    prefixes = ("test1", "test2")
    h5_files = [
        IRB_DIR / "mkh5" / f"calibrate_mkdata_use_file{i}.h5" for i in (1, 2)
    ]

    # build one fresh mkh5 file per prefix
    datasets = []
    for prefix, h5_path in zip(prefixes, h5_files):
        dat = mkh5.mkh5(h5_path)
        dat.reset_all()  # start fresh
        dat.create_mkdata(prefix, *GET_IRB_MKDIG(prefix))
        datasets.append(dat)

    # calibrate the first file with cals drawn from the second
    cal_args = copy.copy(CAL_ARGS)
    cal_args["use_cals"] = prefixes[1]
    cal_args["use_file"] = h5_files[1]
    datasets[0].calibrate_mkdata(prefixes[0], **cal_args)

    for h5_path in h5_files:
        os.remove(h5_path)
def test_irb_get_epochs():
    """Round-trip an event table through set_epochs/get_epochs.

    Builds and calibrates a single-subject mkh5 file, tags events, sets an
    epochs table, then fetches the epochs in numpy and pandas formats.
    """
    subid = "test2"
    h5_f = IRB_DIR / "mkh5" / (subid + "get_epochs.h5")
    code_map_f = TEST_DIR("data/test2_items.xlsx!code_table")

    myh5 = mkh5.mkh5(h5_f)
    myh5.reset_all()  # start fresh
    myh5.create_mkdata(subid, *GET_IRB_MKDIG(subid))
    myh5.calibrate_mkdata(subid, **CAL_ARGS)

    # check the event_table -> epochs_table round trip
    event_table = myh5.get_event_table(code_map_f)
    myh5.set_epochs("epochs_short", event_table, -500, 1000)

    # fetch the table from self and check
    epochs_table = myh5.get_epochs_table("epochs_short", format="numpy")
    myh5._check_epochs_table(epochs_table)

    # look up the table by name and check
    myh5._h5_check_epochs_table_name(myh5.h5_fname, "epochs_short")

    # now the actual data epochs; calls are smoke tests for each format
    # FIX: removed the unused `good_cols` list (dead local, never passed on)
    np_epochs, attrs = myh5.get_epochs("epochs_short", format="numpy")
    epochs_good_cols, attrs = myh5.get_epochs(
        "epochs_short", format="numpy", columns=["dblock_ticks"]
    )
    pd_epochs, attrs = myh5.get_epochs(
        "epochs_short", format="pandas", columns=["dblock_ticks"]
    )
    os.remove(h5_f)
def test_irb_flat_cals():
    """calibrate_mkdata must reject a channel whose cal pulses are flat.

    Loads lexcon02, calibrates with the same parameters as calstest.sh,
    and expects a ValueError naming channel 28:A2 as having no cal pulses
    after IQR trimming.
    """
    pfx = "lexcon02"
    h5f = IRB_DIR / "mkh5" / (pfx + ".h5")
    myh5 = mkh5.mkh5(h5f)

    # start fresh
    myh5.reset_all()
    myh5.create_mkdata(pfx, *GET_IRB_MKDIG(pfx))

    # # use same as calstest.sh
    pts, pulse, lo, hi, ccode = 3, 10, -30, 30, 0
    try:
        myh5.calibrate_mkdata(
            pfx,
            n_points=pts,  # pts to average, either side of cursor
            cal_size=pulse,  # uV
            lo_cursor=lo,  # lo_cursor ms
            hi_cursor=hi,  # hi_cursor ms
            cal_ccode=ccode,
            use_cals=None,
        )
    except ValueError as fail:
        fail_msg = fail.args[0]
        if (
            "lexcon02.h5 channel 28:A2 has no cal pulses after "
            "trimming at median +/- 1.5IQR"
        ) in fail_msg:
            # expected failure mode for this data set
            print("OK ... caught flat cals")
        else:
            # some other ValueError: report and propagate it
            print("failed to catch flat cals")
            raise
    # NOTE(review): if calibrate_mkdata raises nothing at all the test
    # passes silently — consider failing explicitly in that case.
def test_irb_epochs_out_of_bounds():
    """epochs running off either edge of the recording must be excluded."""
    subid = "test2"
    h5_f = IRB_DIR / "mkh5" / (subid + "epochs_out_of_bounds.h5")
    code_map_f = TEST_DIR("data/test2_items.xlsx!code_table")

    myh5 = mkh5.mkh5(h5_f)
    myh5.reset_all()  # start fresh
    myh5.create_mkdata(subid, *GET_IRB_MKDIG(subid))
    pts, pulse, lo, hi, ccode = 3, 10, -50, 50, 0
    myh5.calibrate_mkdata(subid, **CAL_ARGS)

    event_table = myh5.get_event_table(code_map_f)
    assert event_table.shape == (120, 34)

    # (epochs name, (pre, post) ms, epoch ids that must be dropped)
    oob_cases = [
        ("oobl", (-20000, 100), [0, 7, 38, 64]),            # off the left edge
        ("oobr", (-100, 20000), [1, 18, 57, 78, 84, 117]),  # off the right edge
    ]
    for name, (pre, post), excluded in oob_cases:
        myh5.set_epochs(name, event_table, pre, post)
        eptbl = myh5.get_epochs_table(name)
        assert all(idx not in excluded for idx in eptbl["epoch_id"])

    os.remove(h5_f)
def test_irb_export_one_sub_epochs():
    """export a single subject's epochs in each supported file format."""
    subid = "test2"
    h5_f = IRB_DIR / "mkh5" / (subid + "one_sub_eeg.h5")
    epochs_pfx = IRB_DIR / "mkh5" / (subid + "one_sub_epochs")
    code_map_f = TEST_DIR("data/test2_items.xlsx!code_table")

    myh5 = mkh5.mkh5(h5_f)
    myh5.reset_all()  # start fresh
    myh5.create_mkdata(subid, *GET_IRB_MKDIG(subid))
    pts, pulse, lo, hi, ccode = 3, 10, -50, 50, 0
    myh5.calibrate_mkdata(subid, **CAL_ARGS)

    event_table = myh5.get_event_table(code_map_f)
    myh5.set_epochs("epochs_short", event_table, -500, 1000)

    # write one export per format, removing each file right after
    for fmt, ext in [("h5", "h5"), ("feather", "fthr"), ("txt", "txt")]:
        exported_f = f"{epochs_pfx}.{ext}"
        myh5.export_epochs("epochs_short", exported_f, file_format=fmt)
        os.remove(exported_f)

    os.remove(h5_f)
def test_irb_calibrate_negative_cals():
    """test that calibration don't care that the pulse step is
    negative going. Spot checks MiPa"""
    pfx = "arquant3"
    h5f = IRB_DIR / "mkh5" / "negative_cals_test.h5"
    mydat = mkh5.mkh5(h5f)
    mydat.reset_all()
    mydat.create_mkdata(pfx, *GET_IRB_MKDIG(pfx))

    dblock_path = pfx + "/dblock_0"

    # MiPa before calibration ...
    with h5py.File(h5f, "r") as h5:
        MiPa_before = h5[dblock_path]["MiPa"]

    mydat.calibrate_mkdata(
        pfx,
        n_points=3,     # pts to average, either side of cursor
        cal_size=10,    # uV
        polarity=1,     # of calibration pulse
        lo_cursor=-50,  # ms
        hi_cursor=50,
        cal_ccode=0,
        use_cals=None,
    )

    # ... and after: the sign of every sample must be unchanged
    with h5py.File(h5f, "r") as h5:
        MiPa_after = h5[dblock_path]["MiPa"]
    assert all(np.sign(MiPa_before) == np.sign(MiPa_after))
def test_irb_special_cases():
    """probe the event table for a set of special-case codemap patterns."""
    ytbl_f = TEST_DIR("data/codemap_test.ytbl")
    subid = "test2"
    h5f = IRB_DIR / "mkh5" / (subid + "_event_table.h5")

    myh5 = mkh5.mkh5(h5f)
    myh5.reset_all()
    myh5.create_mkdata(subid, *GET_IRB_MKDIG(subid))
    event_table = myh5.get_event_table(ytbl_f)

    cols_of_interest = [
        "dblock_path",
        "regexp",
        "log_evcodes",
        "anchor_code",
        "match_code",
    ]
    special_cases = (
        "neg_last",
        "neg_code",
        "neg_last_plus_one",
        "code_frag",
        "pos_last",
        "set_initial",
        "set_medial",
    )
    # each lookup is best-effort; a missing row is reported, not fatal
    for test in special_cases:
        try:
            event_table.loc[test, cols_of_interest]
        except Exception:
            print(f"{test} not found in event_table")
def test_reset_all():
    """test reset_all, which must delete the contents of a file."""
    # file should not exist already
    if TEST_H5.exists():
        os.remove(TEST_H5)

    # new file should be empty
    mydat = mkh5.mkh5(TEST_H5)
    with h5py.File(TEST_H5, "r") as h5:
        assert not h5.keys(), f"Keys found: {h5.keys()}, expected no keys."

    # add group so we can check that reset_all wipes it
    with h5py.File(TEST_H5, "r+") as h5:
        h5.create_group("test_group")
        # FIX: dropped pointless f-prefix on a message with no placeholders
        assert h5.keys(), "Expected to see test_group, but keys are empty."

    # reset_all should make the file empty again
    mydat.reset_all()
    with h5py.File(TEST_H5, "r") as h5:
        assert not h5.keys(), f"Keys found: {h5.keys()}, expected no keys."

    # cleanup
    os.remove(TEST_H5)
def test_irb_event_table_b():
    """build event tables with and without header extraction, then export."""
    subid = "test2"
    h5f = IRB_DIR / "mkh5" / (subid + "_event_table.h5")
    mydat = mkh5.mkh5(h5f)
    mydat.reset_all()  # start fresh
    mydat.create_mkdata(subid, *GET_IRB_MKDIG(subid))

    # sample code sequence pattern matches; Excel header slicers are
    # DEPRECATED, the .yhdx chooser is used instead
    code_map_f = TEST_DIR("data/test2b.ytbl")
    header_chooser_f = TEST_DIR("data/test2.yhdx")

    print("get_event_table() *WITHOUT* header extraction")
    event_table = mydat.get_event_table(code_map_f)
    print("get_event_table() *WITH* header extraction")
    event_table = mydat.get_event_table(code_map_f, header_chooser_f)

    # test event table export in both formats
    print("exporting event table")
    mydat.export_event_table(event_table, TEST_DIR("data/test_event_table_b.fthr"))
    mydat.export_event_table(
        event_table, TEST_DIR("data/test_event_table_b.txt"), format="txt"
    )

    # clean up
    os.remove(h5f)
def jitter_data(mkh5_f):
    """jitter EEG data w/ random normal noise, skip cal data blocks

    Each EEG channel in each non-cal dblock is perturbed in place with
    zero-mean normal noise scaled to that channel's standard deviation.

    Parameters
    ----------
    mkh5_f : path-like
        mkh5 format HDF5 file, modified in place.
    """
    h5_data = mkh5.mkh5(mkh5_f)
    dbps = h5_data.dblock_paths
    for dbp in dbps:
        hdr, data = h5_data.get_dblock(dbp)
        # EEG channels are the streams sourced from a dig channel
        eeg_chans = [
            stream
            for stream in hdr["streams"].keys()
            if "dig_chan" in hdr["streams"][stream]["source"]
        ]
        # the dblock(s) that supplied this dblock's calibration pulses
        cal_dblocks = set([
            hdr["streams"][eeg_chan]["cals"]["cal_dblock"][0]
            for eeg_chan in eeg_chans
        ])
        # skip this dblock if it was used for calibration
        # NOTE(review): substring test — assumes dblock path naming makes
        # `dbp in cal_dblock` a reliable match; confirm against header format
        if any(dbp in cal_dblock for cal_dblock in cal_dblocks):
            print("not jittering cal dblock: ", dbp)
            continue
        else:
            print("jittering ", dbp)
        # add channel-wise scaled normal random variability, in place
        with h5py.File(mkh5_f, 'r+') as raw_h5:
            for eeg_chan in eeg_chans:
                raw_h5[dbp][eeg_chan] += np.random.normal(
                    loc=0,
                    scale=raw_h5[dbp][eeg_chan].astype(float).std(),
                    size=(len(raw_h5[dbp]), ),
                )
def test_irb_messy_event_table():
    """table contains utf-16, NaN and mixed string-int column data

    Exercises mkh5._pd_series_to_hdf5() on clean and messy columns and
    checks set_epochs() raises EpochsTableDataError on the messy table.
    """
    subid = "cor01"
    cor_h5_f = IRB_DIR / "mkh5" / (subid + "create_epochs.h5")
    cor_h5 = mkh5.mkh5(cor_h5_f)
    cor_h5.reset_all()  # start fresh
    pts, pulse, lo, hi, ccode = 3, 10, -50, 50, 0
    cor_h5.create_mkdata(subid, *GET_IRB_MKDIG(subid))
    cor_h5.calibrate_mkdata(subid, **CAL_ARGS)

    # simple test w/ utf-8 encoding
    colname = "Text_column"
    print("testing utf-8 xlsx")
    unicode_map_f = TEST_DIR("data/unicode_test.xlsx")
    unicode_test_xlsx = cor_h5.get_event_table(unicode_map_f)

    # this should succeed
    colname = "OK_Pre-Context"
    arry = cor_h5._pd_series_to_hdf5(unicode_test_xlsx[colname])

    # this should fail
    try:
        colname = "Bad_Pre-Context"
        arry = cor_h5._pd_series_to_hdf5(unicode_test_xlsx[colname])
    except Exception as fail:
        print("caught exception")
        pass

    # utf-8 csv should convert cleanly
    colname = "Text_column"
    unicode_map_f = TEST_DIR("data/unicode_test.csv")
    unicode_test_csv = cor_h5.get_event_table(unicode_map_f)
    arry = cor_h5._pd_series_to_hdf5(unicode_test_csv[colname])

    # original messy table
    print("testing bad utf-16 xlsx")
    cor_code_map_f = TEST_DIR("data/congorth_item.xlsx!test")
    cor_yhdx_f = TEST_DIR("data/cor01.yhdx")
    cor_event_table_a = cor_h5.get_event_table(cor_code_map_f, cor_yhdx_f)
    colname = "Pre-Context"
    try:
        cor_h5._pd_series_to_hdf5(cor_event_table_a[colname])
    except Exception as fail:
        print("caught exception")
        pass

    try:
        cor_h5.set_epochs("cor_a", cor_event_table_a, -500, 1500)
    except mkh5.mkh5.EpochsTableDataError as fail:
        msg = f"caught epochs table data error: {cor_code_map_f}[cor_a]"
        print(msg)
    else:
        # FIX: was a bare `raise` with no active exception, which surfaces
        # as an opaque "No active exception to re-raise" RuntimeError;
        # fail the test explicitly instead.
        raise AssertionError(
            "set_epochs() should have raised EpochsTableDataError"
        )
def test_create_expt():
    """mkh5 data multiple crw/log loader test"""
    expt = mkh5.mkh5(TEST_H5)
    expt.reset_all()  # start fresh
    # load each subject's recording into its own data group
    for subject in (S01, S05):
        expt.create_mkdata(
            subject["gid"], subject["eeg_f"], subject["log_f"], subject["yhdr_f"]
        )
    os.remove(TEST_H5)
def test_load_yhdr(yhdr):
    """load good yaml header files, fail informatively on bad"""
    loader = mkh5.mkh5(TEST_H5)
    loader.reset_all()
    # S01 recording paired with the parametrized yaml header
    loader.create_mkdata(S01["gid"], S01["eeg_f"], S01["log_f"], yhdr)
    os.remove(TEST_H5)
def test_irb_calibrate_same_crw():
    """calibrate from cals in the same .crw and reject double calibration."""
    pfx = "test2"
    h5f = IRB_DIR / "mkh5" / "test_calibrate_same_crw.h5"
    stub_h5f = TEST_DIR("data/stub.h5")
    mydat = mkh5.mkh5(h5f)
    mydat.reset_all()  # start fresh
    mydat.create_mkdata(pfx, *GET_IRB_MKDIG(pfx))
    mydat.calibrate_mkdata(pfx, **CAL_ARGS)

    # report calibration scale factors direct from hdf5 file
    with h5py.File(h5f, "r") as h5:
        subid = pfx
        dblock_names = [
            subid + "/dblock_" + str(b) for b in range(len(h5[subid].keys()))
        ]
        lo_gain_chans = ["lle", "lhz", "rle", "rhz", "MiPf", "LLPf", "RLPf"]
        stub = mkh5.mkh5(stub_h5f)
        hio = stub.HeaderIO()
        for dblock_name in dblock_names:
            hio.get(h5[dblock_name])
            strms = hio.header["streams"]
            for k, v in strms.items():
                if "dig_chan_" in v["source"]:
                    # FIX: removed dead store `scale_by = None` that was
                    # immediately overwritten
                    scale_by = v["cals"]["scale_by"]
                    print(
                        "{0} {1:4s} {2:5.3f}".format(
                            subid, v["name"], scale_by
                        )
                    )

    # ensure calibrating twice throws an error ...
    try:
        mydat.calibrate_mkdata(subid, **CAL_ARGS)
    except Exception as fail:
        print("OK ... caught attempted double calibration")
    else:
        raise RuntimeError(
            "uh oh ... failed to catch an attempted double calibration"
        )
    os.remove(h5f)
    os.remove(stub_h5f)
def reset_h5():
    """rebuild the no_pg_h5 file from scratch with three epoch tables."""
    cleanup_h5()
    myh5 = mkh5.mkh5(no_pg_h5)
    myh5.reset_all()
    myh5.create_mkdata(pfx, eeg_f, log_f, yhdr_f)
    event_table = myh5.get_event_table(ytbl_f)
    # epochs table name -> (pre, post) interval in ms
    epoch_specs = {
        "short_epochs": (-100, 1000),
        "medium_epochs": (-500, 1500),
        "long_epochs": (-3000, 3000),
    }
    for name, (pre, post) in epoch_specs.items():
        myh5.set_epochs(name, event_table, pre, post)
def test_plotcals():
    """calibration param inspector routine with sensible default values"""
    cal_dat = mkh5.mkh5(TEST_H5)
    cal_dat.reset_all()  # start fresh
    cal_dat.create_mkdata(S01["gid"], S01["eeg_f"], S01["log_f"], S01["yhdr_f"])
    # pre-calibration inspector ... viewer only, like garv
    cal_dat.plotcals(TEST_H5, S01["gid"], **CAL_ARGS)
    os.remove(TEST_H5)
def test_calibrate_mkdata_use_cals_2():
    """calibrate resting recordings using cals from a sister data group

    draws from adlong
    """
    mkh5_dir = Path(TEST_DIR("data"))
    resting_dir = mkh5_dir
    cals_dir = mkh5_dir
    subid = "s001"

    # one pass per eyes-open / eyes-closed resting state
    for state in ("ro", "rc"):
        datagroup = subid + state

        # 1. fresh .h5 file for this state
        h5_f = mkh5_dir / (datagroup + ".h5")
        this_h5 = mkh5.mkh5(h5_f)
        this_h5.reset_all()

        # 2. load .crw/.log data into .h5 file
        crw_f = resting_dir / (datagroup + ".crw")
        log_f = resting_dir / (datagroup + ".log")
        yhdr_f = mkh5_dir / (datagroup + ".yhdr")
        this_h5.create_mkdata(datagroup, crw_f, log_f, yhdr_f)

        # 3. load up cals into the same file in a *sister* datagroup
        cals = datagroup + "_cals"
        cal_crw_f = cals_dir / (subid + "c.crw")
        cal_log_f = cals_dir / (subid + "c.log")
        this_h5.create_mkdata(cals, cal_crw_f, cal_log_f, yhdr_f)

        # optionally plot the cals
        pts, pulse, lo, hi, ccode = 3, 10, -32, 32, 0
        plot_cals = False
        if plot_cals:
            f, ax = this_h5.plotcals(
                h5_f,
                cals,
                n_points=pts,  # pts to average
                cal_size=pulse,  # uV
                lo_cursor=lo,  # lo_cursor ms
                hi_cursor=hi,  # hi_cursor ms
                cal_ccode=ccode,  # condition code
            )
            p = plt.show(f)

        # calibrate w/ the same params; use_cals names the cals data group
        this_h5.calibrate_mkdata(
            datagroup,
            n_points=pts,
            cal_size=pulse,
            lo_cursor=lo,
            hi_cursor=hi,
            cal_ccode=ccode,
            use_cals=cals,
        )
        os.remove(h5_f)
def test_create_mkdata():
    """mkh5 data loader test"""
    # FIX: dropped the no-op `try/except Exception as fail: raise fail`
    # wrapper — it only re-raised the same exception while adding a
    # redundant traceback frame; letting failures propagate is equivalent
    # and clearer.
    mydat = mkh5.mkh5(TEST_H5)
    mydat.reset_all()
    mydat.create_mkdata(
        S01["gid"], S01["eeg_f"], S01["log_f"], S01["yhdr_f"]
    )
    os.remove(TEST_H5)
def test_irb_negative_raw_evcodes():
    """the mkh5 dblocks are split at pause marks defined as -16834 ...
    anything else should throw a warning.
    """
    pfx = "lexcon01"
    h5_path = IRB_DIR / "mkh5" / (pfx + ".h5")
    neg_dat = mkh5.mkh5(h5_path)
    neg_dat.reset_all()  # start fresh
    # loading alone should surface warnings for stray negative codes
    neg_dat.create_mkdata(pfx, *GET_IRB_MKDIG(pfx))
    os.remove(h5_path)
def test_init(h5f):
    """test file creation and .h5 validity, must warn if file is unwritable."""
    # start without the file
    if Path(h5f).exists():
        os.remove(h5f)

    # constructing mkh5 should create the file ...
    mkh5.mkh5(h5f)
    assert Path(h5f).is_file()

    # ... and the file must be valid hdf5
    with h5py.File(h5f, "r") as h5:
        assert h5.id.valid == 1

    # opening a read-only file should warn the user
    os.chmod(h5f, 0o400)
    with pytest.warns(UserWarning):
        mkh5.mkh5(h5f)

    # cleanup
    os.remove(h5f)
def make_wr(test_name, paths, export_epochs=False):
    """continous word-recognition paradigm recording and epochs"""
    # recording, event log, and extra header info
    crw = MDE_HOME / "mkdig/sub000wr.crw"
    log = MDE_HOME / "mkdig/sub000wr.x.log"
    yhdr = MDE_HOME / "mkpy/sub000wr.yhdr"

    # calibration data filenames
    cals_crw = MDE_HOME / "mkdig/sub000c.crw"
    cals_log = MDE_HOME / "mkdig/sub000c.x.log"
    cals_yhdr = MDE_HOME / "mkpy/sub000c.yhdr"

    # HDF5 file with EEG recording, events, and header
    h5_f = MDE_HOME / f"multisub_data/wr_{test_name}.h5"
    h5 = mkh5.mkh5(h5_f)
    h5.reset_all()
    for path in paths:
        h5.create_mkdata(path, crw, log, yhdr)
        h5.append_mkdata(path, cals_crw, cals_log, cals_yhdr)
        h5.calibrate_mkdata(path, **CAL_KWARGS)

    # randomly jitter EEG data
    jitter_data(h5_f)

    # snapshot calibrated h5 file before tagging events
    snapshot_f = Path(str(h5_f).replace('.h5', '_no_epoch_tables.h5'))
    shutil.copyfile(h5_f, snapshot_f)

    # 1. scan the event codes into the event table
    event_table = h5.get_event_table(MDE_HOME / "mkpy/wr_codemap.xlsx")

    for epoch_name, (pre, post) in EPOCH_SPECS.items():
        print(epoch_name, pre, post)
        # 2. set the epoch name and boundaries
        h5.set_epochs(epoch_name, event_table, pre, post)
        # 3. optionally export the epochs
        if export_epochs:
            for ffmt in FFORMATS:
                _fname = f"{str(h5_f).replace('.h5','')}.{epoch_name}.epochs.{ffmt}"
                print(f"exporting wr {epoch_name} as {ffmt}: {_fname}")
                h5.export_epochs(epoch_name, _fname, file_format=ffmt)
def test_irb_hp301():
    """_find_events bombs out w/ an urbach slap"""
    subid = "hp301"
    h5_f = IRB_DIR / "mkh5" / (subid + ".h5")
    # crw, log, yhdr triple for this subject
    mkdig_files = [
        IRB_DIR / "mkdig" / ("hp301" + ext)
        for ext in (".crw", ".x.log", ".yhdr")
    ]
    ytbl_f = TEST_DIR("data/HP3_Materials_PreScn_RegExp.xlsx!test")

    myh5 = mkh5.mkh5(h5_f)
    myh5.reset_all()
    myh5.create_mkdata("hp301", *mkdig_files)
    event_table = myh5.get_event_table(ytbl_f)
def test_with_log_events(log_f, wle):
    """exercise create/append_mkdata with_log_events options and verify
    the event codes landing in the dblocks.

    Parameters
    ----------
    log_f : path-like or None
        .log file; a sibling logcat2 text dump at str(log_f) + ".txt"
        provides the reference event codes.
    wle : str or None
        with_log_events mode: None (default), "as_is", "from_eeg", "none".
    """

    def read_log_txt(log_f_txt):
        # logcat2 text dump -> dataframe of the event code columns
        log_data = pd.read_csv(log_f_txt, sep="\s+")[
            ["evtcode", "clock_ticks", "ccode", "flags"]
        ]
        return log_data

    sid = S01["gid"]
    eeg_f = S01["eeg_f"]
    yhdr_f = S01["yhdr_f"]

    wle_test = mkh5.mkh5(TEST_H5)
    wle_test.reset_all()
    if wle is None:
        wle_test.create_mkdata(sid, eeg_f, log_f, yhdr_f)
        wle_test.append_mkdata(sid, eeg_f, log_f, yhdr_f)
    else:
        wle_test.create_mkdata(sid, eeg_f, log_f, yhdr_f, with_log_events=wle)
        wle_test.append_mkdata(sid, eeg_f, log_f, yhdr_f, with_log_events=wle)

    # check correctness
    if log_f is not None:
        log_data = read_log_txt(str(log_f) + ".txt")
        dbpaths = wle_test.dblock_paths
        for dbpath in dbpaths:
            # NOTE(review): always fetches dblock_paths[0]; the loop
            # variable dbpath is unused — confirm whether later dblocks
            # should be checked as well.
            _, data = wle_test.get_dblock(wle_test.dblock_paths[0])
            dblock_raw_evcodes = data[data["raw_evcodes"] != 0]["raw_evcodes"]
            dblock_log_evcodes = data[data["log_evcodes"] != 0]["log_evcodes"]

            # read log codes from data file and confirm they match the
            # logcat2 text dumps
            if wle in ["as_is"]:
                assert all(
                    dblock_log_evcodes
                    == log_data["evtcode"][: len(dblock_log_evcodes)]
                )
            # FIX: was `wle is "from_eeg"` / `wle is "none"` — identity
            # comparison against str literals depends on interning and
            # raises SyntaxWarning; use equality.
            if wle == "from_eeg":
                assert all(dblock_raw_evcodes == dblock_log_evcodes)
            if wle == "none":
                for col in ["log_evcodes", "log_ccodes", "log_flags"]:
                    assert all(
                        np.equal(0, data[col])
                    ), f"non-zero values in {col} with_log_events='none'"
    os.remove(TEST_H5)
def test_irb_event_code_0_in_log():
    """crw 0's are non-event sample, log 0's shouldn't exist but occasionally do"""
    pfx = "cor03"
    h5f = IRB_DIR / "mkh5" / (pfx + ".h5")
    try:
        mydat = mkh5.mkh5(h5f)
        mydat.reset_all()  # start fresh
        mydat.create_mkdata(pfx, *GET_IRB_MKDIG(pfx))
    except Exception:
        print("mkh5.create_mkdata() failed")
        # FIX: bare `raise` re-raises with the original traceback intact;
        # `raise fail` appended a redundant frame.
        raise
    os.remove(h5f)
def test_irb_append_mkdata():
    """test appending separate .crw/.log to a data group: use case
    separate cals, split sessions.
    """
    data_pfx = "test1"
    cals_pfx = "test1cals"
    h5f = IRB_DIR / "mkh5" / "test_append_mkdata.h5"

    appender = mkh5.mkh5(h5f)
    appender.reset_all()  # start fresh
    appender.create_mkdata(data_pfx, *GET_IRB_MKDIG(data_pfx))
    # separately recorded cals land in the same data group
    appender.append_mkdata(data_pfx, *GET_IRB_MKDIG(cals_pfx))
    os.remove(h5f)
def make_p3(test_name, paths, export_epochs=False):
    """counterbalanced hi tone, low tone oddball recording and epochs"""
    # recording, event log, and extra header info
    crw = MDE_HOME / "mkdig/sub000p3.crw"
    log = MDE_HOME / "mkdig/sub000p3.x.log"
    yhdr = MDE_HOME / "mkpy/sub000p3.yhdr"

    # calibration data filenames
    cals_crw = MDE_HOME / "mkdig/sub000c.crw"
    cals_log = MDE_HOME / "mkdig/sub000c.x.log"
    cals_yhdr = MDE_HOME / "mkpy/sub000c.yhdr"

    # HDF5 file with EEG recording, events, and header
    h5_f = MDE_HOME / f"multisub_data/p3_{test_name}.h5"

    # build mkpy.mkh5 format data file and calibrate each data group
    h5 = mkh5.mkh5(h5_f)
    h5.reset_all()
    for path in paths:
        h5.create_mkdata(path, crw, log, yhdr)
        h5.append_mkdata(path, cals_crw, cals_log, cals_yhdr)
        h5.calibrate_mkdata(path, **CAL_KWARGS)

    # randomly jitter EEG data
    jitter_data(h5_f)

    # snapshot calibrated h5 file before tagging events
    snapshot_f = Path(str(h5_f).replace('.h5', '_no_epoch_tables.h5'))
    shutil.copyfile(h5_f, snapshot_f)

    # 1. scan events into the event table
    event_table = h5.get_event_table(MDE_HOME / "mkpy/p3_codemap.ytbl")

    for epoch_name, (pre, post) in EPOCH_SPECS.items():
        print(epoch_name, pre, post)
        # 2. set the epoch specs
        h5.set_epochs(epoch_name, event_table, pre, post)
        # 3. optionally export epochs
        if export_epochs:
            for ffmt in FFORMATS:
                _fname = f"{str(h5_f).replace('.h5','')}.{epoch_name}.epochs.{ffmt}"
                print(f"exporting p3 {epoch_name} as {ffmt}: {_fname}")
                h5.export_epochs(epoch_name, _fname, file_format=ffmt)
def test_irb_load_code_map_files(path_type):
    """load equivalent codemaps from ytbl/txt/xlsx and report cell diffs.

    Parameters
    ----------
    path_type : callable
        Path or str, applied to each codemap filename.
    """
    h5group = "test2"
    h5f = IRB_DIR / "mkh5" / (h5group + "_test_load_codemap.h5")
    mydat = mkh5.mkh5(h5f)
    mydat.reset_all()  # start fresh
    mydat.create_mkdata(h5group, *GET_IRB_MKDIG(h5group))

    # load code mappers in different formats as Path and str
    cm_ytbl = mkh5.CodeTagger(path_type(TEST_DIR("data/design2.ytbl")))
    cm_txt = mkh5.CodeTagger(path_type(TEST_DIR("data/design2.txt")))
    cm_xlsx = mkh5.CodeTagger(path_type(TEST_DIR("data/design2.xlsx")))
    cm_xlsx_named_sheet = mkh5.CodeTagger(
        path_type(TEST_DIR("data/design2.xlsx!code_map")))

    # check for identity ... NB: nan == nan evaluates to False
    # FIX: removed unused `ncms` local
    cms = [cm_ytbl, cm_txt, cm_xlsx, cm_xlsx_named_sheet]
    for i, cm1 in enumerate(cms):
        for cm2 in cms[i + 1:]:
            print("-" * 40)
            print("# ", cm1.cmf)
            print(cm1.code_map)
            for c in cm1.code_map.columns:
                print(c, cm1.code_map[c].dtype)
            print("# ", cm2.cmf)
            print(cm2.code_map)
            # FIX: was printing cm1's columns/dtypes under the cm2 header
            for c in cm2.code_map.columns:
                print(c, cm2.code_map[c].dtype)
            same = cm1.code_map == cm2.code_map
            # report every cell where the two codemaps disagree
            diffs = np.where(same == False)
            for r in range(len(diffs[0])):
                idx = diffs[0][r]
                jdx = diffs[1][r]
                print("{0}[{1},{2}] --> {3}".format(
                    cm1.cmf, idx, jdx, repr(cm1.code_map.iat[idx, jdx])))
                print("{0}[{1},{2}] <-- {3}".format(
                    cm2.cmf, idx, jdx, repr(cm2.code_map.iat[idx, jdx])))
            print()
    os.remove(h5f)
def make_p50(test_name, paths, export_epochs=False):
    """paired click recording and data interchange epochs"""
    # recording, event log, and extra header info
    crw = MDE_HOME / "mkdig/sub000p5.crw"
    log = MDE_HOME / "mkdig/sub000p5.x.log"
    yhdr = MDE_HOME / "mkpy/sub000p5.yhdr"

    # calibration data filenames
    cals_crw = MDE_HOME / "mkdig/sub000c5.crw"
    cals_log = MDE_HOME / "mkdig/sub000c5.x.log"
    cals_yhdr = MDE_HOME / "mkpy/sub000c5.yhdr"

    # HDF5 file with EEG recording, events, and header
    h5_f = MDE_HOME / f"multisub_data/p50_{test_name}.h5"
    h5 = mkh5.mkh5(h5_f)
    h5.reset_all()
    for dgp in paths:
        h5.create_mkdata(dgp, crw, log, yhdr)
        h5.append_mkdata(dgp, cals_crw, cals_log, cals_yhdr)
        h5.calibrate_mkdata(dgp, **CAL_KWARGS)

    # randomly swizzle EEG data a bit
    jitter_data(h5_f)

    # snapshot calibrated h5 file before tagging events
    snapshot_f = Path(str(h5_f).replace('.h5', '_no_epoch_tables.h5'))
    shutil.copyfile(h5_f, snapshot_f)

    # 1. scan event code pattern tags into the event table
    event_table = h5.get_event_table(MDE_HOME / "mkpy/p50_codemap.ytbl")

    for epoch_name, (pre, post) in EPOCH_SPECS.items():
        print(epoch_name, pre, post)
        # 2. set the epoch_table names and interval boundaries
        h5.set_epochs(epoch_name, event_table, pre, post)
        # 3. optionally export the epochs DATA ... EEG and events.
        if export_epochs:
            # multiple export formats for demonstration, in practice pick one
            for ffmt in FFORMATS:
                _fname = f"{str(h5_f).replace('.h5','')}.{epoch_name}.epochs.{ffmt}"
                print(f"exporting p50 {epoch_name} as {ffmt}: {_fname}")
                h5.export_epochs(epoch_name, _fname, file_format=ffmt)
def test_irb_pcag_10y():
    """mkh5._read_raw_log() failed on these files.

    The clock tick for the last log event is larger by a few ticks than
    the number of samples in the crw -- a log/crw dimension mismatch in
    both the sample count and the number of event codes.
    """
    h5_f = IRB_DIR / "mkh5" / "pcag10ybug.h5"
    crw_f = IRB_DIR / "mkdig" / "pcag10y.crw"
    log_f = IRB_DIR / "mkdig" / "pcy10.x.log"
    yhdr_f = IRB_DIR / "mkdig" / "pcag10y.yhdr"

    bug_h5 = mkh5.mkh5(h5_f)
    bug_h5.reset_all()
    # loading is the test: must not crash on the mismatched log
    bug_h5.create_mkdata("10y", crw_f, log_f, yhdr_f)
def make_p3():
    """counterbalanced hi tone, low tone oddball recording and epochs"""
    # recording, event log, and extra header info
    crw = MDE_HOME / "mkdig/sub000p3.crw"
    log = MDE_HOME / "mkdig/sub000p3.x.log"
    yhdr = MDE_HOME / "mkpy/sub000p3.yhdr"

    # calibration data filenames
    cals_crw = MDE_HOME / "mkdig/sub000c.crw"
    cals_log = MDE_HOME / "mkdig/sub000c.x.log"
    cals_yhdr = MDE_HOME / "mkpy/sub000c.yhdr"

    # HDF5 file with EEG recording, events, and header
    p3_h5_f = MDE_HOME / "data/sub000p3.h5"

    # build mkpy.mkh5 format data file and calibrate
    p3_h5 = mkh5.mkh5(p3_h5_f)
    p3_h5.reset_all()
    p3_h5.create_mkdata("sub000", crw, log, yhdr)
    p3_h5.append_mkdata("sub000", cals_crw, cals_log, cals_yhdr)
    p3_h5.calibrate_mkdata("sub000", **CAL_KWARGS)

    # snapshot calibrated h5 file before tagging events
    snapshot_f = Path(str(p3_h5_f).replace('.h5', '_no_epoch_tables.h5'))
    shutil.copyfile(p3_h5_f, snapshot_f)

    # 1. scan events into the event table
    p3_event_table = p3_h5.get_event_table(MDE_HOME / "mkpy/p3_codemap.ytbl")

    for epoch_name, (pre, post) in EPOCH_SPECS.items():
        print(epoch_name, pre, post)
        # 2. set the epoch specs
        p3_h5.set_epochs(epoch_name, p3_event_table, pre, post)
        # 3. export the epochs DATA ... EEG and events; multiple formats
        # for demonstration, in practice pick one
        for ffmt in FFORMATS:
            _fname = f"{MDE_HOME}/data/sub000p3.{epoch_name}.epochs.{ffmt}"
            print(f"exporting p3 {epoch_name} as {ffmt}: {_fname}")
            p3_h5.export_epochs(epoch_name, _fname, file_format=ffmt)