def test_io():
    """Smoke-test tidy GLM export on the fNIRS motor dataset (legacy API).

    Runs the intensity -> OD -> haemoglobin pipeline on a reduced channel
    set, fits a GLM, and checks the shape/columns of the tidy exports for
    the estimates and for t- and F-contrasts.
    """
    # NOTE(review): a later ``test_io`` definition in this file shadows this
    # one at import time — consider renaming one of them.
    n_channels = 6
    data_root = mne.datasets.fnirs_motor.data_path()
    participant_dir = os.path.join(data_root, 'Participant-1')
    raw_intensity = mne.io.read_raw_nirx(participant_dir).load_data()
    raw_intensity.resample(0.2)

    haemo = mne.preprocessing.nirs.beer_lambert_law(
        mne.preprocessing.nirs.optical_density(raw_intensity))
    haemo = mne_nirs.channels.get_long_channels(haemo)
    haemo.pick(picks=range(n_channels))

    design_matrix = make_first_level_design_matrix(
        raw_intensity, hrf_model='spm', stim_dur=5.0,
        drift_order=3, drift_model='polynomial')
    glm_est = run_GLM(haemo, design_matrix)

    wide = _tidy_long_to_wide(glm_to_tidy(haemo, glm_est, design_matrix))
    assert wide.shape == (48, 11)
    assert set(wide.columns) == {
        'ch_name', 'condition', 'df', 'mse', 'p_value', 't', 'theta',
        'Source', 'Detector', 'Chroma', 'Significant'}
    n_regressors = 8  # triggers (1, 2, 3, 15) + 3 drifts + constant
    assert wide.shape[0] == n_channels * n_regressors

    # Build one basic contrast vector per design-matrix column.
    contrast_matrix = np.eye(design_matrix.shape[1])
    basic_conts = {column: contrast_matrix[i]
                   for i, column in enumerate(design_matrix.columns)}
    contrast_LvR = basic_conts['2.0'] - basic_conts['3.0']

    # The t-contrast and the F-contrast must export identically shaped
    # tidy frames with the same columns.
    for extra in ({}, {'contrast_type': 'F'}):
        contrast = mne_nirs.statistics.compute_contrast(
            glm_est, contrast_LvR, **extra)
        wide = _tidy_long_to_wide(glm_to_tidy(haemo, contrast,
                                              design_matrix))
        assert wide.shape == (6, 10)
        assert set(wide.columns) == {
            'ch_name', 'ContrastType', 'z_score', 'stat', 'p_value',
            'effect', 'Source', 'Detector', 'Chroma', 'Significant'}
def test_GLM_system_test():
    """End-to-end GLM system test on the fNIRS motor dataset.

    Relabels the numeric triggers as named conditions, regresses out the
    short-channel signals, then checks that the control condition's false
    positive rate is below 5% and that the tapping conditions are detected
    in most channels. Finally exercises the region-of-interest summary.
    """
    fnirs_data_folder = mne.datasets.fnirs_motor.data_path()
    fnirs_raw_dir = os.path.join(fnirs_data_folder, 'Participant-1')
    raw_intensity = mne.io.read_raw_nirx(fnirs_raw_dir).load_data()
    raw_intensity.resample(1.0)

    # Map the numeric triggers to meaningful condition names.
    new_des = [des for des in raw_intensity.annotations.description]
    new_des = ['Control' if x == "1.0" else x for x in new_des]
    new_des = ['Tapping/Left' if x == "2.0" else x for x in new_des]
    new_des = ['Tapping/Right' if x == "3.0" else x for x in new_des]
    annot = mne.Annotations(raw_intensity.annotations.onset,
                            raw_intensity.annotations.duration, new_des)
    raw_intensity.set_annotations(annot)
    raw_intensity.annotations.crop(35, 2967)

    raw_od = mne.preprocessing.nirs.optical_density(raw_intensity)
    raw_haemo = mne.preprocessing.nirs.beer_lambert_law(raw_od)
    short_chs = get_short_channels(raw_haemo)
    raw_haemo = get_long_channels(raw_haemo)

    design_matrix = make_first_level_design_matrix(raw_intensity,
                                                   hrf_model='spm',
                                                   stim_dur=5.0,
                                                   drift_order=3,
                                                   drift_model='polynomial')
    # Add the mean short-channel signals as nuisance regressors.
    design_matrix["ShortHbO"] = np.mean(
        short_chs.copy().pick(picks="hbo").get_data(), axis=0)
    design_matrix["ShortHbR"] = np.mean(
        short_chs.copy().pick(picks="hbr").get_data(), axis=0)

    glm_est = run_GLM(raw_haemo, design_matrix)
    df = glm_to_tidy(raw_haemo, glm_est, design_matrix)
    df = _tidy_long_to_wide(df)

    a = (df.query('condition in ["Control"]')
         .groupby(['condition', 'Chroma']).agg(['mean']))
    # Make sure false positive rate is less than 5%
    assert a["Significant"].values[0] < 0.05
    assert a["Significant"].values[1] < 0.05

    a = (df.query('condition in ["Tapping/Left", "Tapping/Right"]')
         .groupby(['condition', 'Chroma']).agg(['mean']))
    # Fairly arbitrary cutoff here, but its more than 5%
    assert a["Significant"].values[0] > 0.7
    assert a["Significant"].values[1] > 0.7
    assert a["Significant"].values[2] > 0.7
    assert a["Significant"].values[3] > 0.7

    left = [[1, 1], [1, 2], [1, 3], [2, 1], [2, 3],
            [2, 4], [3, 2], [3, 3], [4, 3], [4, 4]]
    right = [[5, 5], [5, 6], [5, 7], [6, 5], [6, 7],
             [6, 8], [7, 6], [7, 7], [8, 7], [8, 8]]
    groups = dict(Left_ROI=picks_pair_to_idx(raw_haemo, left),
                  Right_ROI=picks_pair_to_idx(raw_haemo, right))
    # ``DataFrame.append`` was removed in pandas 2.0; collect the per-column
    # ROI frames and concatenate them in one call instead.
    df = pd.concat([glm_region_of_interest(glm_est, groups, idx, col)
                    for idx, col in enumerate(design_matrix.columns[:3])])
    assert df.shape == (12, 8)
def test_simulate_NIRS():
    """Validate simulated NIRS data and recovery of the known amplitudes."""
    raw = simulate_nirs_raw(sfreq=3., amplitude=1., sig_dur=300.,
                            stim_dur=5., isi_min=15., isi_max=45.)
    assert 'hbo' in raw
    assert raw.info['sfreq'] == 3.
    assert raw.get_data().shape == (1, 900)
    assert np.max(raw.get_data()) < 1.2 * 1.e-6
    assert raw.annotations.description[0] == 'A'
    assert raw.annotations.duration[0] == 5
    # Stimulus onsets must respect the requested ISI bounds plus duration.
    onset_gaps = np.diff(raw.annotations.onset)
    assert np.min(onset_gaps) > 15. + 5.
    assert np.max(onset_gaps) < 45. + 5.

    # Mismatched amplitude/annotation list lengths must be rejected.
    with pytest.raises(AssertionError, match='Same number of'):
        raw = simulate_nirs_raw(sfreq=3., amplitude=[1., 2.], sig_dur=300.,
                                stim_dur=5., isi_min=15., isi_max=45.)

    # Simulate three conditions with known amplitudes and confirm the GLM
    # recovers each of them.
    raw = simulate_nirs_raw(sfreq=3., amplitude=[0., 2., 4.],
                            annot_desc=['Control', 'Cond_A', 'Cond_B'],
                            stim_dur=[5, 5, 5], sig_dur=900.,
                            isi_min=15., isi_max=45.)
    design_matrix = make_first_level_design_matrix(raw, stim_dur=5.0,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    glm_est = run_GLM(raw, design_matrix)
    wide = _tidy_long_to_wide(glm_to_tidy(raw, glm_est, design_matrix))
    for cond, amp in (('Control', 0.), ('Cond_A', 2e-6), ('Cond_B', 4e-6)):
        theta = wide.query(f"condition in ['{cond}']")['theta'].values[0]
        assert theta == pytest.approx(amp)
def test_io():
    """Check tidy GLM export with the modern ``run_glm`` API.

    Verifies the wide-format export shape/columns, that the statistic
    columns are populated (no NaNs), both t- and F-contrast exports, and
    that an unsupported statistic container raises ``TypeError``.
    """
    n_channels = 6
    data_root = mne.datasets.fnirs_motor.data_path()
    raw_intensity = mne.io.read_raw_nirx(
        os.path.join(data_root, 'Participant-1')).load_data()
    raw_intensity.resample(0.2)
    # Rewrite triggers as valid identifiers, e.g. '2.0' -> 'e2p0'.
    raw_intensity.annotations.description[:] = [
        'e' + d.replace('.', 'p')
        for d in raw_intensity.annotations.description]

    haemo = mne.preprocessing.nirs.beer_lambert_law(
        mne.preprocessing.nirs.optical_density(raw_intensity), ppf=0.1)
    haemo = mne_nirs.channels.get_long_channels(haemo)
    haemo.pick(picks=range(n_channels))

    design_matrix = make_first_level_design_matrix(
        raw_intensity, hrf_model='spm', stim_dur=5.0,
        drift_order=3, drift_model='polynomial')
    glm_est = run_glm(haemo, design_matrix)

    df = glm_to_tidy(haemo, glm_est.data, design_matrix)
    assert df.shape == (48, 12)
    assert set(df.columns) == {
        'ch_name', 'Condition', 'df', 'mse', 'p_value', 't', 'theta',
        'Source', 'Detector', 'Chroma', 'Significant', 'se'}
    n_regressors = 8  # triggers (1, 2, 3, 15) + 3 drifts + constant
    assert df.shape[0] == n_channels * n_regressors

    # Each statistic column must be fully populated; a NaN anywhere would
    # make the sum NaN and the comparison False.
    for column, lower_bound in (('se', 0), ('df', 0), ('p_value', 0),
                                ('theta', 0), ('t', -99999)):
        assert len(df[column]) == 48
        assert sum(df[column]) > lower_bound  # Check isn't nan

    contrast_matrix = np.eye(design_matrix.shape[1])
    basic_conts = {column: contrast_matrix[i]
                   for i, column in enumerate(design_matrix.columns)}
    contrast_LvR = basic_conts['e2p0'] - basic_conts['e3p0']

    contrast_columns = {
        'ch_name', 'ContrastType', 'z_score', 'stat', 'p_value', 'effect',
        'Source', 'Detector', 'Chroma', 'Significant'}

    # t-contrast, exported directly in wide format.
    contrast = mne_nirs.statistics.compute_contrast(glm_est.data,
                                                    contrast_LvR)
    df = glm_to_tidy(haemo, contrast, design_matrix)
    assert df.shape == (6, 10)
    assert set(df.columns) == contrast_columns

    # F-contrast, exported long then widened by the helper.
    contrast = mne_nirs.statistics.compute_contrast(glm_est.data,
                                                    contrast_LvR,
                                                    contrast_type='F')
    df = glm_to_tidy(haemo, contrast, design_matrix, wide=False)
    df = _tidy_long_to_wide(df)
    assert df.shape == (6, 10)
    assert set(df.columns) == contrast_columns

    # Unsupported statistic containers raise an informative error.
    with pytest.raises(TypeError, match="Unknown statistic type"):
        glm_to_tidy(haemo, [1, 2, 3], design_matrix, wide=False)
# likely to change. These functions are marked with an underscore (_)
# at the start of their name to indicate they are not public functions
# and have no promise they will be here next week.
#
# .. sidebar:: Relevant literature
#
#    Wickham, Hadley. "Tidy data." Journal of Statistical Software
#    59.10 (2014): 1-23.
#
# Here we export the data in a tidy pandas data frame. Data is exported in
# long format by default.
# However, a helper function is also provided to convert the long data to
# wide format.
# The long to wide conversion also adds some additional derived data, such as
# if a significant response (p<0.05) was observed, which sensor and detector
# is in the channel, which chroma, etc.

df = glm_to_tidy(raw_haemo, glm_est, design_matrix)
df = _tidy_long_to_wide(df)

###############################################################################
# Determine true and false positive rates
# ---------------------------------------
#
# We can query the exported data frames to determine the true and false
# positive rates. Note: optodes cover a greater region than just the
# motor cortex, so we don't expect 100% of channels to detect responses to
# the tapping, but we do expect 5% or less for the false positive rate.

# ``DataFrame.drop`` no longer accepts ``axis`` positionally (removed in
# pandas 2.0), so it is passed by keyword here.
(df.query('condition in ["Control", "Tapping/Left", "Tapping/Right"]').groupby(
    ['condition', 'Chroma']).agg(['mean'])
 .drop(['df', 'mse', 'p_value', 't'], axis=1))