Example #1
def individual_analysis(bids_path, ID):

    raw_intensity = read_raw_bids(bids_path=bids_path, verbose=False)

    # Convert signal to haemoglobin and resample
    raw_od = optical_density(raw_intensity)
    raw_haemo = beer_lambert_law(raw_od)
    raw_haemo.resample(0.3)

    # Cut out just the short channels for creating a GLM regressor
    sht_chans = get_short_channels(raw_haemo)
    raw_haemo = get_long_channels(raw_haemo)

    # Create a design matrix
    design_matrix = make_first_level_design_matrix(raw_haemo, stim_dur=5.0)

    # Append short channels mean to design matrix
    design_matrix["ShortHbO"] = np.mean(sht_chans.copy().pick(picks="hbo").get_data(), axis=0)
    design_matrix["ShortHbR"] = np.mean(sht_chans.copy().pick(picks="hbr").get_data(), axis=0)

    # Run GLM
    glm_est = run_GLM(raw_haemo, design_matrix)

    # Define channels in each region of interest
    # List the channel pairs manually
    left = [[4, 3], [1, 3], [3, 3], [1, 2], [2, 3], [1, 1]]
    right = [[8, 7], [5, 7], [7, 7], [5, 6], [6, 7], [5, 5]]
    # Then generate the correct indices for each pair
    groups = dict(
        Left_Hemisphere=picks_pair_to_idx(raw_haemo, left, on_missing='ignore'),
        Right_Hemisphere=picks_pair_to_idx(raw_haemo, right, on_missing='ignore'))

    # Extract channel metrics
    cha = glm_to_tidy(raw_haemo, glm_est, design_matrix)
    cha["ID"] = ID  # Add the participant ID to the dataframe

    # Compute region of interest results from channel data
    roi = pd.DataFrame()
    for idx, col in enumerate(design_matrix.columns):
        roi = roi.append(glm_region_of_interest(glm_est, groups, idx, col))
    roi["ID"] = ID  # Add the participant ID to the dataframe

    # Contrast left vs right tapping
    contrast_matrix = np.eye(design_matrix.shape[1])
    basic_conts = dict([(column, contrast_matrix[i])
                        for i, column in enumerate(design_matrix.columns)])
    contrast_LvR = basic_conts['Tapping/Left'] - basic_conts['Tapping/Right']
    contrast = compute_contrast(glm_est, contrast_LvR)
    con = glm_to_tidy(raw_haemo, contrast, design_matrix)
    con["ID"] = ID  # Add the participant ID to the dataframe

    # Convert to uM for nicer plotting below.
    cha["theta"] = [t * 1.e6 for t in cha["theta"]]
    roi["theta"] = [t * 1.e6 for t in roi["theta"]]
    con["effect"] = [t * 1.e6 for t in con["effect"]]

    return raw_haemo, roi, cha, con
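
# The function above returns per-participant dataframes. The block below is a
# minimal usage sketch (not part of the original example): it loops over
# subjects and concatenates the results for group-level analysis. The BIDS
# root, subject labels, task name, and file suffix are hypothetical
# placeholders.
import pandas as pd
from mne_bids import BIDSPath

df_roi = pd.DataFrame()  # Region-of-interest results across subjects
df_cha = pd.DataFrame()  # Channel-level results across subjects
df_con = pd.DataFrame()  # Contrast results across subjects

for sub in ("01", "02", "03"):  # Hypothetical subject labels
    bids_path = BIDSPath(subject=sub, task="tapping",  # Placeholder task name
                         root="/path/to/bids/dataset",  # Placeholder BIDS root
                         datatype="nirs", suffix="nirs", extension=".snirf")
    raw_haemo, roi, cha, con = individual_analysis(bids_path, sub)
    df_roi = pd.concat([df_roi, roi], ignore_index=True)
    df_cha = pd.concat([df_cha, cha], ignore_index=True)
    df_con = pd.concat([df_con, con], ignore_index=True)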
Example #2
def test_run_plot_GLM_topo():
    raw_intensity = _load_dataset()
    raw_intensity.crop(450, 600)  # Keep the test fast

    design_matrix = make_first_level_design_matrix(raw_intensity,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    raw_od = mne.preprocessing.nirs.optical_density(raw_intensity)
    raw_haemo = mne.preprocessing.nirs.beer_lambert_law(raw_od)
    glm_estimates = run_GLM(raw_haemo, design_matrix)
    fig = plot_glm_topo(raw_haemo, glm_estimates, design_matrix)
    # 5 design matrix columns (A, B, C, drift, constant) * two chroma + 2 x colorbar
    assert len(fig.axes) == 12

    # Two conditions * two chroma + 2 x colorbar
    fig = plot_glm_topo(raw_haemo,
                        glm_estimates,
                        design_matrix,
                        requested_conditions=['A', 'B'])
    assert len(fig.axes) == 6

    # Two conditions * one chroma + 1 x colorbar
    fig = plot_glm_topo(raw_haemo.copy().pick(picks="hbo"),
                        glm_estimates,
                        design_matrix,
                        requested_conditions=['A', 'B'])
    assert len(fig.axes) == 3

    # One condition * two chroma + 2 x colorbar
    fig = plot_glm_topo(raw_haemo,
                        glm_estimates,
                        design_matrix,
                        requested_conditions=['A'])
    assert len(fig.axes) == 4

    # One condition * one chroma + 1 x colorbar
    fig = plot_glm_topo(raw_haemo.copy().pick(picks="hbo"),
                        glm_estimates,
                        design_matrix,
                        requested_conditions=['A'])
    assert len(fig.axes) == 2

    # One condition * one chroma + 0 x colorbar
    fig = plot_glm_topo(raw_haemo.copy().pick(picks="hbo"),
                        glm_estimates,
                        design_matrix,
                        colorbar=False,
                        requested_conditions=['A'])
    assert len(fig.axes) == 1

    # Ensure an error is raised if the GLM estimates are missing channels from raw
    glm_estimates_subset = {
        a: glm_estimates[a]
        for a in raw_haemo.ch_names[0:3]
    }
    with pytest.raises(RuntimeError, match="does not match regression"):
        plot_glm_topo(raw_haemo, glm_estimates_subset, design_matrix)
Example #3
def test_GLM_system_test():
    fnirs_data_folder = mne.datasets.fnirs_motor.data_path()
    fnirs_raw_dir = os.path.join(fnirs_data_folder, 'Participant-1')
    raw_intensity = mne.io.read_raw_nirx(fnirs_raw_dir).load_data()
    raw_intensity.resample(1.0)
    new_des = [des for des in raw_intensity.annotations.description]
    new_des = ['Control' if x == "1.0" else x for x in new_des]
    new_des = ['Tapping/Left' if x == "2.0" else x for x in new_des]
    new_des = ['Tapping/Right' if x == "3.0" else x for x in new_des]
    annot = mne.Annotations(raw_intensity.annotations.onset,
                            raw_intensity.annotations.duration, new_des)
    raw_intensity.set_annotations(annot)
    raw_intensity.annotations.crop(35, 2967)
    raw_od = mne.preprocessing.nirs.optical_density(raw_intensity)
    raw_haemo = mne.preprocessing.nirs.beer_lambert_law(raw_od)
    short_chs = get_short_channels(raw_haemo)
    raw_haemo = get_long_channels(raw_haemo)
    design_matrix = make_first_level_design_matrix(raw_intensity,
                                                   hrf_model='spm',
                                                   stim_dur=5.0,
                                                   drift_order=3,
                                                   drift_model='polynomial')
    design_matrix["ShortHbO"] = np.mean(
        short_chs.copy().pick(picks="hbo").get_data(), axis=0)
    design_matrix["ShortHbR"] = np.mean(
        short_chs.copy().pick(picks="hbr").get_data(), axis=0)
    glm_est = run_GLM(raw_haemo, design_matrix)
    df = glm_to_tidy(raw_haemo, glm_est, design_matrix)
    df = _tidy_long_to_wide(df)
    a = (df.query('condition in ["Control"]').groupby(['condition', 'Chroma'
                                                       ]).agg(['mean']))
    # Make sure false positive rate is less than 5%
    assert a["Significant"].values[0] < 0.05
    assert a["Significant"].values[1] < 0.05
    a = (df.query('condition in ["Tapping/Left", "Tapping/Right"]').groupby(
        ['condition', 'Chroma']).agg(['mean']))
    # Fairly arbitrary cutoff here, but it's more than 5%
    assert a["Significant"].values[0] > 0.7
    assert a["Significant"].values[1] > 0.7
    assert a["Significant"].values[2] > 0.7
    assert a["Significant"].values[3] > 0.7

    left = [[1, 1], [1, 2], [1, 3], [2, 1], [2, 3], [2, 4], [3, 2], [3, 3],
            [4, 3], [4, 4]]
    right = [[5, 5], [5, 6], [5, 7], [6, 5], [6, 7], [6, 8], [7, 6], [7, 7],
             [8, 7], [8, 8]]

    groups = dict(Left_ROI=picks_pair_to_idx(raw_haemo, left),
                  Right_ROI=picks_pair_to_idx(raw_haemo, right))

    df = pd.DataFrame()
    for idx, col in enumerate(design_matrix.columns[:3]):
        df = df.append(glm_region_of_interest(glm_est, groups, idx, col))

    assert df.shape == (12, 8)
Example #4
File: test_io.py  Project: mshader/mne-nirs
def test_io():
    num_chans = 6
    fnirs_data_folder = mne.datasets.fnirs_motor.data_path()
    fnirs_raw_dir = os.path.join(fnirs_data_folder, 'Participant-1')
    raw_intensity = mne.io.read_raw_nirx(fnirs_raw_dir).load_data()
    raw_intensity.resample(0.2)
    raw_od = mne.preprocessing.nirs.optical_density(raw_intensity)
    raw_haemo = mne.preprocessing.nirs.beer_lambert_law(raw_od)
    raw_haemo = mne_nirs.channels.get_long_channels(raw_haemo)
    raw_haemo.pick(picks=range(num_chans))
    design_matrix = make_first_level_design_matrix(raw_intensity,
                                                   hrf_model='spm',
                                                   stim_dur=5.0,
                                                   drift_order=3,
                                                   drift_model='polynomial')
    glm_est = run_GLM(raw_haemo, design_matrix)
    df = glm_to_tidy(raw_haemo, glm_est, design_matrix)
    df = _tidy_long_to_wide(df)
    assert df.shape == (48, 11)
    assert set(df.columns) == {
        'ch_name', 'condition', 'df', 'mse', 'p_value', 't', 'theta', 'Source',
        'Detector', 'Chroma', 'Significant'
    }
    num_conds = 8  # triggers (1, 2, 3, 15) + 3 drifts + constant
    assert df.shape[0] == num_chans * num_conds

    contrast_matrix = np.eye(design_matrix.shape[1])
    basic_conts = dict([(column, contrast_matrix[i])
                        for i, column in enumerate(design_matrix.columns)])
    contrast_LvR = basic_conts['2.0'] - basic_conts['3.0']

    contrast = mne_nirs.statistics.compute_contrast(glm_est, contrast_LvR)
    df = glm_to_tidy(raw_haemo, contrast, design_matrix)
    df = _tidy_long_to_wide(df)
    assert df.shape == (6, 10)
    assert set(df.columns) == {
        'ch_name', 'ContrastType', 'z_score', 'stat', 'p_value', 'effect',
        'Source', 'Detector', 'Chroma', 'Significant'
    }

    contrast = mne_nirs.statistics.compute_contrast(glm_est,
                                                    contrast_LvR,
                                                    contrast_type='F')
    df = glm_to_tidy(raw_haemo, contrast, design_matrix)
    df = _tidy_long_to_wide(df)
    assert df.shape == (6, 10)
    assert set(df.columns) == {
        'ch_name', 'ContrastType', 'z_score', 'stat', 'p_value', 'effect',
        'Source', 'Detector', 'Chroma', 'Significant'
    }
Example #5
def test_run_GLM():
    raw = simulate_nirs_raw(sig_dur=200, stim_dur=5.)
    design_matrix = make_first_level_design_matrix(raw,
                                                   stim_dur=5.,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    glm_estimates = run_GLM(raw, design_matrix)

    assert len(glm_estimates) == len(raw.ch_names)

    # Check the estimate is correct within 10% error
    assert abs(glm_estimates["Simulated"].theta[0] - 1.e-6) < 0.1e-6

    # ensure we return the same type as nilearn to encourage compatibility
    _, ni_est = nilearn.glm.first_level.run_glm(
        raw.get_data(0).T, design_matrix.values)
    assert type(ni_est) == type(glm_estimates)
Example #6
def test_run_plot_GLM_contrast_topo():
    raw_intensity = _load_dataset()
    raw_intensity.crop(450, 600)  # Keep the test fast

    design_matrix = make_first_level_design_matrix(raw_intensity,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    raw_od = mne.preprocessing.nirs.optical_density(raw_intensity)
    raw_haemo = mne.preprocessing.nirs.beer_lambert_law(raw_od)
    glm_est = run_GLM(raw_haemo, design_matrix)
    contrast_matrix = np.eye(design_matrix.shape[1])
    basic_conts = dict([(column, contrast_matrix[i])
                        for i, column in enumerate(design_matrix.columns)])
    contrast_LvR = basic_conts['A'] - basic_conts['B']
    contrast = mne_nirs.statistics.compute_contrast(glm_est, contrast_LvR)
    fig = mne_nirs.visualisation.plot_glm_contrast_topo(raw_haemo, contrast)
    assert len(fig.axes) == 3
Example #7
def test_simulate_NIRS():

    raw = simulate_nirs_raw(sfreq=3.,
                            amplitude=1.,
                            sig_dur=300.,
                            stim_dur=5.,
                            isi_min=15.,
                            isi_max=45.)
    assert 'hbo' in raw
    assert raw.info['sfreq'] == 3.
    assert raw.get_data().shape == (1, 900)
    assert np.max(raw.get_data()) < 1.2 * 1.e-6
    assert raw.annotations.description[0] == 'A'
    assert raw.annotations.duration[0] == 5
    assert np.min(np.diff(raw.annotations.onset)) > 15. + 5.
    assert np.max(np.diff(raw.annotations.onset)) < 45. + 5.

    with pytest.raises(AssertionError, match='Same number of'):
        raw = simulate_nirs_raw(sfreq=3.,
                                amplitude=[1., 2.],
                                sig_dur=300.,
                                stim_dur=5.,
                                isi_min=15.,
                                isi_max=45.)

    raw = simulate_nirs_raw(sfreq=3.,
                            amplitude=[0., 2., 4.],
                            annot_desc=['Control', 'Cond_A', 'Cond_B'],
                            stim_dur=[5, 5, 5],
                            sig_dur=900.,
                            isi_min=15.,
                            isi_max=45.)
    design_matrix = make_first_level_design_matrix(raw,
                                                   stim_dur=5.0,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    glm_est = run_GLM(raw, design_matrix)
    df = glm_to_tidy(raw, glm_est, design_matrix)
    df = _tidy_long_to_wide(df)

    assert df.query("condition in ['Control']")['theta'].values[0] == \
        pytest.approx(0)
    assert df.query("condition in ['Cond_A']")['theta'].values[0] == \
        pytest.approx(2e-6)
    assert df.query("condition in ['Cond_B']")['theta'].values[0] == \
        pytest.approx(4e-6)
Example #8
def test_run_plot_GLM_topo():
    raw_intensity = _load_dataset()
    raw_intensity.crop(450, 600)  # Keep the test fast

    design_matrix = make_first_level_design_matrix(raw_intensity,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    raw_od = mne.preprocessing.nirs.optical_density(raw_intensity)
    raw_haemo = mne.preprocessing.nirs.beer_lambert_law(raw_od)
    glm_estimates = run_GLM(raw_haemo, design_matrix)
    fig = plot_glm_topo(raw_haemo, glm_estimates, design_matrix)
    # 5 design matrix columns (A, B, C, drift, constant) * two chroma + 2 x colorbar
    assert len(fig.axes) == 12

    fig = plot_glm_topo(raw_haemo, glm_estimates, design_matrix,
                        requested_conditions=['A', 'B'])
    # Two conditions * two chroma + 2xcolorbar
    assert len(fig.axes) == 6
Example #9
def test_run_GLM():
    raw = simulate_nirs_raw(sig_dur=200, stim_dur=5.)
    design_matrix = make_first_level_design_matrix(raw,
                                                   stim_dur=5.,
                                                   drift_order=1,
                                                   drift_model='polynomial')
    glm_estimates = run_glm(raw, design_matrix)

    # Test backwards compatibility
    with pytest.deprecated_call(match='more comprehensive'):
        old_res = run_GLM(raw, design_matrix)
    assert old_res.keys() == glm_estimates.data.keys()
    assert (old_res["Simulated"].theta == glm_estimates.data["Simulated"].theta
            ).all()

    assert len(glm_estimates) == len(raw.ch_names)

    # Check the estimate is correct within 10% error
    assert abs(glm_estimates.pick("Simulated").theta()[0][0] - 1.e-6) < 0.1e-6

    # ensure we return the same type as nilearn to encourage compatibility
    _, ni_est = nilearn.glm.first_level.run_glm(
        raw.get_data(0).T, design_matrix.values)
    assert isinstance(glm_estimates._data, type(ni_est))
Example #10
                                               drift_model='polynomial')
fig, ax1 = plt.subplots(figsize=(10, 6), nrows=1, ncols=1)
fig = plot_design_matrix(design_matrix, ax=ax1)

###############################################################################
# Estimate response on clean data
# -------------------------------
#
# Here we run the GLM analysis on the clean data.
# The design matrix has three columns, so we get an estimate for our simulated
# event, the first-order drift, and the constant.
# We see that the estimate of the first component is 4e-6 (4 uM),
# which was the amplitude we used in the simulation.
# We also see that the mean square error of the model fit is close to zero.

glm_est = run_GLM(raw, design_matrix)

print("Estimate:", glm_est['Simulated'].theta[0], "  MSE:",
      glm_est['Simulated'].MSE, "  Error (uM):",
      1e6 * (glm_est['Simulated'].theta[0] - amp * 1e-6))

###############################################################################
# Simulate noisy NIRS data (white)
# --------------------------------
#
# Real data contains noise. Here we add white noise; this noise is not realistic,
# but it serves as a reference point for evaluating the estimation process.
# We run the GLM analysis exactly as in the previous section
# and plot the noisy data and the fitted GLM model.
# We print the response estimate and see that it is close to, but not exactly,
# the simulated value, and we observe that the mean square error is similar to
# the added noise.
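
###############################################################################
# The code for this step is not shown in the excerpt above; the block below is
# a minimal sketch of what it could look like. The noise level is an assumed
# value, and raw, design_matrix, amp and run_GLM refer to the objects used
# earlier in this example.

import numpy as np

np.random.seed(1)  # Make the added noise reproducible
raw_noisy = raw.copy()  # Keep the clean simulation intact
# Add white noise to the simulated channel (assumed standard deviation)
raw_noisy._data += np.random.normal(0., 1.e-7, raw_noisy._data.shape)

glm_est_noisy = run_GLM(raw_noisy, design_matrix)

print("Estimate:", glm_est_noisy['Simulated'].theta[0], "  MSE:",
      glm_est_noisy['Simulated'].MSE, "  Error (uM):",
      1e6 * (glm_est_noisy['Simulated'].theta[0] - amp * 1e-6))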
Example #11
#
# Fit GLM to subset of data and estimate response for each experimental condition
# -------------------------------------------------------------------------------
#
# .. sidebar:: Relevant literature
#
#    Huppert TJ. Commentary on the statistical properties of noise and its
#    implication on general linear models in functional near-infrared
#    spectroscopy. Neurophotonics. 2016;3(1)
#
# We run a GLM fit for the data and design matrix.
# First we analyse just the first two channels, which correspond to the HbO and
# HbR signals of a single source-detector pair.

data_subset = raw_haemo.copy().pick(picks=range(2))
glm_est = run_GLM(data_subset, design_matrix)

###############################################################################
#
# We then display the results. Note that the control condition sits
# around zero, and that the HbO estimate is positive and larger than the HbR
# estimate; this is to be expected.
# Further, we note that for this channel the response to tapping with the
# right hand is larger than with the left, and the values are similar to what
# is seen in the epoching tutorial.

plt.scatter(design_matrix.columns[:3], glm_est['S1_D1 hbo'].theta[:3] * 1e6)
plt.scatter(design_matrix.columns[:3], glm_est['S1_D1 hbr'].theta[:3] * 1e6)
plt.xlabel("Experiment Condition")
plt.ylabel("Haemoglobin (μM)")
plt.legend(["Oxyhaemoglobin", "Deoxyhaemoglobin"])