Example #1
import numpy as np
from numpy.testing import assert_almost_equal

# _sample_condition is a private Nilearn helper; the import path below matches
# recent Nilearn releases but may differ in older versions.
from nilearn.glm.first_level.hemodynamic_models import _sample_condition


def test_sample_condition_3():
    """ Test the experimental condition sampling -- oversampling=10
    """
    condition = ([1, 20, 36.5], [2, 2, 2], [1, 1, 1])
    frame_times = np.linspace(0, 49, 50)
    reg, rf = _sample_condition(condition, frame_times, oversampling=10,
                                min_onset=0)
    assert_almost_equal(reg.sum(), 60.)
    assert reg[10] == 1
    assert reg[380] == 1
    assert reg[210] == 1
    assert np.sum(reg > 0) == 60
    # check robustness to non-int oversampling
    reg_, rf_ = _sample_condition(condition, frame_times, oversampling=10.,
                                  min_onset=0)
    assert_almost_equal(reg, reg_)
Example #2
def test_sample_condition_1():
    """ Test that the experimental condition is correctly sampled
    """
    condition = ([1, 20, 36.5], [0, 0, 0], [1, 1, 1])
    frame_times = np.linspace(0, 49, 50)
    reg, rf = _sample_condition(condition, frame_times, oversampling=1,
                                min_onset=0)
    assert reg.sum() == 3
    assert reg[1] == 1
    assert reg[20] == 1
    assert reg[37] == 1

    reg, rf = _sample_condition(condition, frame_times, oversampling=1)
    assert reg.sum() == 3
    assert reg[25] == 1
    assert reg[44] == 1
    assert reg[61] == 1
Example #3
def test_sample_condition_5():
    """ Test the experimental condition sampling -- negative onset
    """
    condition = ([-10, 0, 36.5], [2, 2, 2], [1., -1., 5.])
    frame_times = np.linspace(0, 49, 50)
    reg, rf = _sample_condition(condition, frame_times, oversampling=1)
    assert reg.sum() == 10
    assert reg[14] == 1.
    assert reg[24] == -1.
    assert reg[61] == 5.
Example #4
def test_sample_condition_7():
    """ Test the experimental condition sampling -- different onsets, overlapping offsets
    """
    condition = ([0, 10, 20], [11, 1, 1], [1., 1., 1.])
    frame_times = np.linspace(0, 49, 50)
    reg, rf = _sample_condition(condition, frame_times, oversampling=1)
    assert reg.sum() == 13
    assert reg[24] == 1.
    assert reg[34] == 2.
    assert reg[61] == 0.
Example #5
def test_sample_condition_6():
    """ Test the experimental condition sampling -- overalapping onsets, different durations
    """
    condition = ([0, 0, 10], [1, 2, 1], [1., 1., 1.])
    frame_times = np.linspace(0, 49, 50)
    reg, rf = _sample_condition(condition, frame_times, oversampling=1)
    assert reg.sum() == 4
    assert reg[24] == 2.
    assert reg[34] == 1.
    assert reg[61] == 0.
Example #6
def test_sample_condition_2():
    """ Test the experimental condition sampling -- onset = 0
    """
    condition = ([0, 20, 36.5], [2, 2, 2], [1, 1, 1])
    frame_times = np.linspace(0, 49, 50)
    reg, rf = _sample_condition(condition, frame_times, oversampling=1,
                                min_onset=-10)
    assert reg.sum() == 6
    assert reg[10] == 1
    assert reg[48] == 1
    assert reg[31] == 1
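Taken together, the examples above all exercise the same call pattern: the condition is a tuple of (onsets, durations, amplitudes), and _sample_condition returns the event regressor sampled on an oversampled time grid together with that grid. A minimal sketch of this pattern follows; the import path of the private helper is an assumption and may differ across Nilearn versions, and the event timings are illustrative only.

import numpy as np

from nilearn.glm.first_level.hemodynamic_models import _sample_condition

# One event: onset at 5 s, duration 3 s, unit amplitude
condition = ([5.], [3.], [1.])
frame_times = np.linspace(0, 29, 30)   # 30 scans at TR = 1 s

reg, hr_frame_times = _sample_condition(condition, frame_times,
                                        oversampling=1, min_onset=0)

print(reg.shape == hr_frame_times.shape)  # regressor lives on the oversampled grid
print(reg.sum())                          # ~3: one non-zero sample per second of the event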
Example #7
import numpy as np
import pandas as pd
from scipy.interpolate import interp1d

from nilearn.glm.first_level import make_first_level_design_matrix
# check_events and _sample_condition are private Nilearn helpers; their exact
# module paths may differ across Nilearn versions.
from nilearn.glm.first_level.experimental_paradigm import check_events
from nilearn.glm.first_level.hemodynamic_models import _sample_condition

# HRFS_HR (the high-resolution Kendrick Kay HRF set) is assumed to be defined
# elsewhere in this module.


def create_design_matrix(tr,
                         frame_times,
                         events,
                         hrf_model='kay',
                         hrf_idx=None):
    """ Creates a design matrix based on an HRF from Kendrick Kay's set
    or a default one from Nilearn. """

    # This is to keep oversampling consistent across hrf_models
    hrf_oversampling = 10
    design_oversampling = tr / (0.1 / hrf_oversampling)

    if hrf_model != 'kay':  # just use Nilearn!
        return make_first_level_design_matrix(frame_times,
                                              events,
                                              drift_model=None,
                                              min_onset=0,
                                              oversampling=design_oversampling,
                                              hrf_model=hrf_model)

    if hrf_model == 'kay':
        if hrf_idx is None:  # 20 different DMs (based on different HRFs)
            to_iter = range(HRFS_HR.shape[1])
        else:  # use the supplied HRF idx (e.g., 5)
            to_iter = [hrf_idx]

        dms = []  # will store all design matrices
        for hrf_idx in to_iter:  # iterate across all HRFs
            hrf = HRFS_HR[:, hrf_idx]
            # scale HRF to have the same max as the glover HRF
            # makes comparison easier
            hrf /= (hrf.max() / 0.249007)

            # Get info
            trial_type, onset, duration, modulation = check_events(events)

            # Pre-allocate design matrix; note: columns are alphabetically sorted
            X = np.zeros((frame_times.size, np.unique(trial_type).size))
            uniq_trial_types = np.unique(trial_type)  # this is sorted

            # Create separate regressor for each unique trial type
            # Code copied from Nilearn glm module
            for i, condition in enumerate(uniq_trial_types):
                condition_mask = (trial_type == condition)
                exp_condition = (onset[condition_mask],
                                 duration[condition_mask],
                                 modulation[condition_mask])
                # Create high resolution regressor/frame times
                hr_regressor, hr_frame_times = _sample_condition(
                    exp_condition, frame_times, design_oversampling, 0)

                # Convolve with HRF and downsample
                conv_reg = np.convolve(hr_regressor, hrf)[:hr_regressor.size]
                # linear interpolation for now ...
                f = interp1d(hr_frame_times, conv_reg)
                X[:, i] = f(frame_times).T

            # Note to self: do not scale such that max(X, axis=0) is 1, because you'll lose info
            # about predictor variance!
            dm = pd.DataFrame(X, columns=uniq_trial_types, index=frame_times)
            dm['constant'] = 1  # and intercept/constant
            dms.append(dm)

        if len(dms) == 1:
            # Just return single design matrix
            dms = dms[0]

        return dms
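A usage sketch for the function above, assuming HRFS_HR has been loaded elsewhere in the module and that events follows the usual Nilearn layout with onset, duration and trial_type columns; the TR, run length and event timings here are illustrative only.

import numpy as np
import pandas as pd

tr = 2.0                                # illustrative repetition time (s)
n_scans = 100
frame_times = np.arange(n_scans) * tr   # acquisition time of each volume

events = pd.DataFrame({
    'onset': [10., 50., 110.],
    'duration': [5., 5., 5.],
    'trial_type': ['face', 'house', 'face'],
})

# Nilearn path: canonical Glover HRF, no Kay HRFs involved
dm_glover = create_design_matrix(tr, frame_times, events, hrf_model='glover')

# Kay path with a single HRF index -> returns one DataFrame instead of a list of 20
dm_kay = create_design_matrix(tr, frame_times, events, hrf_model='kay', hrf_idx=5)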