コード例 #1
0
ファイル: test_modelgen.py プロジェクト: Alunisiira/nipype
def test_modelgen_spm_concat():
    """Nose generator test: SpecifySPMModel run concatenation, output-unit
    conversion, and per-run sessions when concatenation is disabled."""
    tempdir = mkdtemp()
    run_files = [os.path.join(tempdir, name) for name in ('test1.nii', 'test2.nii')]
    for path in run_files:
        Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(path)
    s = SpecifySPMModel()
    s.inputs.input_units = 'secs'
    s.inputs.concatenate_runs = True
    s.inputs.output_units = 'secs'
    yield assert_equal, s.inputs.output_units, 'secs'
    s.inputs.functional_runs = run_files
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 170]], durations=[[1]]),
            Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    session = res.outputs.session_info
    yield assert_equal, len(session), 1
    yield assert_equal, len(session[0]['regress']), 1
    yield assert_equal, np.sum(session[0]['regress'][0]['val']), 30
    yield assert_equal, len(session[0]['cond']), 1
    concat_onsets = np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0])
    yield assert_almost_equal, np.array(session[0]['cond'][0]['onset']), concat_onsets
    # Same inputs, but onsets reported in scan units (seconds / TR).
    s.inputs.output_units = 'scans'
    yield assert_equal, s.inputs.output_units, 'scans'
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][0]['onset']), concat_onsets / 6
    # Without concatenation, session 0 holds only the first run's onsets.
    s.inputs.concatenate_runs = False
    s.inputs.subject_info = deepcopy(info)
    s.inputs.output_units = 'secs'
    res = s.run()
    yield assert_almost_equal, np.array(res.outputs.session_info[0]['cond'][0]['onset']), np.array([2.0, 50.0, 100.0, 170.0])
    rmtree(tempdir)
コード例 #2
0
ファイル: test_modelgen.py プロジェクト: vsaase/nipype
def test_modelgen_spm_concat():
    """Nose generator test for SpecifySPMModel covering run concatenation,
    output units, and a variable number of events per run."""
    tempdir = mkdtemp()
    fname1 = os.path.join(tempdir, 'test1.nii')
    fname2 = os.path.join(tempdir, 'test2.nii')
    for fname in (fname1, fname2):
        Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(fname)
    # Case 1: a single duration shared by every onset of a condition.
    s = SpecifySPMModel()
    s.inputs.input_units = 'secs'
    s.inputs.concatenate_runs = True
    s.inputs.output_units = 'secs'
    yield assert_equal, s.inputs.output_units, 'secs'
    s.inputs.functional_runs = [fname1, fname2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 170]],
                  durations=[[1]]),
            Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]],
                  durations=[[1]])]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    yield assert_equal, len(sess), 1
    yield assert_equal, len(sess[0]['regress']), 1
    yield assert_equal, np.sum(sess[0]['regress'][0]['val']), 30
    yield assert_equal, len(sess[0]['cond']), 1
    concat_onsets = np.array([2.0, 50.0, 100.0, 170.0,
                              210.0, 220.0, 280.0, 330.0])
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['onset']), concat_onsets
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['duration']), np.ones(8)
    # Case 2: report onsets in scans rather than seconds.
    s.inputs.output_units = 'scans'
    yield assert_equal, s.inputs.output_units, 'scans'
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), concat_onsets / 6
    # Case 3: no concatenation, seconds as output units.
    s.inputs.concatenate_runs = False
    s.inputs.subject_info = deepcopy(info)
    s.inputs.output_units = 'secs'
    res = s.run()
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), np.array([2.0, 50.0, 100.0, 170.0])
    # Case 4: variable number of events across separate runs, sometimes one.
    fname3 = os.path.join(tempdir, 'test3.nii')
    Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(fname3)
    s.inputs.functional_runs = [fname1, fname2, fname3]
    info = [Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2]],
                  durations=[[1, 1], [1]]),
            Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2, 4]],
                  durations=[[1, 1], [1, 1]]),
            Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2]],
                  durations=[[1, 1], [1]])]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['duration']), np.array([1., 1.])
    yield assert_almost_equal, np.array(sess[0]['cond'][1]['duration']), np.array([1.])
    yield assert_almost_equal, np.array(sess[1]['cond'][1]['duration']), np.array([1., 1.])
    yield assert_almost_equal, np.array(sess[2]['cond'][1]['duration']), np.array([1.])
    # Case 5: the same ragged sessions, concatenated into one.
    s.inputs.concatenate_runs = True
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['duration']), np.ones(6)
    yield assert_almost_equal, np.array(sess[0]['cond'][1]['duration']), np.ones(4)
    rmtree(tempdir)
コード例 #3
0
def test_modelgen_spm_concat():
    """Check onset concatenation and output-unit conversion in SpecifySPMModel."""
    tempdir = mkdtemp()
    run_files = []
    for idx in (1, 2):
        path = os.path.join(tempdir, 'test%d.nii' % idx)
        Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(path)
        run_files.append(path)
    s = SpecifySPMModel()
    s.inputs.input_units = 'secs'
    s.inputs.concatenate_runs = True
    s.inputs.output_units = 'secs'
    yield assert_equal, s.inputs.output_units, 'secs'
    s.inputs.functional_runs = run_files
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 170]], durations=[[1]]),
            Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]], durations=[[1]])]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    yield assert_equal, len(res.outputs.session_info), 1
    yield assert_equal, len(res.outputs.session_info[0]['regress']), 1
    yield assert_equal, np.sum(res.outputs.session_info[0]['regress'][0]['val']), 30
    yield assert_equal, len(res.outputs.session_info[0]['cond']), 1
    onsets_concat = np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0])
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), onsets_concat
    # Onsets in scan units: the concatenated seconds divided by the TR.
    s.inputs.output_units = 'scans'
    yield assert_equal, s.inputs.output_units, 'scans'
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), onsets_concat / 6
    # No concatenation: only the first run's onsets in session 0.
    s.inputs.concatenate_runs = False
    s.inputs.subject_info = deepcopy(info)
    s.inputs.output_units = 'secs'
    res = s.run()
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), np.array([2.0, 50.0, 100.0, 170.0])
    rmtree(tempdir)
コード例 #4
0
ファイル: test_modelgen.py プロジェクト: colinbuchanan/nipype
def test_modelgen_spm_concat():
    """Generator test: SpecifySPMModel merges two runs' onsets into one session.

    Two 50-volume random NIfTI runs are created, the model is specified with
    concatenate_runs=True and 'scans' output units, and the merged onset list
    of the single resulting session is checked.
    """
    tempdir = mkdtemp()
    filename1 = os.path.join(tempdir, 'test1.nii')
    filename2 = os.path.join(tempdir, 'test2.nii')
    Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename1)
    Nifti1Image(np.random.rand(10, 10, 10, 50), np.eye(4)).to_filename(filename2)
    s = SpecifySPMModel()
    s.inputs.input_units = 'secs'
    s.inputs.concatenate_runs = True
    # NOTE: 'output_units' was previously set twice in a row (a plain
    # assignment immediately followed by an equivalent setattr call);
    # a single assignment suffices.
    s.inputs.output_units = 'scans'
    yield assert_equal, s.inputs.output_units, 'scans'
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    # Optional Bunch fields (amplitudes, pmod, ...) are explicitly None.
    info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 180]],
                  durations=[[1]], amplitudes=None, pmod=None,
                  regressors=None, regressor_names=None, tmod=None),
            Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]],
                  durations=[[1]], amplitudes=None, pmod=None,
                  regressors=None, regressor_names=None, tmod=None)]
    # Copy so any in-place modification by the interface cannot leak back
    # into `info` (matches the other modelgen tests in this file).
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    yield assert_equal, len(res.outputs.session_info), 1
    yield assert_equal, len(res.outputs.session_info[0]['regress']), 1
    yield assert_equal, len(res.outputs.session_info[0]['cond']), 1
    # Second-run onsets are shifted by 50 volumes * 6 s = 300 s.
    yield assert_almost_equal, res.outputs.session_info[0]['cond'][0]['onset'], [2.0, 50.0, 100.0, 180.0, 330.0, 340.0, 400.0, 450.0]
    rmtree(tempdir)
コード例 #5
0
def spm_model_specification(behavioral_data, fmri_sessions, onset_name,
                            condition_name, duration_name, time_repetition,
                            realignment_parameters, delimiter, start,
                            concatenate_runs, high_pass_filter_cutoff,
                            output_directory):
    """ Specify the SPM model used in the GLM and estimate the design matrix.

    .. note::

        * `fmri_sessions` and `behavioral_data` must have the same number
          of elements.
        * `onsets` and `durations` values must have the same units as the
          TR used in the processings (ie. seconds).

    <unit>
        <input name="behavioral_data" type="List" content="File" desc="list of
            .csv session behavioral data." />
        <input name="fmri_sessions" type="List" content="File" desc="list of
            path to fMRI sessions." />
        <input name="onset_name" type="String" desc="the name of the column
            in the `behavioral_data` file containing the onsets."/>
        <input name="condition_name" type="String" desc="the name of the
            column in the `behavioral_data` file containing the conditions."/>
        <input name="duration_name" type="String" desc="the name of the column
            in the `behavioral_data` file containing the condition durations.
            "/>
        <input name="time_repetition" type="Float" desc="the repetition time
            in seconds (in seconds)."/>
        <input name="realignment_parameters" type="File" desc="path to the SPM
            realign output parameters."/>
        <input name="delimiter" type="String" desc="separator used to split
            the `behavioral_data` file."/>
        <input name="start" type="Int" desc="line from which we start reading
            the `behavioral_data` file."/>
        <input name="concatenate_runs" type="Bool" desc="concatenate all runs
            to look like a single session."/>
        <input name="high_pass_filter_cutoff" type="Float" desc="high-pass
            filter cutoff in secs."/>
        <input name="output_directory" type="Directory" desc="Where to store
            the output file"/>
        <output name="session_info" type="Any" desc="session info to leverage
            the first level design."/>
        <output name="model_specifications" type="File" desc="file containing
            all model specifications" />
    </unit>
    """
    # Local imports
    from nipype.interfaces.base import Bunch
    from nipype.algorithms.modelgen import SpecifySPMModel

    # One behavioral file is required for each fMRI session.
    if len(behavioral_data) != len(fmri_sessions):
        raise ValueError("One behavioral data per session is required, "
                         "got {0} behaviral data and {1} session.".format(
                             len(behavioral_data), len(fmri_sessions)))

    # Build one Bunch of conditions/onsets/durations per session.
    info = []
    for csvfile in behavioral_data:

        # Parse the behavioural file
        all_onsets = get_onsets(csvfile,
                                condition_name, onset_name, duration_name,
                                delimiter, start)

        # Create a nipype Bunch (dictionary-like) structure.
        # BUGFIX: the loop variable used to be named 'condition_name',
        # shadowing the function parameter -- every session after the first
        # was then parsed with the previous session's last condition value
        # instead of the requested column name.
        conditions = []
        onsets = []
        durations = []
        for cond_name, item in all_onsets.items():
            conditions.append(cond_name)
            onsets.append([float(x) for x in item["onsets"]])
            durations.append([float(x) for x in item["durations"]])
        info.append(
            Bunch(conditions=conditions, onsets=onsets, durations=durations))

    # Make a model specification compatible with spm designer
    spec_interface = SpecifySPMModel(
        concatenate_runs=concatenate_runs,
        input_units="secs",
        output_units="secs",
        time_repetition=time_repetition,
        high_pass_filter_cutoff=high_pass_filter_cutoff,
        functional_runs=fmri_sessions,
        subject_info=info,
        realignment_parameters=realignment_parameters)
    spec_interface.run()

    # The interface returns numpy double-precision scalars; cast them to
    # builtin floats explicitly so the structure is JSON-serializable.
    def cast_to_float(obj):
        """ Recursively cast numpy.double items to builtin floats.

        Parameters
        ----------
        obj: object
            a generic python object.

        Returns
        -------
        out: object
            the float-casted input object.
        """
        # Deal with dictionary
        if isinstance(obj, dict):
            return {key: cast_to_float(val) for key, val in obj.items()}

        # Deal with tuple and list
        if isinstance(obj, (list, tuple)):
            out = [cast_to_float(val) for val in obj]
            return tuple(out) if isinstance(obj, tuple) else out

        # numpy.double subclasses float, so this also converts numpy scalars.
        if isinstance(obj, float):
            return float(obj)
        return obj

    session_info = cast_to_float(
        spec_interface.aggregate_outputs().get()["session_info"])

    # Persist the design parameters as JSON next to the other outputs.
    model_specifications = os.path.join(output_directory,
                                        "model_specifications.json")
    with open(model_specifications, "w") as _file:
        json.dump(session_info, _file, indent=4)

    return session_info, model_specifications
コード例 #6
0
def test_modelgen_spm_concat(tmpdir):
    """Pytest port: SpecifySPMModel concatenation, output units and runs
    with differing event counts per condition."""
    filename1 = tmpdir.join("test1.nii").strpath
    filename2 = tmpdir.join("test2.nii").strpath
    for path in (filename1, filename2):
        Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(path)

    # One shared duration for all onsets of a condition.
    s = SpecifySPMModel()
    s.inputs.input_units = "secs"
    s.inputs.concatenate_runs = True
    s.inputs.output_units = "secs"
    assert s.inputs.output_units == "secs"
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.0
    info = [
        Bunch(conditions=["cond1"], onsets=[[2, 50, 100, 170]], durations=[[1]]),
        Bunch(conditions=["cond1"], onsets=[[30, 40, 100, 150]], durations=[[1]]),
    ]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    assert len(sess) == 1
    assert len(sess[0]["regress"]) == 1
    assert np.sum(sess[0]["regress"][0]["val"]) == 30
    assert len(sess[0]["cond"]) == 1
    merged_onsets = np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0])
    npt.assert_almost_equal(np.array(sess[0]["cond"][0]["onset"]), merged_onsets)
    npt.assert_almost_equal(np.array(sess[0]["cond"][0]["duration"]), np.ones(8))

    # Onsets expressed in scans (seconds divided by the TR).
    s.inputs.output_units = "scans"
    assert s.inputs.output_units == "scans"
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[0]["cond"][0]["onset"]),
        merged_onsets / 6)

    # No concatenation, seconds out: only the first run's onsets remain.
    s.inputs.concatenate_runs = False
    s.inputs.subject_info = deepcopy(info)
    s.inputs.output_units = "secs"
    res = s.run()
    npt.assert_almost_equal(
        np.array(res.outputs.session_info[0]["cond"][0]["onset"]),
        np.array([2.0, 50.0, 100.0, 170.0]))

    # Ragged events: cond2 sometimes has a single event per run.
    filename3 = tmpdir.join("test3.nii").strpath
    Nifti1Image(np.random.rand(10, 10, 10, 30), np.eye(4)).to_filename(filename3)
    s.inputs.functional_runs = [filename1, filename2, filename3]
    info = [
        Bunch(conditions=["cond1", "cond2"], onsets=[[2, 3], [2]],
              durations=[[1, 1], [1]]),
        Bunch(conditions=["cond1", "cond2"], onsets=[[2, 3], [2, 4]],
              durations=[[1, 1], [1, 1]]),
        Bunch(conditions=["cond1", "cond2"], onsets=[[2, 3], [2]],
              durations=[[1, 1], [1]]),
    ]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    npt.assert_almost_equal(np.array(sess[0]["cond"][0]["duration"]),
                            np.array([1.0, 1.0]))
    npt.assert_almost_equal(np.array(sess[0]["cond"][1]["duration"]),
                            np.array([1.0]))
    npt.assert_almost_equal(np.array(sess[1]["cond"][1]["duration"]),
                            np.array([1.0, 1.0]))
    npt.assert_almost_equal(np.array(sess[2]["cond"][1]["duration"]),
                            np.array([1.0]))

    # The same ragged events, concatenated into a single session.
    s.inputs.concatenate_runs = True
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    npt.assert_almost_equal(np.array(sess[0]["cond"][0]["duration"]), np.ones(6))
    npt.assert_almost_equal(np.array(sess[0]["cond"][1]["duration"]), np.ones(4))
コード例 #7
0
ファイル: test_modelgen.py プロジェクト: dalejn/nipype
def test_modelgen_spm_concat():
    """SpecifySPMModel generator test: concatenation, unit conversion, and
    runs whose conditions have differing numbers of events."""
    tempdir = mkdtemp()
    filename1 = os.path.join(tempdir, 'test1.nii')
    filename2 = os.path.join(tempdir, 'test2.nii')
    vol_shape = (10, 10, 10, 30)
    Nifti1Image(np.random.rand(*vol_shape), np.eye(4)).to_filename(filename1)
    Nifti1Image(np.random.rand(*vol_shape), np.eye(4)).to_filename(filename2)
    # One duration shared by all onsets of the condition.
    s = SpecifySPMModel()
    s.inputs.input_units = 'secs'
    s.inputs.concatenate_runs = True
    s.inputs.output_units = 'secs'
    yield assert_equal, s.inputs.output_units, 'secs'
    s.inputs.functional_runs = [filename1, filename2]
    s.inputs.time_repetition = 6
    s.inputs.high_pass_filter_cutoff = 128.
    info = [Bunch(conditions=['cond1'], onsets=[[2, 50, 100, 170]],
                  durations=[[1]]),
            Bunch(conditions=['cond1'], onsets=[[30, 40, 100, 150]],
                  durations=[[1]])]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    yield assert_equal, len(sess), 1
    yield assert_equal, len(sess[0]['regress']), 1
    yield assert_equal, np.sum(sess[0]['regress'][0]['val']), 30
    yield assert_equal, len(sess[0]['cond']), 1
    merged = np.array([2.0, 50.0, 100.0, 170.0, 210.0, 220.0, 280.0, 330.0])
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['onset']), merged
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['duration']), np.ones(8)
    # Scans instead of seconds as output units.
    s.inputs.output_units = 'scans'
    yield assert_equal, s.inputs.output_units, 'scans'
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), merged / 6
    # No concatenation with seconds as output units.
    s.inputs.concatenate_runs = False
    s.inputs.subject_info = deepcopy(info)
    s.inputs.output_units = 'secs'
    res = s.run()
    yield assert_almost_equal, np.array(
        res.outputs.session_info[0]['cond'][0]['onset']), np.array([2.0, 50.0, 100.0, 170.0])
    # Variable number of events in separate runs, sometimes a single one.
    filename3 = os.path.join(tempdir, 'test3.nii')
    Nifti1Image(np.random.rand(*vol_shape), np.eye(4)).to_filename(filename3)
    s.inputs.functional_runs = [filename1, filename2, filename3]
    info = [Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2]],
                  durations=[[1, 1], [1]]),
            Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2, 4]],
                  durations=[[1, 1], [1, 1]]),
            Bunch(conditions=['cond1', 'cond2'], onsets=[[2, 3], [2]],
                  durations=[[1, 1], [1]])]
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['duration']), np.array([1., 1.])
    yield assert_almost_equal, np.array(sess[0]['cond'][1]['duration']), np.array([1.])
    yield assert_almost_equal, np.array(sess[1]['cond'][1]['duration']), np.array([1., 1.])
    yield assert_almost_equal, np.array(sess[2]['cond'][1]['duration']), np.array([1.])
    # The same ragged sessions, concatenated into one.
    s.inputs.concatenate_runs = True
    s.inputs.subject_info = deepcopy(info)
    res = s.run()
    sess = res.outputs.session_info
    yield assert_almost_equal, np.array(sess[0]['cond'][0]['duration']), np.ones(6)
    yield assert_almost_equal, np.array(sess[0]['cond'][1]['duration']), np.ones(4)
    rmtree(tempdir)
コード例 #8
0
def spm_model_specification(behavioral_data, fmri_sessions, onset_name,
                            condition_name, duration_name, time_repetition,
                            realignment_parameters, delimiter, start,
                            concatenate_runs, high_pass_filter_cutoff):
    """ Specify the SPM model used in the GLM and estimate the design matrix.

    .. note::

        * `fmri_sessions` and `behavioral_data` must have the same number
          of elements.
        * `onsets` and `durations` values must have the same units as the
          TR used in the processings (ie. seconds).

    <process>
        <return name="session_info" type="List" desc="session info to leverage
            the first level design."/>
        <input name="behavioral_data" type="List_File" desc="list of .csv
            session behavioral data." />
        <input name="fmri_sessions" type="List_File" desc="list of path to
            fMRI sessions." />
        <input name="onset_name" type="String" desc="the name of the column
            in the `behavioral_data` file containing the onsets."/>
        <input name="condition_name" type="String" desc="the name of the
            column in the `behavioral_data` file containing the conditions."/>
        <input name="duration_name" type="String" desc="the name of the column
            in the `behavioral_data` file containing the condition durations."/>
        <input name="time_repetition" type="Float" desc="the repetition time
            in seconds (in seconds)."/>
        <input name="realignment_parameters" type="File" desc="path to the SPM
            realign output parameters."/>
        <input name="delimiter" type="String" desc="separator used to split
            the `behavioral_data` file."/>
        <input name="start" type="Int" desc="line from which we start reading
            the `behavioral_data` file."/>
        <input name="concatenate_runs" type="Bool" desc="concatenate all runs
            to look like a single session."/>
        <input name="high_pass_filter_cutoff" type="Float" desc="high-pass
            filter cutoff in secs."/>
    </process>
    """
    # Local imports
    from nipype.interfaces.base import Bunch
    from nipype.algorithms.modelgen import SpecifySPMModel

    # One behavioral file is required for each fMRI session.
    if len(behavioral_data) != len(fmri_sessions):
        raise ValueError("One behavioral data per session is required, "
                         "got {0} behaviral data and {1} session.".format(
                             len(behavioral_data), len(fmri_sessions)))

    # Build one Bunch of conditions/onsets/durations per session.
    info = []
    for csvfile in behavioral_data:

        # Parse the behavioural file
        all_onsets = get_onsets(csvfile, condition_name, onset_name,
                                duration_name, delimiter, start)

        # Create a nipype Bunch (dictionary-like) structure.
        # BUGFIX: the loop variable used to be named 'condition_name',
        # shadowing the function parameter -- every session after the first
        # was then parsed with the previous session's last condition value
        # instead of the requested column name.
        conditions = []
        onsets = []
        durations = []
        for cond_name, item in all_onsets.items():
            conditions.append(cond_name)
            onsets.append([float(x) for x in item["onsets"]])
            durations.append([float(x) for x in item["durations"]])
        info.append(
            Bunch(conditions=conditions, onsets=onsets, durations=durations))

    # Make a model specification compatible with spm designer
    spec_interface = SpecifySPMModel(
        concatenate_runs=concatenate_runs,
        input_units="secs",
        output_units="secs",
        time_repetition=time_repetition,
        high_pass_filter_cutoff=high_pass_filter_cutoff,
        functional_runs=fmri_sessions,
        subject_info=info,
        realignment_parameters=realignment_parameters)
    spec_interface.run()

    # The interface returns numpy double-precision scalars; cast them to
    # builtin floats explicitly so the structure is python/json compliant.
    def cast_to_float(obj):
        """ Recursively cast numpy.double items to builtin floats.

        Parameters
        ----------
        obj: object
            a generic python object.

        Returns
        -------
        out: object
            the float-casted input object.
        """
        # Deal with dictionary
        if isinstance(obj, dict):
            return {key: cast_to_float(val) for key, val in obj.items()}

        # Deal with tuple and list
        if isinstance(obj, (list, tuple)):
            out = [cast_to_float(val) for val in obj]
            return tuple(out) if isinstance(obj, tuple) else out

        # numpy.double subclasses float, so this also converts numpy scalars.
        if isinstance(obj, float):
            return float(obj)
        return obj

    session_info = cast_to_float(
        spec_interface.aggregate_outputs().get()["session_info"])

    return session_info
コード例 #9
0
                            time_repetition=tr,
                            high_pass_filter_cutoff=128)
modelspec.inputs.realignment_parameters = realignment_parameters
modelspec.inputs.functional_runs = func_file
# Read the conditions from the behavioral CSV: one onset list and one
# duration list per unique condition name.
# NOTE(review): np.recfromcsv assumes the file exposes 'name', 'onset' and
# 'duration' columns -- confirm against the actual paradigm file.
import numpy as np
from nipype.interfaces.base import Bunch
paradigm = np.recfromcsv(paradigm_file)
conditions = np.unique(paradigm['name'])
onsets = [paradigm['onset'][paradigm['name'] == condition].tolist()
          for condition in conditions]
durations = [paradigm['duration'][paradigm['name'] == condition].tolist()
             for condition in conditions]
# A single Bunch carries all conditions of the (single) functional run.
modelspec.inputs.subject_info = Bunch(conditions=conditions, onsets=onsets,
                                      durations=durations)
out_modelspec = modelspec.run()

# Generate an SPM design matrix
from procasl.first_level import Level1PerfusionDesign
from procasl.preprocessing import compute_brain_mask
spm_mat = os.path.join(os.getcwd(), 'SPM.mat')
if os.path.isfile(spm_mat):
    os.remove(spm_mat)  # design crashes if an SPM.mat already exists
# Level-1 perfusion design: canonical HRF without derivatives, timing in
# seconds, AR(1) serial-correlation model, interscan interval = TR.
level1design = Level1PerfusionDesign(bases={'hrf': {'derivs': [0, 0]}},
                                     perfusion_bases='bases',
                                     timing_units='secs',
                                     interscan_interval=tr,
                                     model_serial_correlations='AR(1)')
level1design.inputs.mask_image = compute_brain_mask(
    mean_func_file, frac=.2)  # Compute cut neck mask