Example #1
    def to_nipy_paradigm(self):

        p = self.join_sessions()
        sorted_conds = sorted(p.stimOnsets.keys())
        onsets = unstack_trees(p.stimOnsets)
        durations = unstack_trees(p.stimDurations)

        cond_ids = [
            np.hstack([[c] * len(o) for c, o in sorted(sess_ons.items())])
            for sess_ons in onsets
        ]
        onsets = [
            np.hstack([sess_ons[c] for c in sorted_conds])
            for sess_ons in onsets
        ]
        durations = [
            np.hstack([sess_dur[c] for c in sorted_conds])
            for sess_dur in durations
        ]

        if any([(dur > 0.).any() for dur in durations]):
            # Block paradigm
            if len(onsets) > 1:
                dd = [('session%02d' % i, BlockParadigm(d[0], d[1], d[2]))
                      for i, d in enumerate(zip(cond_ids, onsets, durations))]
                return dict(dd)
            else:
                return BlockParadigm(cond_ids[0],
                                     onsets[0],
                                     durations[0],
                                     amplitude=None)
        else:
            if len(onsets) > 1:

                dd = [('session%02d' % i, EventRelatedParadigm(d[0], d[1]))
                      for i, d in enumerate(zip(cond_ids, onsets))]
                return dict(dd)
            else:
                return EventRelatedParadigm(cond_ids[0],
                                            onsets[0],
                                            amplitude=None)
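As a usage note, the method returns per-session paradigms keyed 'session00', 'session01', ... when there are multiple sessions, and a bare paradigm otherwise. A minimal sketch of the two nipy paradigm types it builds (condition names and timings here are hypothetical):

import numpy as np
from nipy.modalities.fmri.experimental_paradigm import (
    BlockParadigm, EventRelatedParadigm)

cond_ids = np.array(['audio'] * 3 + ['video'] * 3)
onsets = np.array([0., 10., 20., 5., 15., 25.])
durations = 3. * np.ones(6)

# Any non-zero duration yields a block paradigm; all-zero durations
# yield an event-related paradigm, mirroring the branch above.
block = BlockParadigm(cond_ids, onsets, durations, amplitude=None)
events = EventRelatedParadigm(cond_ids, onsets, amplitude=None)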
Example #2
def make_design_matrices(onsets,
                         n_scans,
                         tr,
                         motion=None,
                         hrf_model='canonical with derivative',
                         drift_model='cosine',
                         orthogonalize=None):
    design_matrices = []
    for i, onset in enumerate(onsets):
        if n_scans[i] == 0:
            design_matrices.append(None)
            continue
        onset = np.array(onset)
        labels = onset[:, 0]
        time = onset[:, 1].astype('float')
        duration = onset[:, 2].astype('float')
        amplitude = onset[:, 3].astype('float')

        if duration.sum() == 0:
            paradigm = EventRelatedParadigm(labels, time, amplitude)
        else:
            paradigm = BlockParadigm(labels, time, duration, amplitude)

        frametimes = np.linspace(0, (n_scans[i] - 1) * tr, n_scans[i])

        if motion is not None:
            add_regs = np.array(motion[i]).astype('float')
            add_reg_names = ['motion_%i' % r for r in range(add_regs.shape[1])]
            design_matrix = make_dmtx(frametimes,
                                      paradigm,
                                      hrf_model=hrf_model,
                                      drift_model=drift_model,
                                      add_regs=add_regs,
                                      add_reg_names=add_reg_names)
        else:
            design_matrix = make_dmtx(frametimes,
                                      paradigm,
                                      hrf_model=hrf_model,
                                      drift_model=drift_model)

        if orthogonalize is not None:
            if 'derivative' in hrf_model:
                raise Exception(
                    'Orthogonalization not supported with hrf derivative.')
            orth = orthogonalize[i]
            if orth is not None:
                for x, y in orth:
                    x_ = design_matrix.matrix[:, x]
                    y_ = design_matrix.matrix[:, y]
                    z = orthogonalize_vectors(x_, y_)
                    design_matrix.matrix[:, x] = z

        design_matrices.append(design_matrix.matrix)

    return design_matrices
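orthogonalize_vectors is not defined in this snippet; a minimal sketch of what it plausibly does (an assumption, not the project's actual helper) is a single Gram-Schmidt step that residualizes regressor x against regressor y:

import numpy as np

def orthogonalize_vectors(x, y):
    # Hypothetical helper: return the component of x orthogonal to y.
    x = np.asarray(x, dtype=float)
    y = np.asarray(y, dtype=float)
    return x - (np.dot(x, y) / np.dot(y, y)) * y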
Example #3
def make_paradigm(delay, duration=10., length=387., odd=False):
    """ Writes the paradigm file for a certain delay"""
    period = 30 # in seconds
    if odd:
        delay = period - delay
    onsets = np.hstack((delay + period * np.arange(6), length - delay
                        - period * np.arange(6) - duration))
    condition_id = ['checkerboard']
    cids = condition_id * len(onsets)
    durations = duration * np.ones(len(onsets))
    return BlockParadigm(cids, onsets, durations)
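A quick usage check of the onset layout this produces, assuming nipy's Paradigm exposes the onset attribute:

p = make_paradigm(delay=5.)
print(p.onset[:6])   # [  5.  35.  65.  95. 125. 155.]  (delay + 30 s steps)
# The remaining six onsets are mirrored from the end of the 387 s run.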
Example #4
def make_paradigm_from_timing_files(timing_files, condition_ids=None):
    if condition_ids is not None:
        assert len(condition_ids) == len(timing_files)

    onsets = []
    durations = []
    amplitudes = []
    _condition_ids = []
    count = 0
    for timing_file in timing_files:
        timing = np.loadtxt(timing_file)
        if timing.ndim == 1:
            timing = timing[np.newaxis, :]

        if condition_ids is None:
            condition_id = os.path.basename(timing_file).lower().split('.')[0]
        else:
            condition_id = condition_ids[count]
        _condition_ids = _condition_ids + [condition_id] * timing.shape[0]

        count += 1

        if timing.shape[1] == 3:
            onsets = onsets + list(timing[..., 0])
            durations = durations + list(timing[..., 1])
            amplitudes = amplitudes + list(timing[..., 2])
        elif timing.shape[1] == 2:
            onsets = onsets + list(timing[..., 0])
            durations = durations + list(timing[..., 1])
            amplitudes = amplitudes + list(np.ones(len(timing)))
        elif timing.shape[1] == 1:
            onsets = onsets + list(timing[..., 0])
            durations = durations + list(np.zeros(len(timing)))
            amplitudes = amplitudes + list(np.ones(len(timing)))
        else:
            raise TypeError(
                "Timing info must either be a 1D array of onsets or a 2D "
                "array with 2 or 3 columns: the first column is for "
                "the onsets, the second for the durations, and the "
                "third --if present-- is for the amplitudes; got %s" % timing)

    return BlockParadigm(con_id=_condition_ids,
                         onset=onsets,
                         duration=durations,
                         amplitude=amplitudes)
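As a usage sketch, each timing file holds one event per row with 1 to 3 whitespace-separated columns (onset[, duration[, amplitude]]); file names and contents here are hypothetical, with one file per condition:

# visual.txt (3 columns: onset, duration, amplitude):
#   0.0    2.5   1.0
#   30.0   2.5   1.0
paradigm = make_paradigm_from_timing_files(
    ['visual.txt', 'audio.txt'], condition_ids=['visual', 'audio'])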
Example #5
def localizer_paradigm(print_option=False):
    """
    Definition:
    Onset for the standart localizer.
    ----------
    Parameters:
    print_option: boolean option, if True jsut print the onsets.
        
    ----------    
    Return
    Either a print of onsets if print_option, otherwise a BlockParadigm object
    """

    onset = np.array([
        0, 2400, 5700, 8700, 11400, 15000, 18000, 20700, 23700, 26700, 29700,
        33000, 35400, 39000, 41700, 44700, 48000, 50700, 53700, 56400, 59700,
        62400, 66000, 69000, 71400, 75000, 78000, 80400, 83400, 87000, 89700,
        93000, 96000, 99000, 102000, 105000, 108000, 110400, 113700, 116700,
        119400, 122700, 125400, 129000, 131400, 135000, 137700, 140400, 143400,
        146700, 149400, 153000, 156000, 159000, 162000, 164400, 167700, 170400,
        173700, 176700, 179700, 182700, 186000, 188400, 191700, 195000, 198000,
        201000, 203700, 207000, 210000, 212700, 215700, 218700, 221400, 224700,
        227700, 230700, 234000, 236700, 240000, 243000, 246000, 248400, 251700,
        254700, 257400, 260400, 264000, 266700, 269700, 272700, 275400, 278400,
        281700, 284400, 288000, 291000, 293400, 296700
    ]) * .001
    task = np.array([
        8, 8, 11, 1, 3, 10, 5, 10, 4, 6, 10, 2, 7, 9, 9, 7, 7, 11, 11, 9, 1, 4,
        11, 5, 6, 9, 11, 11, 7, 3, 10, 11, 2, 11, 11, 11, 7, 11, 11, 6, 10, 2,
        8, 11, 9, 7, 7, 2, 3, 10, 1, 8, 2, 9, 3, 8, 9, 4, 7, 1, 11, 11, 11, 1,
        7, 9, 8, 8, 2, 2, 2, 6, 6, 1, 8, 1, 5, 3, 8, 10, 11, 11, 9, 1, 7, 4, 4,
        8, 2, 1, 1, 11, 5, 2, 11, 10, 9, 5, 10, 10
    ])
    names = [
        'h_checkerboard', 'v_checkerboard', 'r_hand_audio', 'l_hand_audio',
        'r_hand_video', 'l_hand_video', 'computation_audio',
        'computation_video', 'sentence_video', 'sentence_audio'
    ]
    onset, task = onset[task < 11], task[task < 11]
    duration = 1 * np.ones_like(task)
    con_id = np.array([names[t - 1] for t in task])
    if print_option:
        return (con_id, onset)
    else:
        return BlockParadigm(con_id, onset, duration)
Example #6
def make_paradigm(filename, **kwargs):
    """
    Constructs design paradigm from run_*_spmdef.txt file

    """

    with open(filename) as fd:
        text = fd.read()
    conditions = []
    onsets = []
    durations = []
    for item in re.finditer(
            r"(?P<condition>(?:Unfamiliar|Scrambled|Famous))\t+?"
            r"(?P<onset>\S+)\t+?(?P<duration>\S+)", text):
        conditions.append(item.group("condition"))
        onsets.append(float(item.group("onset")))
        durations.append(float(item.group("duration")))

    return BlockParadigm(con_id=conditions,
                         onset=onsets,
                         duration=durations,
                         amplitude=np.ones(len(conditions)),
                         **kwargs)
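The pattern expects tab-separated lines of the form condition<TAB>onset<TAB>duration, with the condition one of Unfamiliar, Scrambled or Famous; a hypothetical run_01_spmdef.txt excerpt:

Famous	12.5	0.8
Scrambled	15.0	0.8
Unfamiliar	17.5	0.8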
Example #7
def make_designmat(frametimes,
                   cond_ids,
                   onsets,
                   durations,
                   amplitudes=None,
                   design_kwargs=None,
                   constant=False,
                   logger=None):
    """
    Creates design matrix from TSV columns
    :param frametimes: time index (in s) of each TR
    :param cond_ids: condition ids. each unique string will become a regressor
    :param onsets: condition onsets
    :param durations: durations of trials
    :param amplitudes: amplitude of trials (default None)
    :param design_kwargs: additional arguments (motion parameters, HRF, etc.)
    :param constant: if False (default), drop the constant column from the
        design matrix
    :param logger: logger instance
    :return: design matrix instance
    """
    if design_kwargs is None:
        design_kwargs = {}
    if "drift_model" not in design_kwargs.keys():
        design_kwargs["drift_model"] = "blank"

    from nipy.modalities.fmri.design_matrix import make_dmtx
    from nipy.modalities.fmri.experimental_paradigm import BlockParadigm

    write_to_logger("Creating design matrix...", logger)
    paradigm = BlockParadigm(con_id=cond_ids,
                             onset=onsets,
                             duration=durations,
                             amplitude=amplitudes)
    dm = make_dmtx(frametimes, paradigm, **design_kwargs)
    if constant is False:
        import numpy as np
        dm.matrix = np.delete(dm.matrix, dm.names.index("constant"), axis=1)
        # list.remove() mutates in place and returns None, so don't reassign
        dm.names.remove("constant")
    return dm
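A minimal call sketch (frametimes, conditions and timings are made up; write_to_logger is assumed to come from the surrounding module):

import numpy as np
frametimes = np.arange(200) * 2.0  # 200 volumes at TR = 2 s
dmtx = make_designmat(frametimes,
                      cond_ids=['a', 'b', 'a'],
                      onsets=[0., 20., 40.],
                      durations=[10., 10., 10.],
                      design_kwargs={'hrf_model': 'canonical'})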
Example #8
    def _run_interface(self, runtime):
        import nibabel as nb
        import numpy as np
        import nipy.modalities.fmri.glm as GLM
        import nipy.modalities.fmri.design_matrix as dm

        try:
            BlockParadigm = dm.BlockParadigm
        except AttributeError:
            from nipy.modalities.fmri.experimental_paradigm import BlockParadigm

        session_info = self.inputs.session_info

        functional_runs = self.inputs.session_info[0]["scans"]
        if isinstance(functional_runs, (str, bytes)):
            functional_runs = [functional_runs]
        nii = nb.load(functional_runs[0])
        data = nii.get_data()

        if isdefined(self.inputs.mask):
            mask = nb.load(self.inputs.mask).get_data() > 0
        else:
            mask = np.ones(nii.shape[:3]) == 1

        timeseries = data.copy()[mask, :]
        del data

        for functional_run in functional_runs[1:]:
            nii = nb.load(functional_run, mmap=NUMPY_MMAP)
            data = nii.get_data()
            npdata = data.copy()
            del data
            timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1)
            del npdata

        nscans = timeseries.shape[1]

        if "hpf" in list(session_info[0].keys()):
            hpf = session_info[0]["hpf"]
            drift_model = self.inputs.drift_model
        else:
            hpf = 0
            drift_model = "Blank"

        reg_names = []
        for reg in session_info[0]["regress"]:
            reg_names.append(reg["name"])

        reg_vals = np.zeros((nscans, len(reg_names)))
        for i in range(len(reg_names)):
            reg_vals[:, i] = np.array(
                session_info[0]["regress"][i]["val"]).reshape(1, -1)

        frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans)

        conditions = []
        onsets = []
        duration = []

        for i, cond in enumerate(session_info[0]["cond"]):
            onsets += cond["onset"]
            conditions += [cond["name"]] * len(cond["onset"])
            if len(cond["duration"]) == 1:
                duration += cond["duration"] * len(cond["onset"])
            else:
                duration += cond["duration"]

        if conditions:
            paradigm = BlockParadigm(con_id=conditions,
                                     onset=onsets,
                                     duration=duration)
        else:
            paradigm = None
        design_matrix, self._reg_names = dm.dmtx_light(
            frametimes,
            paradigm,
            drift_model=drift_model,
            hfcut=hpf,
            hrf_model=self.inputs.hrf_model,
            add_regs=reg_vals,
            add_reg_names=reg_names,
        )
        if self.inputs.normalize_design_matrix:
            for i in range(len(self._reg_names) - 1):
                design_matrix[:, i] = (
                    design_matrix[:, i] -
                    design_matrix[:, i].mean()) / design_matrix[:, i].std()

        if self.inputs.plot_design_matrix:
            import pylab

            pylab.pcolor(design_matrix)
            pylab.savefig("design_matrix.pdf")
            pylab.close()
            pylab.clf()

        glm = GLM.GeneralLinearModel()
        glm.fit(
            timeseries.T,
            design_matrix,
            method=self.inputs.method,
            model=self.inputs.model,
        )

        self._beta_file = os.path.abspath("beta.nii")
        beta = np.zeros(mask.shape + (glm.beta.shape[0], ))
        beta[mask, :] = glm.beta.T
        nb.save(nb.Nifti1Image(beta, nii.affine), self._beta_file)

        self._s2_file = os.path.abspath("s2.nii")
        s2 = np.zeros(mask.shape)
        s2[mask] = glm.s2
        nb.save(nb.Nifti1Image(s2, nii.affine), self._s2_file)

        if self.inputs.save_residuals:
            explained = np.dot(design_matrix, glm.beta)
            residuals = np.zeros(mask.shape + (nscans, ))
            residuals[mask, :] = timeseries - explained.T
            self._residuals_file = os.path.abspath("residuals.nii")
            nb.save(nb.Nifti1Image(residuals, nii.affine),
                    self._residuals_file)

        self._nvbeta = glm.nvbeta
        self._dof = glm.dof
        self._constants = glm._constants
        self._axis = glm._axis
        if self.inputs.model == "ar1":
            self._a_file = os.path.abspath("a.nii")
            a = np.zeros(mask.shape)
            a[mask] = glm.a.squeeze()
            nb.save(nb.Nifti1Image(a, nii.affine), self._a_file)
        self._model = glm.model
        self._method = glm.method

        return runtime
Example #9
def _fit_hrf_event_model(ds,
                         events,
                         time_attr,
                         condition_attr='targets',
                         design_kwargs=None,
                         glmfit_kwargs=None,
                         regr_attrs=None):
    if externals.exists('nipy', raise_=True):
        from nipy.modalities.fmri.design_matrix import make_dmtx
        from mvpa2.mappers.glm import NiPyGLMMapper

    # Decide which condition attribute the GLM will actually be run on
    if isinstance(condition_attr, basestring):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    glm_condition_attr = 'regressor_names'  # actual regressors
    glm_condition_attr_map = dict([(con, dict()) for con in condition_attr])  #
    # to map back to original conditions
    events = copy.deepcopy(events)  # since we are modifying in place
    for event in events:
        if glm_condition_attr in event:
            raise ValueError("Event %s already has %s defined.  Should not "
                             "happen.  Choose another name for it" %
                             (event, glm_condition_attr))
        compound_label = event[glm_condition_attr] = \
            'glm_label_' + '+'.join(
                str(event[con]) for con in condition_attr)
        # and mapping back to original values, without str()
        # for each condition:
        for con in condition_attr:
            glm_condition_attr_map[con][compound_label] = event[con]

    evvars = _events2dict(events)
    add_paradigm_kwargs = {}
    if 'amplitude' in evvars:
        add_paradigm_kwargs['amplitude'] = evvars['amplitude']
    # create paradigm
    if 'duration' in evvars:
        from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
        # NiPy considers everything with a duration as a block paradigm
        paradigm = BlockParadigm(con_id=evvars[glm_condition_attr],
                                 onset=evvars['onset'],
                                 duration=evvars['duration'],
                                 **add_paradigm_kwargs)
    else:
        from nipy.modalities.fmri.experimental_paradigm \
                import EventRelatedParadigm
        paradigm = EventRelatedParadigm(con_id=evvars[glm_condition_attr],
                                        onset=evvars['onset'],
                                        **add_paradigm_kwargs)
    # create design matrix -- all kinds of fancy additional regr can be
    # auto-generated
    if design_kwargs is None:
        design_kwargs = {}
    if not regr_attrs is None:
        names = []
        regrs = []
        for attr in regr_attrs:
            names.append(attr)
            regrs.append(ds.sa[attr].value)
        # stack one column per regressor -> shape (n_samples, n_regressors)
        regrs = np.vstack(regrs).T
        if 'add_regs' in design_kwargs:
            design_kwargs['add_regs'] = np.hstack(
                (design_kwargs['add_regs'], regrs))
        else:
            design_kwargs['add_regs'] = regrs
        if 'add_reg_names' in design_kwargs:
            design_kwargs['add_reg_names'].extend(names)
        else:
            design_kwargs['add_reg_names'] = names
    design_matrix = make_dmtx(ds.sa[time_attr].value, paradigm,
                              **design_kwargs)

    # push design into source dataset
    glm_regs = [(reg, design_matrix.matrix[:, i])
                for i, reg in enumerate(design_matrix.names)]

    # GLM
    glm = NiPyGLMMapper([],
                        glmfit_kwargs=glmfit_kwargs,
                        add_regs=glm_regs,
                        return_design=True,
                        return_model=True,
                        space=glm_condition_attr)

    model_params = glm(ds)

    # some regressors might be corresponding not to original condition_attr
    # so let's separate them out
    regressor_names = model_params.sa[glm_condition_attr].value
    condition_regressors = np.array(
        [v in glm_condition_attr_map.values()[0] for v in regressor_names])
    assert (condition_regressors.dtype == np.bool)
    if not np.all(condition_regressors):
        # some regressors do not correspond to conditions and would need
        # to be taken into a separate dataset
        model_params.a['add_regs'] = model_params[~condition_regressors]
        # then we process the rest
        model_params = model_params[condition_regressors]
        regressor_names = model_params.sa[glm_condition_attr].value

    # now define proper condition sa's
    for con, con_map in glm_condition_attr_map.iteritems():
        model_params.sa[con] = [con_map[v] for v in regressor_names]
    model_params.sa.pop(glm_condition_attr)  # remove generated one
    return model_params
Example #10
def fit_event_hrf_model(ds,
                        events,
                        time_attr,
                        condition_attr='targets',
                        design_kwargs=None,
                        glmfit_kwargs=None,
                        regr_attrs=None,
                        return_model=False):
    """Fit a GLM with HRF regressor and yield a dataset with model parameters

    A univariate GLM is fitted for each feature and model parameters are
    returned as samples. Model parameters are returned for each regressor in
    the design matrix. Using functionality from NiPy, design matrices can be
    generated from event definitions with a variety of customizations (HRF
    model, confound regressors, ...).

    Events need to be specified as a list of dictionaries
    (see :class:`~mvpa2.misc.support.Event` for a helper class). Each dictionary
    contains all relevant attributes to describe an event.

    HRF event model details
    -----------------------

    The event specifications are used to generate a design matrix for all
    present conditions. In addition to the mandatory ``onset`` information
    each event definition needs to include a label in order to associate
    individual events to conditions (the design matrix will contain at least
    one regressor for each condition). The name of this label attribute must
    be specified too (see ``condition_attr`` argument).

    NiPy is used to generate the actual design matrix.  It is required to
    specify a dataset sample attribute that contains time stamps for all input
    data samples (see ``time_attr``).  NiPy operation could be customized (see
    ``design_kwargs`` argument). Additional regressors from sample attributes
    of the input dataset can be included in the design matrix (see
    ``regr_attrs``).

    The actual GLM fit is also performed by NiPy and can be fully customized
    (see ``glmfit_kwargs``).

    Parameters
    ----------
    ds : Dataset
      The samples of this input dataset must be in ascending temporal order.
    events : list
      Each event definition has to specify ``onset`` and ``duration``. All
      other attributes will be passed on to the sample attributes collection of
      the returned dataset.
    time_attr : str
      Attribute with dataset sample time stamps.
      Its values will be treated as in-the-same-unit and are used to
      determine corresponding samples from real-value onset and duration
      definitions. For HRF modeling this argument is mandatory.
    condition_attr : str
      Name of the event attribute with the condition labels.
      Can be a list of those (e.g. ['targets', 'chunks'], a combination of
      which would constitute a condition).
    design_kwargs : dict
      Arbitrary keyword arguments for NiPy's make_dmtx() used for design matrix
      generation. Choose HRF model, confound regressors, etc.
    glmfit_kwargs : dict
      Arbitrary keyword arguments for NiPy's GeneralLinearModel.fit() used for
      estimating model parameter. Choose fitting algorithm: OLS or AR1.
    regr_attrs : list
      List of dataset sample attribute names that shall be extracted from the
      input dataset and used as additional regressors in the design matrix.
    return_model : bool
      Flag whether to include the fitted GLM model in the returned dataset.
      For large input data this can be problematic, as the model may contain
      the residuals (same size as the input data), hence multiplying the memory
      demand. Off by default.

    Returns
    -------
    Dataset
      One sample for each regressor/condition in the design matrix is returned.
      The condition names are included as a sample attribute with the name
      specified by the ``condition_attr`` argument.  The actual design
      regressors are included as ``regressors`` sample attribute. If enabled,
      an instance with the fitted NiPy GLM results is included as a dataset
      attribute ``model``, and can be used for computing contrasts subsequently.

    Examples
    --------
    The documentation also contains an :ref:`example script
    <example_eventrelated>` showing a spatio-temporal analysis of fMRI data
    that involves this function.

    >>> from mvpa2.datasets import Dataset
    >>> ds = Dataset(np.random.randn(10, 25))
    >>> ds.sa['time_coords'] = np.linspace(0, 50, len(ds))
    >>> events = [{'onset': 2, 'duration': 4, 'condition': 'one'},
    ...           {'onset': 4, 'duration': 4, 'condition': 'two'}]
    >>> hrf_estimates = fit_event_hrf_model(
    ...                   ds, events,
    ...                   time_attr='time_coords',
    ...                   condition_attr='condition',
    ...                   design_kwargs=dict(drift_model='blank'),
    ...                   glmfit_kwargs=dict(model='ols'),
    ...                   return_model=True)
    >>> print(hrf_estimates.sa.condition)
    ['one' 'two']
    >>> print(hrf_estimates.shape)
    (2, 25)
    >>> len(hrf_estimates.a.model.get_mse())
    25

    Additional regressors used in GLM modeling are also available in a
    dataset attribute:

    >>> print(hrf_estimates.a.add_regs.sa.regressor_names)
    ['constant']
    """
    if externals.exists('nipy', raise_=True):
        from nipy.modalities.fmri.design_matrix import make_dmtx
        from mvpa2.mappers.glm import NiPyGLMMapper

    # Decide which condition attribute the GLM will actually be run on
    if isinstance(condition_attr, str):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    glm_condition_attr = 'regressor_names'  # actual regressors
    glm_condition_attr_map = dict([(con, dict()) for con in condition_attr])  #
    # to map back to original conditions
    events = copy.deepcopy(events)  # since we are modifying in place
    for event in events:
        if glm_condition_attr in event:
            raise ValueError("Event %s already has %s defined.  Should not "
                             "happen.  Choose another name for it" %
                             (event, glm_condition_attr))
        compound_label = event[glm_condition_attr] = \
            'glm_label_' + '+'.join(
                str(event[con]) for con in condition_attr)
        # and mapping back to original values, without str()
        # for each condition:
        for con in condition_attr:
            glm_condition_attr_map[con][compound_label] = event[con]

    evvars = _events2dict(events)
    add_paradigm_kwargs = {}
    if 'amplitude' in evvars:
        add_paradigm_kwargs['amplitude'] = evvars['amplitude']
    # create paradigm
    if 'duration' in evvars:
        from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
        # NiPy considers everything with a duration as a block paradigm
        paradigm = BlockParadigm(con_id=evvars[glm_condition_attr],
                                 onset=evvars['onset'],
                                 duration=evvars['duration'],
                                 **add_paradigm_kwargs)
    else:
        from nipy.modalities.fmri.experimental_paradigm \
                import EventRelatedParadigm
        paradigm = EventRelatedParadigm(con_id=evvars[glm_condition_attr],
                                        onset=evvars['onset'],
                                        **add_paradigm_kwargs)
    # create design matrix -- all kinds of fancy additional regr can be
    # auto-generated
    if design_kwargs is None:
        design_kwargs = {}

    if regr_attrs is not None:
        names = []
        regrs = []
        for attr in regr_attrs:
            regr = ds.sa[attr].value
            # add rudimentary dimension for easy hstacking later on
            if regr.ndim < 2:
                regr = regr[:, np.newaxis]
            if regr.shape[1] == 1:
                names.append(attr)
            else:
                #  add one per each column of the regressor
                for i in range(regr.shape[1]):
                    names.append("%s.%d" % (attr, i))
            regrs.append(regr)
        regrs = np.hstack(regrs)

        if 'add_regs' in design_kwargs:
            design_kwargs['add_regs'] = np.hstack(
                (design_kwargs['add_regs'], regrs))
        else:
            design_kwargs['add_regs'] = regrs
        if 'add_reg_names' in design_kwargs:
            design_kwargs['add_reg_names'].extend(names)
        else:
            design_kwargs['add_reg_names'] = names

    design_matrix = make_dmtx(ds.sa[time_attr].value, paradigm,
                              **design_kwargs)

    # push design into source dataset
    glm_regs = [(reg, design_matrix.matrix[:, i])
                for i, reg in enumerate(design_matrix.names)]

    # GLM
    glm = NiPyGLMMapper([],
                        glmfit_kwargs=glmfit_kwargs,
                        add_regs=glm_regs,
                        return_design=True,
                        return_model=return_model,
                        space=glm_condition_attr)

    model_params = glm(ds)

    # some regressors might be corresponding not to original condition_attr
    # so let's separate them out
    regressor_names = model_params.sa[glm_condition_attr].value
    condition_regressors = np.array([
        v in list(glm_condition_attr_map.values())[0] for v in regressor_names
    ])
    assert (condition_regressors.dtype == bool)
    if not np.all(condition_regressors):
        # some regressors do not correspond to conditions and would need
        # to be taken into a separate dataset
        model_params.a['add_regs'] = model_params[~condition_regressors]
        # then we process the rest
        model_params = model_params[condition_regressors]
        regressor_names = model_params.sa[glm_condition_attr].value

    # now define proper condition sa's
    for con, con_map in glm_condition_attr_map.items():
        model_params.sa[con] = [con_map[v] for v in regressor_names]
    model_params.sa.pop(glm_condition_attr)  # remove generated one
    return model_params
Example #11
def do_subject_glm(subject_data):
    """FE analysis for a single subject."""
    subject_id = subject_data['subject_id']
    output_dir = subject_data["output_dir"]
    func_files = subject_data['func']
    anat = subject_data['anat']
    onset_files = subject_data['onset']
    tr = subject_data['TR']
    time_units = subject_data['time_units'].lower()
    assert time_units in ["seconds", "tr", "milliseconds"]
    drift_model = subject_data['drift_model']
    hrf_model = subject_data["hrf_model"]
    hfcut = subject_data["hfcut"]
    mem = Memory(os.path.join(output_dir, "cache"))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    if 0:
        subject_data = mem.cache(do_subject_preproc)(dict(
            func=func_files, anat=anat, output_dir=output_dir))
        func_files = subject_data['func']
        anat = subject_data['anat']

        # reslice func images
        func_files = [
            mem.cache(reslice_vols)(sess_func,
                                    target_affine=nibabel.load(
                                        sess_func[0]).get_affine())
            for sess_func in func_files
        ]

    ### GLM: loop on (session_bold, onset_file) pairs over the various sessions
    design_matrices = []
    for func_file, onset_file in zip(func_files, onset_files):
        if isinstance(func_file, str):
            bold = nibabel.load(func_file)
            assert len(bold.shape) == 4
            n_scans = bold.shape[-1]
            del bold
        else:
            if len(func_file) == 1:
                func_file = func_file[0]
                bold = nibabel.load(func_file)
                assert len(bold.shape) == 4
                n_scans = bold.shape[-1]
                del bold
            else:
                n_scans = len(func_file)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        conditions, onsets, durations, amplitudes = parse_onset_file(
            onset_file)
        if time_units == "tr":
            onsets *= tr
            durations *= tr
        elif time_units in ["milliseconds"]:
            onsets *= 1e-3
            durations *= 1e-3
        paradigm = BlockParadigm(con_id=conditions,
                                 onset=onsets,
                                 duration=durations,
                                 amplitude=amplitudes)
        design_matrices.append(
            make_dmtx(frametimes,
                      paradigm,
                      hrf_model=hrf_model,
                      drift_model=drift_model,
                      hfcut=hfcut))

    # specify contrasts
    n_columns = len(design_matrices[0].names)
    contrasts = {}
    for i in range(paradigm.n_conditions):
        contrasts[design_matrices[0].names[2 * i]] = np.eye(n_columns)[2 * i]

    # effects of interest F-test
    diff_contrasts = []
    for i in range(paradigm.n_conditions - 1):
        a = contrasts[design_matrices[0].names[2 * i]]
        b = contrasts[design_matrices[0].names[2 * (i + 1)]]
        diff_contrasts.append(a - b)
    contrasts["diff"] = diff_contrasts

    # fit GLM
    print('Fitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel([
        nibabel.concat_images(sess_func, check_affines=False)
        for sess_func in func_files
    ], [design_matrix.matrix for design_matrix in design_matrices],
                               mask='compute')
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(output_dir, "mask.nii.gz")

    print("Saving mask image %s" % mask_path)
    nibabel.save(fmri_glm.mask, mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.items():
        print("\tcontrast id: %s" % contrast_id)
        if np.ndim(contrast_val) > 1:
            contrast_type = "F"
        else:
            contrast_type = "t"
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * len(func_files),
            con_id=contrast_id,
            contrast_type=contrast_type,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True)

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                     [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(map_dir, '%s.nii.gz' % contrast_id)
            print("\t\tWriting %s ..." % map_path)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    return subject_id, anat, effects_maps, z_maps, contrasts, fmri_glm.mask
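parse_onset_file is defined elsewhere in the project; a minimal sketch under the assumption that each line holds condition, onset, duration and amplitude (whitespace-separated):

import numpy as np

def parse_onset_file(onset_file):
    # Hypothetical reader: one event per line.
    conditions, onsets, durations, amplitudes = [], [], [], []
    with open(onset_file) as fd:
        for line in fd:
            cond, onset, duration, amplitude = line.split()
            conditions.append(cond)
            onsets.append(float(onset))
            durations.append(float(duration))
            amplitudes.append(float(amplitude))
    return (np.array(conditions), np.array(onsets),
            np.array(durations), np.array(amplitudes))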
Example #12
    # (Requires the preprocessed image and the following paradigm lists from above:
    #  conditions, onsets, durations, amplitudes)
    #=========================================================================
    if run_analysis:
        print('Run general linear model analysis for each test...')
        img = nb.load(smooth_file)

        #-----------------------------------------------------------------
        # Construct a design matrix for each test
        #-----------------------------------------------------------------
        print('  Make design matrix...')
        print('    Conditions:\n      {}'.format(conditions))
        print('    Amplitudes:\n      {}'.format(amplitudes))
        print('    Onsets:\n      {}'.format(onsets))
        print('    Durations:\n      {}'.format(durations))
        paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                                 duration=durations, amplitude=amplitudes)
        frametimes = np.linspace(0, n_images-1, n_images)

        if ntest < 3:
            dmtx = make_dmtx(frametimes, paradigm, hrf_model='FIR',
                             drift_model='polynomial', drift_order=2, hfcut=np.inf)
        else:
            dmtx = make_dmtx(frametimes, paradigm, hrf_model='FIR', hfcut=np.inf)
        design_matrix = dmtx.matrix

        # Plot the design matrix
        if plot_design_matrix:
            fig1 = mp.figure(figsize=(10, 6))
            dmtx.show()
            mp.title(desc)
            fig1_file = os.path.join(out_path, label + 'design_matrix_test' + \
Example #13

# fetch spm auditory data
fetch_spm_auditory_data(dataset_dir)

# preprocess the data
subject_data = do_subjects_preproc(jobfile, dataset_dir=dataset_dir)[0]

# construct experimental paradigm
stats_start_time = time.ctime()
tr = 7.
n_scans = 96
_duration = 6
epoch_duration = _duration * tr
conditions = ['rest', 'active'] * 8
duration = epoch_duration * np.ones(len(conditions))
onset = np.linspace(0, (len(conditions) - 1) * epoch_duration, len(conditions))
paradigm = BlockParadigm(con_id=conditions, onset=onset, duration=duration)
hfcut = 2 * 2 * epoch_duration

# construct design matrix
nscans = len(subject_data.func[0])
frametimes = np.linspace(0, (nscans - 1) * tr, nscans)
drift_model = 'Cosine'
hrf_model = 'Canonical With Derivative'
design_matrix = make_dmtx(frametimes,
                          paradigm,
                          hrf_model=hrf_model,
                          drift_model=drift_model,
                          hfcut=hfcut)

# plot and save design matrix
ax = design_matrix.show()
Example #14
def do_subject_glm(subject_data):
    """FE analysis for a single subject."""
    subject_id = subject_data['subject_id']
    output_dir = subject_data["output_dir"]
    func_files = subject_data['func']
    anat = subject_data['anat']
    onset_files = subject_data['onset']
    # subject_id = os.path.basename(subject_dir)
    # subject_output_dir = os.path.join(output_dir, subject_id)
    mem = Memory(os.path.join(output_dir, "cache"))
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    # glob files: anat, session func files, session onset files
    # anat = glob.glob(os.path.join(subject_dir, anat_wildcard))
    # assert len(anat) == 1
    # anat = anat[0]
    # onset_files = sorted([glob.glob(os.path.join(subject_dir, session))[0]
    #                       for session in session_onset_wildcards])
    # func_files = sorted([sorted(glob.glob(os.path.join(subject_dir, session)))
    #                      for session in session_func_wildcards])

    ### Preprocess data #######################################################
    if 0:
        subject_data = mem.cache(do_subject_preproc)(
            dict(func=func_files, anat=anat, output_dir=output_dir))
        func_files = subject_data['func']
        anat = subject_data['anat']

        # reslice func images
        func_files = [mem.cache(reslice_vols)(
            sess_func,
            target_affine=nibabel.load(sess_func[0]).get_affine())
                      for sess_func in func_files]

    ### GLM: loop on (session_bold, onset_file) pairs over the various sessions
    design_matrices = []
    for session, (func_file, onset_file) in enumerate(zip(func_files,
                                                          onset_files)):
        if isinstance(func_file, str):
            bold = nibabel.load(func_file)
            assert len(bold.shape) == 4
            n_scans = bold.shape[-1]
            del bold
        else:
            if len(func_file) == 1:
                func_file = func_file[0]
                bold = nibabel.load(func_file)
                assert len(bold.shape) == 4
                n_scans = bold.shape[-1]
                del bold
            else:
                n_scans = len(func_file)
        frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
        conditions, onsets, durations, amplitudes = parse_onset_file(
            onset_file)
        onsets *= tr
        durations *= tr
        paradigm = BlockParadigm(con_id=conditions, onset=onsets,
                                 duration=durations, amplitude=amplitudes)
        design_matrices.append(make_dmtx(frametimes,
                                         paradigm, hrf_model=hrf_model,
                                         drift_model=drift_model,
                                         hfcut=hfcut))

    # specify contrasts
    n_columns = len(design_matrices[0].names)
    contrasts = {}
    for i in xrange(paradigm.n_conditions):
        contrasts[design_matrices[0].names[2 * i]] = np.eye(n_columns)[2 * i]

    # more interesting contrasts
    contrasts['faces-scrambled'] = contrasts['faces'] - contrasts['scrambled']
    contrasts['scrambled-faces'] = -contrasts['faces-scrambled']
    contrasts['effects_of_interest'] = (contrasts['faces'] +
                                        contrasts['scrambled'])

    # effects of interest F-test
    diff_contrasts = []
    for i in xrange(paradigm.n_conditions - 1):
        a = contrasts[design_matrices[0].names[2 * i]]
        b = contrasts[design_matrices[0].names[2 * (i + 1)]]
        diff_contrasts.append(a - b)
    contrasts["diff"] = diff_contrasts

    # fit GLM
    print 'Fitting a GLM (this takes time)...'
    fmri_glm = FMRILinearModel([nibabel.concat_images(sess_func,
                                                      check_affines=False)
                                for sess_func in func_files],
                               [design_matrix.matrix
                                for design_matrix in design_matrices],
                               mask='compute'
                               )
    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(output_dir, "mask.nii.gz")

    print "Saving mask image %s" % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute contrasts
    z_maps = {}
    effects_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        if np.ndim(contrast_val) > 1:
            contrast_type = "F"
        else:
            contrast_type = "t"
        z_map, t_map, effects_map, var_map = fmri_glm.contrast(
            [contrast_val] * 2,
            con_id=contrast_id,
            contrast_type=contrast_type,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True
            )

        # store stat maps to disk
        for map_type, out_map in zip(['z', 't', 'effects', 'variance'],
                                  [z_map, t_map, effects_map, var_map]):
            map_dir = os.path.join(
                output_dir, '%s_maps' % map_type)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            print "\t\tWriting %s ..." % map_path
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if map_type == 'z':
                z_maps[contrast_id] = map_path
            if map_type == 'effects':
                effects_maps[contrast_id] = map_path

    return subject_id, anat, effects_maps, z_maps, contrasts, fmri_glm.mask
Example #15
File: glm.py  Project: pathakdivya/bdpy
def make_paradigm(event_files, num_vols, tr=2., cond_col=2, label_col=None,
                  regressors=None, ignore_col=None, ignore_value=(),
                  trial_wise=False, design='block'):
    '''
    Make paradigm for GLM with Nipy from BIDS task event files.

    Parameters
    ----------
    event_files : list
      List of task event files.
    num_vols : list
      List of the number of volumes in each run.
    tr : int or float
      TR in sec.
    cond_col : int
      Index of the condition column in the task event files.
    label_col : int
      Index of the label column in the task event files.
    regressors : list
      Names of regressors (conditions) included in the design matrix.
    ignore_col : int
      Index of the column to be ignored.
    ignore_value : list
      List of values to be ignored.
    design : 'block' or 'event_related'
      Specifies the experimental design.
    trial_wise : bool
      Returns a trial-wise design matrix if True.

    Returns
    -------
    dict
      paradigm : nipy Paradigm
      condition_labels : labels for task regressors
      run_regressors : nuisance regressors for runs
      run_regressor_labels : labels for the run regressors
    '''

    onset = []
    duration = []
    conds = []
    labels = []

    # Run regressors
    run_regs = []
    run_regs_labels = []

    n_total_vols = np.sum(num_vols)

    trial_count = 0

    # Combining all runs/sessions into a single design matrix
    for i, (ef, nv) in enumerate(zip(event_files, num_vols)):
        n_run = i + 1
        # Offset (in volumes) of this run within the concatenated timeseries;
        # the cumulative sum handles runs of unequal length.
        run_offset = int(np.sum(num_vols[:i]))

        with open(ef, 'r') as f:
            reader = csv.reader(f, delimiter='\t')
            header = next(reader)
            for row in reader:
                if regressors is not None and row[cond_col] not in regressors:
                    continue
                if ignore_col is not None:
                    if row[ignore_col] in ignore_value:
                        continue
                trial_count += 1
                onset.append(float(row[0]) + run_offset * tr)
                duration.append(float(row[1]))
                if trial_wise:
                    conds.append('trial-%06d' % trial_count)
                else:
                    conds.append(row[cond_col])
                if label_col is not None:
                    labels.append(row[label_col])

        # Run regressors (1 inside this run's volumes, 0 elsewhere)
        run_reg = np.zeros((n_total_vols, 1))
        run_reg[run_offset:run_offset + nv] = 1
        run_regs.append(run_reg)
        run_regs_labels.append('run-%02d' % n_run)

    run_regs = np.hstack(run_regs)

    if design == 'event_related':
        paradigm = EventRelatedParadigm(con_id=conds, onset=onset)
    else:
        paradigm = BlockParadigm(con_id=conds, onset=onset, duration=duration)

    return {'paradigm': paradigm,
            'run_regressors': run_regs,
            'run_regressor_labels': run_regs_labels,
            'condition_labels': labels}
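A usage sketch with hypothetical BIDS event files (onsets are shifted so that all runs line up in one concatenated design):

ret = make_paradigm(['run-01_events.tsv', 'run-02_events.tsv'],
                    num_vols=[200, 200], tr=2., design='block')
paradigm = ret['paradigm']           # nipy BlockParadigm
run_regs = ret['run_regressors']     # (400, 2) run indicator matrix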
Example #16

def execute_spm_auditory_glm(data, reg_motion=False):
    reg_motion = reg_motion and 'realignment_parameters' in data

    tr = 7.
    n_scans = 96
    _duration = 6
    epoch_duration = _duration * tr
    conditions = ['rest', 'active'] * 8
    duration = epoch_duration * np.ones(len(conditions))
    onset = np.linspace(0, (len(conditions) - 1) * epoch_duration,
                        len(conditions))
    paradigm = BlockParadigm(con_id=conditions, onset=onset, duration=duration)
    hfcut = 2 * 2 * epoch_duration

    # construct design matrix
    frametimes = np.linspace(0, (n_scans - 1) * tr, n_scans)
    drift_model = 'Cosine'
    hrf_model = 'Canonical With Derivative'

    add_reg_names = None
    add_regs = None
    if reg_motion:
        add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']
        add_regs = data['realignment_parameters'][0]
        if isinstance(add_regs, basestring):
            add_regs = np.loadtxt(add_regs)

    design_matrix = make_dmtx(frametimes,
                              paradigm, hrf_model=hrf_model,
                              drift_model=drift_model, hfcut=hfcut,
                              add_reg_names=add_reg_names,
                              add_regs=add_regs)

    # plot and save design matrix
    ax = design_matrix.show()
    ax.set_position([.05, .25, .9, .65])
    ax.set_title('Design matrix')
    dmat_outfile = os.path.join(data['output_dir'],
                                'design_matrix.png')
    pl.savefig(dmat_outfile, bbox_inches="tight", dpi=200)
    pl.close()

    # specify contrasts
    contrasts = {}
    n_columns = len(design_matrix.names)
    for i in xrange(paradigm.n_conditions):
        contrasts['%s' % design_matrix.names[2 * i]] = np.eye(n_columns)[2 * i]

    # more interesting contrasts"""
    contrasts['active-rest'] = contrasts['active'] - contrasts['rest']

    # fit GLM
    print('\r\nFitting a GLM (this takes time)...')
    fmri_glm = FMRILinearModel(load_4D_img(data['func'][0]),
                               design_matrix.matrix,
                               mask='compute')

    fmri_glm.fit(do_scaling=True, model='ar1')

    # save computed mask
    mask_path = os.path.join(data['output_dir'], "mask.nii.gz")
    print "Saving mask image %s..." % mask_path
    nibabel.save(fmri_glm.mask, mask_path)

    # compute bg unto which activation will be projected
    anat_img = load_vol(data['anat'])

    anat = anat_img.get_data()

    if anat.ndim == 4:
        anat = anat[..., 0]

    anat_affine = anat_img.get_affine()

    print "Computing contrasts..."
    z_maps = {}
    for contrast_id, contrast_val in contrasts.iteritems():
        print "\tcontrast id: %s" % contrast_id
        z_map, t_map, eff_map, var_map = fmri_glm.contrast(
            contrasts[contrast_id],
            con_id=contrast_id,
            output_z=True,
            output_stat=True,
            output_effects=True,
            output_variance=True,
            )

        # store stat maps to disk
        for dtype, out_map in zip(['z', 't', 'effects', 'variance'],
                                  [z_map, t_map, eff_map, var_map]):
            map_dir = os.path.join(
                data['output_dir'], '%s_maps' % dtype)
            if not os.path.exists(map_dir):
                os.makedirs(map_dir)
            map_path = os.path.join(
                map_dir, '%s.nii.gz' % contrast_id)
            nibabel.save(out_map, map_path)

            # collect zmaps for contrasts we're interested in
            if contrast_id == 'active-rest' and dtype == "z":
                z_maps[contrast_id] = map_path

            print "\t\t%s map: %s" % (dtype, map_path)

        print

    # do stats report
    stats_report_filename = os.path.join(data['reports_output_dir'],
                                         "report_stats.html")
    contrasts = dict((contrast_id, contrasts[contrast_id])
                     for contrast_id in z_maps.keys())
    generate_subject_stats_report(
        stats_report_filename,
        contrasts,
        z_maps,
        fmri_glm.mask,
        design_matrices=[design_matrix],
        subject_id=data['subject_id'],
        anat=anat,
        anat_affine=anat_affine,
        cluster_th=50,  # we're only interested in this 'large' clusters

        # additional ``kwargs`` for more informative report
        paradigm=paradigm.__dict__,
        TR=tr,
        n_scans=n_scans,
        hfcut=hfcut,
        frametimes=frametimes,
        drift_model=drift_model,
        hrf_model=hrf_model,
        )

    ProgressReport().finish_dir(data['output_dir'])

    print "\r\nStatistic report written to %s\r\n" % stats_report_filename
Example #17
def make_designmat(ds,
                   eorig,
                   time_attr='time_coords',
                   condition_attr='targets',
                   design_kwargs=None,
                   regr_attrs=None):
    # make glm regressors for all attributes. so loop through condition_attr and add them all...
    import copy
    from nipy.modalities.fmri.design_matrix import make_dmtx
    import numpy as np
    # Decide which condition attribute the GLM will actually be run on
    if isinstance(condition_attr, basestring):
        # must be a list/tuple/array for the logic below
        condition_attr = [condition_attr]

    e = copy.deepcopy(eorig)  # since we are modifying in place
    glm_condition_attrs = []
    for i, con in enumerate(condition_attr):
        glm_condition_attr = 'regressors_' + str(con)
        glm_condition_attrs.append(glm_condition_attr)
        for ei in e:
            if glm_condition_attr in ei:
                raise ValueError(
                    "Event %s already has %s defined.  Should not "
                    "happen.  Choose another name for it" %
                    (ei, glm_condition_attr))
            ei[glm_condition_attr] = \
                'glm_label_' + str(con) + '_' + '+'.join(str(ei[c]) for c in [con])

    evvars = events2dict(e)
    add_paradigm_kwargs = {}
    if 'amplitude' in evvars:
        add_paradigm_kwargs['amplitude'] = evvars['amplitude']
    if design_kwargs is None:
        design_kwargs = {}
    if regr_attrs is not None:
        names = []
        regrs = []
        for attr in regr_attrs:
            regr = ds.sa[attr].value
            # add rudimentary dimension for easy hstacking later on
            if regr.ndim < 2:
                regr = regr[:, np.newaxis]
            if regr.shape[1] == 1:
                names.append(attr)
            else:
                #  add one per each column of the regressor
                for i in xrange(regr.shape[1]):
                    names.append("%s.%d" % (attr, i))
            regrs.append(regr)
        regrs = np.hstack(regrs)

        if 'add_regs' in design_kwargs:
            design_kwargs['add_regs'] = np.hstack(
                (design_kwargs['add_regs'], regrs))
        else:
            design_kwargs['add_regs'] = regrs
        if 'add_reg_names' in design_kwargs:
            design_kwargs['add_reg_names'].extend(names)
        else:
            design_kwargs['add_reg_names'] = names

    X = {}
    for ci, con in enumerate(condition_attr):
        # create paradigm
        if 'duration' in evvars:
            from nipy.modalities.fmri.experimental_paradigm import BlockParadigm
            # NiPy considers everything with a duration as a block paradigm
            paradigm = BlockParadigm(con_id=evvars[glm_condition_attrs[ci]],
                                     onset=evvars['onset'],
                                     duration=evvars['duration'],
                                     **add_paradigm_kwargs)
        else:
            from nipy.modalities.fmri.experimental_paradigm \
                import EventRelatedParadigm
            paradigm = EventRelatedParadigm(
                con_id=evvars[glm_condition_attrs[ci]],
                onset=evvars['onset'],
                **add_paradigm_kwargs)
        X[con] = make_dmtx(ds.sa[time_attr].value,
                           paradigm=paradigm,
                           **design_kwargs)
        for i, reg in enumerate(X[con].names):
            ds.sa[reg] = X[con].matrix[:, i]
        if con in ds.sa.keys():
            ds.sa.pop(con)

        for reg in ds.sa.keys():
            if str(con) + '0' in reg:
                ds.sa['glm_label_probe'] = ds.sa.pop(reg)

    # concatenate X... add chunk regressors...
    # if 'chunks' in ds.sa.keys():
    #     for i in ds.sa['chunks'].unique:
    #         ds.sa['glm_label_chunks' + str(i)] = np.array(ds.sa['chunks'].value == i, dtype=np.int)
    return X, ds
Example #18
    def _run_interface(self, runtime):

        session_info = self.inputs.session_info

        functional_runs = self.inputs.session_info[0]['scans']
        if isinstance(functional_runs, str):
            functional_runs = [functional_runs]
        nii = nb.load(functional_runs[0])
        data = nii.get_data()

        if isdefined(self.inputs.mask):
            mask = nb.load(self.inputs.mask).get_data() > 0
        else:
            mask = np.ones(nii.shape[:3]) == 1

        timeseries = data.copy()[mask, :]
        del data

        for functional_run in functional_runs[1:]:
            nii = nb.load(functional_run)
            data = nii.get_data()
            npdata = data.copy()
            del data
            timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1)
            del npdata

        nscans = timeseries.shape[1]

        if 'hpf' in session_info[0].keys():
            hpf = session_info[0]['hpf']
            drift_model = self.inputs.drift_model
        else:
            hpf = 0
            drift_model = "Blank"

        reg_names = []
        for reg in session_info[0]['regress']:
            reg_names.append(reg['name'])

        reg_vals = np.zeros((nscans, len(reg_names)))
        for i in range(len(reg_names)):
            reg_vals[:, i] = np.array(
                session_info[0]['regress'][i]['val']).reshape(1, -1)

        frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans)

        conditions = []
        onsets = []
        duration = []

        for i, cond in enumerate(session_info[0]['cond']):
            onsets += cond['onset']
            conditions += [cond['name']] * len(cond['onset'])
            if len(cond['duration']) == 1:
                duration += cond['duration'] * len(cond['onset'])
            else:
                duration += cond['duration']

        if conditions:
            paradigm = BlockParadigm(con_id=conditions,
                                     onset=onsets,
                                     duration=duration)
        else:
            paradigm = None
        design_matrix, self._reg_names = dm.dmtx_light(
            frametimes,
            paradigm,
            drift_model=drift_model,
            hfcut=hpf,
            hrf_model=self.inputs.hrf_model,
            add_regs=reg_vals,
            add_reg_names=reg_names)
        if self.inputs.normalize_design_matrix:
            for i in range(len(self._reg_names) - 1):
                design_matrix[:, i] = (
                    design_matrix[:, i] -
                    design_matrix[:, i].mean()) / design_matrix[:, i].std()

        if self.inputs.plot_design_matrix:
            import pylab
            pylab.pcolor(design_matrix)
            pylab.savefig("design_matrix.pdf")
            pylab.close()
            pylab.clf()

        glm = GLM.glm()
        glm.fit(timeseries.T,
                design_matrix,
                method=self.inputs.method,
                model=self.inputs.model)

        self._beta_file = os.path.abspath("beta.nii")
        beta = np.zeros(mask.shape + (glm.beta.shape[0], ))
        beta[mask, :] = glm.beta.T
        nb.save(nb.Nifti1Image(beta, nii.get_affine()), self._beta_file)

        self._s2_file = os.path.abspath("s2.nii")
        s2 = np.zeros(mask.shape)
        s2[mask] = glm.s2
        nb.save(nb.Nifti1Image(s2, nii.get_affine()), self._s2_file)

        if self.inputs.save_residuals:
            explained = np.dot(design_matrix, glm.beta)
            residuals = np.zeros(mask.shape + (nscans, ))
            residuals[mask, :] = timeseries - explained.T
            self._residuals_file = os.path.abspath("residuals.nii")
            nb.save(nb.Nifti1Image(residuals, nii.get_affine()),
                    self._residuals_file)

        self._nvbeta = glm.nvbeta
        self._dof = glm.dof
        self._constants = glm._constants
        self._axis = glm._axis
        if self.inputs.model == "ar1":
            self._a_file = os.path.abspath("a.nii")
            a = np.zeros(mask.shape)
            a[mask] = glm.a.squeeze()
            nb.save(nb.Nifti1Image(a, nii.get_affine()), self._a_file)
        self._model = glm.model
        self._method = glm.method

        return runtime