Example #1
def make_design_mtx(spm_mvt_file, hfcut=128, skip_TR=SKIPTR, normalize=True, to_add=None):
    """
    create the design matrix
    """
    mvt_arr = np.loadtxt(spm_mvt_file)
    mvt_arr = mvt_arr[skip_TR:,:]
    mvt_lab = ['tx','ty','tz','rx', 'ry', 'rz'] 
    
    assert mvt_arr.shape == (SK4RUNLENGH,6)
    if to_add is not None:
        assert to_add.shape[0] == SK4RUNLENGH
        addlab = [ 'a'+str(idx) for idx in range(to_add.shape[1])]
        mvt_arr = np.hstack((mvt_arr, to_add))
        mvt_lab = mvt_lab + addlab
    

    nbframes = mvt_arr.shape[0]
    frametimes = np.linspace(0.0, (nbframes-1)*2.0, num=nbframes)
    
    X, labels = dsgn_mtrx.dmtx_light(frametimes, paradigm=None, hrf_model='canonical',
              drift_model='cosine', hfcut=hfcut, 
              drift_order=1,
              fir_delays=[0], add_regs=mvt_arr, add_reg_names=mvt_lab)

    if normalize:
        X -= X.mean(axis=0)
        Xstd = X.std(axis=0)
        non_zero_col = Xstd > np.finfo(float).eps
        zero_col = np.logical_not(non_zero_col)
        X = np.hstack((X[:, non_zero_col] / Xstd[non_zero_col], X[:, zero_col]))
    
    return X, labels
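
A minimal usage sketch for the function above, assuming SKIPTR and SK4RUNLENGH are defined in the surrounding module and that 'rp_run1.txt' is a hypothetical SPM realignment-parameter file with six columns:

# hypothetical call; the file name and extra regressor are made up for illustration
extra = np.ones((SK4RUNLENGH, 1))      # e.g. a session-specific confound
X, labels = make_design_mtx('rp_run1.txt', hfcut=128, to_add=extra)
print(X.shape, labels)
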
Example #2
    def _run_interface(self, runtime):

        assert len(self.inputs.onsets) == len(self.inputs.events)

        self.inputs.onsets, self.inputs.events = [
            list(e) for e in zip(*sorted(zip(self.inputs.onsets, self.inputs.events)))
        ]

        if self.inputs.n_blocks == 0:
            self.inputs.n_blocks = int(np.sqrt(len(self.inputs.onsets)))

        image = nb.load(self.inputs.data)
        data = image.get_data()
        self.data_shape = image.shape

        length_block = self.data_shape[-1] // self.inputs.n_blocks

        paradigm = EventRelatedParadigm(self.inputs.events, self.inputs.onsets)
        frametimes = np.arange(0, self.data_shape[-1] * self.inputs.TR, self.inputs.TR)
        X, names = dm.dmtx_light(frametimes, paradigm, drift_model="polynomial", hfcut=128, hrf_model="canonical")

        for i in np.arange(0, self.data_shape[-1] + 1, length_block)[:-1]:
            save(X[i : i + length_block, :], os.path.abspath("./design_matrices_%s.hdf5" % (i // length_block)))
            nb.save(
                nb.Nifti1Image(data[..., i : i + length_block], image.get_affine(), image.get_header()),
                os.path.abspath("data_%s.nii.gz" % (i // length_block)),
            )

        return runtime
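
For reference, a standalone sketch (with toy numbers, not part of the interface) of how the block start indices above tile the time axis:

import numpy as np

n_vols, n_blocks = 100, 4                        # toy values
length_block = n_vols // n_blocks                # 25
starts = np.arange(0, n_vols + 1, length_block)[:-1]
print(starts)                                    # [ 0 25 50 75]
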
Example #3
    def _run_interface(self, runtime):

        
        session_info = self.inputs.session_info
        TR = self.inputs.TR

        data = nb.load(session_info['scans'])

        if not isdefined(self.inputs.q):
            self.inputs.q = int(np.sqrt(data.shape[-1]))

        q = self.inputs.q

        events = [[d['name']] * len(d['onset']) for d in session_info['cond']]
        onsets = [d['onset'] for d in session_info['cond']]

        import itertools
        events = list(itertools.chain(*events))
        onsets = list(itertools.chain(*onsets))

        paradigm = EventRelatedParadigm(events, onsets)

        frametimes = np.arange(0, data.shape[-1] * TR, TR) 
        X, names = dm.dmtx_light(frametimes, paradigm, drift_model='polynomial',
                  hfcut=128, hrf_model='canonical')
        
        np.save(os.path.abspath('X.npy'), X)

        cut_size = data.shape[-1] // q
        cuts = np.arange(0, data.shape[-1] + 1, cut_size)

        data_cut = np.array([data.get_data()[..., cuts[i]:cuts[i+1]] for i in np.arange(q)])
        X_cut = np.array([X[cuts[i]:cuts[i+1], :] for i in np.arange(q)])
        
        for i, (d, fn) in enumerate(zip(data_cut, self._gen_fnames('cutted_data'))):
            nb.save(nb.Nifti1Image(d, data.get_affine()), fn)

        for i, (x, fn) in enumerate(zip(X_cut, self._gen_fnames('design_matrices'))):
            np.save(fn, x)


        return runtime
Example #4
    def _run_interface(self, runtime):

        session_info = self.inputs.session_info

        functional_runs = self.inputs.session_info[0]['scans']
        if isinstance(functional_runs, str):
            functional_runs = [functional_runs]
        nii = nb.load(functional_runs[0])
        data = nii.get_data()


        if isdefined(self.inputs.mask):
            mask = nb.load(self.inputs.mask).get_data() > 0
        else:
            mask = np.ones(nii.shape[:3]) == 1

        timeseries = data.copy()[mask,:]
        del data

        for functional_run in functional_runs[1:]:
            nii = nb.load(functional_run)
            data = nii.get_data()
            npdata = data.copy()
            del data
            timeseries = np.concatenate((timeseries,npdata[mask,:]), axis=1)
            del npdata

        nscans = timeseries.shape[1]

        if 'hpf' in session_info[0].keys():
            hpf = session_info[0]['hpf']
            drift_model=self.inputs.drift_model
        else:
            hpf=0
            drift_model = "Blank"

        reg_names = []
        for reg in session_info[0]['regress']:
            reg_names.append(reg['name'])

        reg_vals = np.zeros((nscans,len(reg_names)))
        for i in range(len(reg_names)):
            reg_vals[:,i] = np.array(session_info[0]['regress'][i]['val']).reshape(1,-1)


        frametimes= np.linspace(0, (nscans-1)*self.inputs.TR, nscans)

        conditions = []
        onsets = []
        duration = []

        for i,cond in enumerate(session_info[0]['cond']):
            onsets += cond['onset']
            conditions += [cond['name']]*len(cond['onset'])
            if len(cond['duration']) == 1:
                duration += cond['duration']*len(cond['onset'])
            else:
                duration += cond['duration']


        if conditions:
            paradigm = BlockParadigm(con_id=conditions, onset=onsets, duration=duration)
        else:
            paradigm = None
        design_matrix, self._reg_names = dm.dmtx_light(frametimes, paradigm, drift_model=drift_model, hfcut=hpf,
               hrf_model=self.inputs.hrf_model,
               add_regs=reg_vals,
               add_reg_names=reg_names
               )
        if self.inputs.normalize_design_matrix:
            for i in range(len(self._reg_names)-1):
                design_matrix[:,i] = (design_matrix[:,i]-design_matrix[:,i].mean())/design_matrix[:,i].std()

        if self.inputs.plot_design_matrix:
            if pylab_available:
                pylab.pcolor(design_matrix)
                pylab.savefig("design_matrix.pdf")
                pylab.close()
                pylab.clf()
            else:
                raise Exception('Pylab not available for saving design matrix image')

        glm = GLM.glm()
        glm.fit(timeseries.T, design_matrix, method=self.inputs.method, model=self.inputs.model)


        self._beta_file = os.path.abspath("beta.nii")
        beta = np.zeros(mask.shape + (glm.beta.shape[0],))
        beta[mask,:] = glm.beta.T
        nb.save(nb.Nifti1Image(beta, nii.get_affine()), self._beta_file)

        self._s2_file = os.path.abspath("s2.nii")
        s2 = np.zeros(mask.shape)
        s2[mask] = glm.s2
        nb.save(nb.Nifti1Image(s2, nii.get_affine()), self._s2_file)

        if self.inputs.save_residuals:
            explained = np.dot(design_matrix,glm.beta)
            residuals = np.zeros(mask.shape + (nscans,))
            residuals[mask,:] = timeseries - explained.T
            self._residuals_file = os.path.abspath("residuals.nii")
            nb.save(nb.Nifti1Image(residuals, nii.get_affine()), self._residuals_file)

        self._nvbeta = glm.nvbeta
        self._dof = glm.dof
        self._constants = glm._constants
        self._axis = glm._axis
        if self.inputs.model == "ar1":
            self._a_file = os.path.abspath("a.nii")
            a = np.zeros(mask.shape)
            a[mask] = glm.a.squeeze()
            nb.save(nb.Nifti1Image(a, nii.get_affine()), self._a_file)
        self._model = glm.model
        self._method = glm.method

        return runtime
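
The glm.fit call above fits the design matrix to every in-mask time series (optionally with an AR(1) noise model). Roughly, the ordinary least-squares part of that fit corresponds to the following pure-NumPy sketch with made-up dimensions; this is not the nipy API, just the underlying arithmetic:

import numpy as np

n_scans, n_vox, n_reg = 120, 500, 8     # made-up dimensions
Y = np.random.randn(n_scans, n_vox)     # plays the role of timeseries.T
X = np.random.randn(n_scans, n_reg)     # plays the role of design_matrix
beta = np.linalg.pinv(X).dot(Y)         # (n_reg, n_vox), cf. glm.beta
resid = Y - X.dot(beta)
s2 = (resid ** 2).sum(axis=0) / (n_scans - np.linalg.matrix_rank(X))  # cf. glm.s2
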
Example #5
# write directory
write_dir = path.join(getcwd(), 'results')
if not path.exists(write_dir):
    mkdir(write_dir)

########################################
# Design matrix
########################################

paradigm = EventRelatedParadigm(conditions, onsets)
X, names = dmtx_light(frametimes,
                      paradigm,
                      drift_model='cosine',
                      hfcut=128,
                      hrf_model=hrf_model,
                      add_regs=motion,
                      add_reg_names=add_reg_names)

########################################
# Create ROIs
########################################

positions = np.array([[60, -30, 5], [50, 27, 5]])
# in mm (here in the MNI space)
radii = np.array([8, 6])

domain = grid_domain_from_image(mask)
my_roi = mroi.subdomain_from_balls(domain, positions, radii)
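
If nipy's mroi helpers are unavailable, a rough NumPy/nibabel-only sketch of the same idea (a spherical mask around an MNI coordinate, derived from the mask image's affine; the file name is hypothetical):

import numpy as np
import nibabel as nb

img = nb.load('mask.nii.gz')                           # hypothetical mask image
ijk = np.indices(img.shape[:3]).reshape(3, -1).T       # all voxel indices
xyz = nb.affines.apply_affine(img.affine, ijk)         # voxel -> mm coordinates
center, radius = np.array([60, -30, 5]), 8.0           # first ROI above
ball = (np.linalg.norm(xyz - center, axis=1) <= radius).reshape(img.shape[:3])
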
Example #6
    def _run_interface(self, runtime):
        import nibabel as nb
        import numpy as np
        import nipy.modalities.fmri.glm as GLM
        import nipy.modalities.fmri.design_matrix as dm

        try:
            BlockParadigm = dm.BlockParadigm
        except AttributeError:
            from nipy.modalities.fmri.experimental_paradigm import BlockParadigm

        session_info = self.inputs.session_info

        functional_runs = self.inputs.session_info[0]["scans"]
        if isinstance(functional_runs, (str, bytes)):
            functional_runs = [functional_runs]
        nii = nb.load(functional_runs[0])
        data = nii.get_data()

        if isdefined(self.inputs.mask):
            mask = nb.load(self.inputs.mask).get_data() > 0
        else:
            mask = np.ones(nii.shape[:3]) == 1

        timeseries = data.copy()[mask, :]
        del data

        for functional_run in functional_runs[1:]:
            nii = nb.load(functional_run, mmap=NUMPY_MMAP)
            data = nii.get_data()
            npdata = data.copy()
            del data
            timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1)
            del npdata

        nscans = timeseries.shape[1]

        if "hpf" in list(session_info[0].keys()):
            hpf = session_info[0]["hpf"]
            drift_model = self.inputs.drift_model
        else:
            hpf = 0
            drift_model = "Blank"

        reg_names = []
        for reg in session_info[0]["regress"]:
            reg_names.append(reg["name"])

        reg_vals = np.zeros((nscans, len(reg_names)))
        for i in range(len(reg_names)):
            reg_vals[:, i] = np.array(
                session_info[0]["regress"][i]["val"]).reshape(1, -1)

        frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans)

        conditions = []
        onsets = []
        duration = []

        for i, cond in enumerate(session_info[0]["cond"]):
            onsets += cond["onset"]
            conditions += [cond["name"]] * len(cond["onset"])
            if len(cond["duration"]) == 1:
                duration += cond["duration"] * len(cond["onset"])
            else:
                duration += cond["duration"]

        if conditions:
            paradigm = BlockParadigm(con_id=conditions,
                                     onset=onsets,
                                     duration=duration)
        else:
            paradigm = None
        design_matrix, self._reg_names = dm.dmtx_light(
            frametimes,
            paradigm,
            drift_model=drift_model,
            hfcut=hpf,
            hrf_model=self.inputs.hrf_model,
            add_regs=reg_vals,
            add_reg_names=reg_names,
        )
        if self.inputs.normalize_design_matrix:
            for i in range(len(self._reg_names) - 1):
                design_matrix[:, i] = (
                    design_matrix[:, i] -
                    design_matrix[:, i].mean()) / design_matrix[:, i].std()

        if self.inputs.plot_design_matrix:
            import pylab

            pylab.pcolor(design_matrix)
            pylab.savefig("design_matrix.pdf")
            pylab.close()
            pylab.clf()

        glm = GLM.GeneralLinearModel()
        glm.fit(
            timeseries.T,
            design_matrix,
            method=self.inputs.method,
            model=self.inputs.model,
        )

        self._beta_file = os.path.abspath("beta.nii")
        beta = np.zeros(mask.shape + (glm.beta.shape[0], ))
        beta[mask, :] = glm.beta.T
        nb.save(nb.Nifti1Image(beta, nii.affine), self._beta_file)

        self._s2_file = os.path.abspath("s2.nii")
        s2 = np.zeros(mask.shape)
        s2[mask] = glm.s2
        nb.save(nb.Nifti1Image(s2, nii.affine), self._s2_file)

        if self.inputs.save_residuals:
            explained = np.dot(design_matrix, glm.beta)
            residuals = np.zeros(mask.shape + (nscans, ))
            residuals[mask, :] = timeseries - explained.T
            self._residuals_file = os.path.abspath("residuals.nii")
            nb.save(nb.Nifti1Image(residuals, nii.affine),
                    self._residuals_file)

        self._nvbeta = glm.nvbeta
        self._dof = glm.dof
        self._constants = glm._constants
        self._axis = glm._axis
        if self.inputs.model == "ar1":
            self._a_file = os.path.abspath("a.nii")
            a = np.zeros(mask.shape)
            a[mask] = glm.a.squeeze()
            nb.save(nb.Nifti1Image(a, nii.affine), self._a_file)
        self._model = glm.model
        self._method = glm.method

        return runtime
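
The normalize_design_matrix loop above standardizes every column except the last (the constant term). An equivalent vectorized sketch, with an added guard against zero-variance columns that the loop above does not have:

import numpy as np

def normalize_columns(design_matrix):
    """Standardize all but the last (constant) column of a design matrix."""
    X = design_matrix.copy()
    mu, sd = X[:, :-1].mean(axis=0), X[:, :-1].std(axis=0)
    sd[sd < np.finfo(float).eps] = 1.0   # avoid division by zero for constant-valued columns
    X[:, :-1] = (X[:, :-1] - mu) / sd
    return X
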
Example #7
onsets = np.linspace(5, (n_scans - 1) * tr - 10, 20) # in seconds
hrf_model = 'canonical'
motion = np.cumsum(np.random.randn(n_scans, 6), 0)
add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']

# write directory
write_dir = os.getcwd()

########################################
# Design matrix
########################################

paradigm = EventRelatedParadigm(conditions, onsets)
X, names = dmtx_light(frametimes, paradigm, drift_model='cosine', hfcut=128,
                      hrf_model=hrf_model, add_regs=motion,
                      add_reg_names=add_reg_names)


#######################################
# Get the FMRI data
#######################################

fmri_data = surrogate_4d_dataset(mask=mask, dmtx=X, seed=1)[0]

# if you want to save it as an image
# data_file = op.join(write_dir,'fmri_data.nii')
# save(fmri_data, data_file)

########################################
# Perform a GLM analysis
Example #8
#   'canonical with derivative' or 'fir'
hrf_model = 'canonical'

# fake motion parameters to be included in the model
motion = np.cumsum(np.random.randn(n_scans, 6), 0)
add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']

########################################
# Design matrix
########################################

paradigm = EventRelatedParadigm(conditions, onsets)
X, names = dm.dmtx_light(frametimes,
                         paradigm,
                         drift_model='cosine',
                         hfcut=128,
                         hrf_model=hrf_model,
                         add_regs=motion,
                         add_reg_names=add_reg_names)

#######################################
# Get the FMRI data
#######################################

fmri_data = surrogate_4d_dataset(shape=shape, n_scans=n_scans)[0]

# if you want to save it as an image
data_file = 'fmri_data.nii'
save(fmri_data, data_file)

########################################
Example #9
conditions = np.arange(20) % 2
onsets = np.linspace(5, (n_scans - 1) * tr - 10, 20) # in seconds
hrf_model = 'Canonical'
motion = np.cumsum(np.random.randn(n_scans, 6), 0)
add_reg_names = ['tx', 'ty', 'tz', 'rx', 'ry', 'rz']

# write directory
swd = tempfile.mkdtemp()

########################################
# Design matrix
########################################

paradigm = EventRelatedParadigm(conditions, onsets)
X, names = dm.dmtx_light(frametimes, paradigm, drift_model='Cosine', hfcut=128,
                         hrf_model=hrf_model, add_regs=motion,
                         add_reg_names=add_reg_names)


#######################################
# Get the FMRI data
#######################################

fmri_data = surrogate_4d_dataset(shape=shape, n_scans=n_scans)[0]

# if you want to save it as an image
data_file = op.join(swd, 'fmri_data.nii')
save(fmri_data, data_file)

########################################
# Perform a GLM analysis
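
Examples #7-#9 assume that n_scans, tr, shape, frametimes and conditions were defined earlier in the original scripts. A plausible setup, with purely hypothetical values, would be:

import numpy as np

n_scans, tr = 128, 2.4                             # hypothetical scan count and TR
shape = (40, 40, 40)                               # hypothetical volume shape
frametimes = np.arange(n_scans) * tr
conditions = np.arange(20) % 2                     # alternating event types, as in Example #9
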
Example #10
    def _run_interface(self, runtime):

        session_info = self.inputs.session_info

        functional_runs = self.inputs.session_info[0]['scans']
        if isinstance(functional_runs, str):
            functional_runs = [functional_runs]
        nii = nb.load(functional_runs[0])
        data = nii.get_data()

        if isdefined(self.inputs.mask):
            mask = nb.load(self.inputs.mask).get_data() > 0
        else:
            mask = np.ones(nii.shape[:3]) == 1

        timeseries = data.copy()[mask, :]
        del data

        for functional_run in functional_runs[1:]:
            nii = nb.load(functional_run)
            data = nii.get_data()
            npdata = data.copy()
            del data
            timeseries = np.concatenate((timeseries, npdata[mask, :]), axis=1)
            del npdata

        nscans = timeseries.shape[1]

        if 'hpf' in session_info[0].keys():
            hpf = session_info[0]['hpf']
            drift_model = self.inputs.drift_model
        else:
            hpf = 0
            drift_model = "Blank"

        reg_names = []
        for reg in session_info[0]['regress']:
            reg_names.append(reg['name'])

        reg_vals = np.zeros((nscans, len(reg_names)))
        for i in range(len(reg_names)):
            reg_vals[:, i] = np.array(
                session_info[0]['regress'][i]['val']).reshape(1, -1)

        frametimes = np.linspace(0, (nscans - 1) * self.inputs.TR, nscans)

        conditions = []
        onsets = []
        duration = []

        for i, cond in enumerate(session_info[0]['cond']):
            onsets += cond['onset']
            conditions += [cond['name']] * len(cond['onset'])
            if len(cond['duration']) == 1:
                duration += cond['duration'] * len(cond['onset'])
            else:
                duration += cond['duration']

        if conditions:
            paradigm = BlockParadigm(con_id=conditions,
                                     onset=onsets,
                                     duration=duration)
        else:
            paradigm = None
        design_matrix, self._reg_names = dm.dmtx_light(
            frametimes,
            paradigm,
            drift_model=drift_model,
            hfcut=hpf,
            hrf_model=self.inputs.hrf_model,
            add_regs=reg_vals,
            add_reg_names=reg_names)
        if self.inputs.normalize_design_matrix:
            for i in range(len(self._reg_names) - 1):
                design_matrix[:, i] = (
                    design_matrix[:, i] -
                    design_matrix[:, i].mean()) / design_matrix[:, i].std()

        if self.inputs.plot_design_matrix:
            import pylab
            pylab.pcolor(design_matrix)
            pylab.savefig("design_matrix.pdf")
            pylab.close()
            pylab.clf()

        glm = GLM.glm()
        glm.fit(timeseries.T,
                design_matrix,
                method=self.inputs.method,
                model=self.inputs.model)

        self._beta_file = os.path.abspath("beta.nii")
        beta = np.zeros(mask.shape + (glm.beta.shape[0], ))
        beta[mask, :] = glm.beta.T
        nb.save(nb.Nifti1Image(beta, nii.get_affine()), self._beta_file)

        self._s2_file = os.path.abspath("s2.nii")
        s2 = np.zeros(mask.shape)
        s2[mask] = glm.s2
        nb.save(nb.Nifti1Image(s2, nii.get_affine()), self._s2_file)

        if self.inputs.save_residuals:
            explained = np.dot(design_matrix, glm.beta)
            residuals = np.zeros(mask.shape + (nscans, ))
            residuals[mask, :] = timeseries - explained.T
            self._residuals_file = os.path.abspath("residuals.nii")
            nb.save(nb.Nifti1Image(residuals, nii.get_affine()),
                    self._residuals_file)

        self._nvbeta = glm.nvbeta
        self._dof = glm.dof
        self._constants = glm._constants
        self._axis = glm._axis
        if self.inputs.model == "ar1":
            self._a_file = os.path.abspath("a.nii")
            a = np.zeros(mask.shape)
            a[mask] = glm.a.squeeze()
            nb.save(nb.Nifti1Image(a, nii.get_affine()), self._a_file)
        self._model = glm.model
        self._method = glm.method

        return runtime
Example #11
    def _run_interface(self, runtime):

        data_list = [nb.load(fn).get_data() for fn in self.inputs.data_files]
        onsets = self.inputs.onsets

        paradigm = EventRelatedParadigm(["a"] * len(onsets), onsets)
        frametimes = np.arange(0, data_list[0].shape[-1] * self.inputs.TR, self.inputs.TR)
        X, names = dm.dmtx_light(frametimes, paradigm, drift_model="polynomial", hfcut=128, hrf_model="canonical")

        self.X = X

        X_T_inv = np.linalg.pinv(np.dot(X.T, X))
        calc_beta = np.dot(X_T_inv, X.T)

        # Do OLS
        mean_data = np.mean(data_list, 0)
        self.ols_beta = np.dot(calc_beta, mean_data.reshape(np.prod(mean_data.shape[:-1]), mean_data.shape[-1]).T)
        predicted = np.dot(X, self.ols_beta).T.reshape(mean_data.shape)
        resid = mean_data - predicted
        ss = bottleneck.ss(resid, -1)
        self.ols_var = np.outer(X_T_inv, ss)

        # Create individual residuals for the sandwich estimator:
        self.sss = np.zeros(ss.shape)
        self.residuals = []
        for data in data_list:
            beta = np.dot(calc_beta, data.reshape(np.prod(data.shape[:-1]), mean_data.shape[-1]).T)
            self.predicted = np.dot(X, self.ols_beta).T.reshape(mean_data.shape)
            resid = mean_data - self.predicted
            self.residuals.append(resid)
            self.sss += bottleneck.ss(resid, -1)

        if len(data_list) > 5:
            self.sss = self.sss / (len(data_list) - 1)
        else:
            self.sss = self.sss / len(data_list)

        self.sandwich_var = np.outer(np.dot(calc_beta, calc_beta.T), self.sss) / len(data_list)

        self.contrasts = np.array(self.inputs.contrasts)

        self.residuals = np.array(self.residuals).swapaxes(0, -1)

        self.sandwich_var = self.sandwich_var.T.reshape(mean_data.shape[:-1] + (-1,))

        self.ols_beta = self.ols_beta.T.reshape(mean_data.shape[:-1] + (-1,))
        self.ols_var = self.ols_var.T.reshape(mean_data.shape[:-1] + (-1,))

        if self.inputs.variance_to_use == "ols":
            self.z = (self.ols_beta[:, :, 0] / (np.sqrt(self.ols_var[:, :, 0]) / np.sqrt(len(data_list)))).squeeze()
        else:
            self.z = (
                self.ols_beta[:, :, 0] / (np.sqrt(self.sandwich_var[:, :, 0]) / np.sqrt(len(data_list)))
            ).squeeze()

        self.z = self.z.T.reshape(mean_data.shape[:-1] + (-1,))

        nb.save(nb.Nifti1Image(self.ols_beta, np.identity(4)), "ols_beta.nii.gz")
        nb.save(nb.Nifti1Image(self.ols_var, np.identity(4)), "ols_var.nii.gz")
        nb.save(nb.Nifti1Image(self.sandwich_var, np.identity(4)), "sandwich_var.nii.gz")
        nb.save(nb.Nifti1Image(self.z, np.identity(4)), "z_%s.nii.gz" % self.inputs.variance_to_use)
        nb.save(nb.Nifti1Image(self.residuals, np.identity(4)), "residuals.nii.gz")

        save(self.X, "design_matrix.hdf5")

        return runtime
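
As a rough illustration of the idea behind comparing the OLS variance with a between-run ("sandwich"-style) variance, here is a toy simulation that is not part of the interface above:

import numpy as np

rng = np.random.default_rng(0)
n_scans, n_runs = 100, 8
X = np.column_stack([rng.standard_normal(n_scans), np.ones(n_scans)])
calc_beta = np.linalg.pinv(X)                       # (X'X)^-1 X', as in the example
runs = [X @ np.array([0.5, 1.0]) + rng.standard_normal(n_scans) for _ in range(n_runs)]
betas = np.array([calc_beta @ y for y in runs])     # per-run estimates
print(betas.mean(axis=0), betas.std(axis=0) / np.sqrt(n_runs))   # effect and its empirical SEM
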