def test_fir_glm(self):
    from pyhrf import FmriData
    from pyhrf.glm import glm_nipy

    fdata = FmriData.from_vol_ui()
    glm_nipy(fdata, hrf_model='FIR', fir_delays=range(10))
def test_glm_default_real_data(self):
    from pyhrf import FmriData
    from pyhrf.glm import glm_nipy

    fdata = FmriData.from_vol_ui()
    glm_nipy(fdata)
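# Illustrative sketch (not one of the original tests, helper name is
# hypothetical): calling glm_nipy directly and unpacking its result. As in
# test_glm_contrasts and analyse_roi below, glm_nipy returns a
# (glm, design_matrix, contrasts) triple when used this way.
def example_glm_nipy_usage():
    from pyhrf import FmriData
    from pyhrf.glm import glm_nipy

    fdata = FmriData.from_vol_ui()
    glm, design_matrix, contrasts = glm_nipy(fdata)
    # design_matrix.matrix is the (n_scans, n_regressors) array and
    # design_matrix.names lists the regressor labels (see analyse_roi below).
    return glm, design_matrix, contrasts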
def test_glm_contrasts(self):
    from pyhrf import FmriData
    from pyhrf.glm import glm_nipy

    cons = {'audio-video': 'audio - video',
            'video-audio': 'video - audio'}
    fdata = FmriData.from_vol_ui()
    g, dm, cons = glm_nipy(fdata, contrasts=cons)
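# Illustrative sketch (hypothetical helper, not from the pyhrf sources): how
# the contrast objects returned by glm_nipy are consumed elsewhere in this
# section. The .effect, .variance and .pvalue() attributes are the ones used
# in analyse_roi and parcellation_for_jde below.
def example_inspect_contrasts(cons):
    for cname, con in cons.iteritems():
        effect = con.effect                        # per-voxel contrast estimate
        pval = con.pvalue()                        # per-voxel p-values
        ncon = con.effect / con.variance.std()     # normalisation used in analyse_roi
        print cname, effect.shape, pval.shape, ncon.shape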
def analyse_roi(self, fdata):
    glm = glm_nipy(fdata, contrasts=None, hrf_model='Canonical',
                   drift_model='Cosine', hfcut=128,
                   residuals_model='spherical', fit_method='ols',
                   fir_duration=None, fir_delays=None)

    outputs = {}
    ns = fdata.shape[0]
    design_matrix = glm.design_matrix
    dm = design_matrix.matrix  # (time x regressor) array
    tr = fdata.tr
    # Regressor labels come from the design matrix object, not from the
    # plain array extracted above.
    cdesign_matrix = xndarray(dm, axes_names=['time', 'regressor'],
                              axes_domains={'time': np.arange(ns) * tr,
                                            'regressor': design_matrix.names})
    outputs['design_matrix'] = cdesign_matrix
    return outputs
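# Toy sketch (synthetic data, hypothetical helper) of the xndarray wrapping
# pattern used in analyse_roi: name the axes and attach explicit domains so
# the design matrix stays self-describing. Relies on the np / xndarray
# imports already available in this module.
def example_design_matrix_cuboid():
    ns, tr = 128, 2.0                             # assumed number of scans and TR
    toy_dm = np.random.randn(ns, 3)               # stand-in design matrix
    reg_names = ['audio', 'video', 'constant']    # illustrative regressor names
    return xndarray(toy_dm, axes_names=['time', 'regressor'],
                    axes_domains={'time': np.arange(ns) * tr,
                                  'regressor': reg_names})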
def analyse_roi(self, fdata):
    pyhrf.verbose(1, 'Run GLM analysis (ROI %d) ...' % fdata.get_roi_id())

    if self.rescale_factor is not None:
        m = np.where(fdata.roiMask)
        rescale_factor = self.rescale_factor[:, m[0], m[1], m[2]]
    else:
        rescale_factor = None

    glm, dm, cons = glm_nipy(fdata, contrasts=self.contrasts,
                             hrf_model=self.hrf_model,
                             drift_model=self.drift_model, hfcut=self.hfcut,
                             residuals_model=self.residuals_model,
                             fit_method=self.fit_method,
                             fir_delays=self.fir_delays,
                             rescale_results=self.rescale_results,
                             rescale_factor=rescale_factor)

    outputs = {}
    ns, nr = dm.matrix.shape
    tr = fdata.tr

    if rescale_factor is not None:
        # same sf for all voxels
        dm.matrix[:, :rescale_factor.shape[0]] /= rescale_factor[:, 0]

    cdesign_matrix = xndarray(dm.matrix, axes_names=['time', 'regressor'],
                              axes_domains={'time': np.arange(ns) * tr,
                                            'regressor': dm.names})
    outputs['design_matrix'] = cdesign_matrix

    axes_names = ['time', 'voxel']
    axes_domains = {'time': np.arange(ns) * tr}
    bold = xndarray(fdata.bold.astype(np.float32), axes_names=axes_names,
                    axes_domains=axes_domains, value_label='BOLD')

    fit = np.dot(dm.matrix, glm.beta)
    cfit = xndarray(fit, axes_names=['time', 'voxel'],
                    axes_domains={'time': np.arange(ns) * tr})
    outputs['bold_fit'] = stack_cuboids([bold, cfit], 'stype', ['bold', 'fit'])

    nb_cond = fdata.nbConditions
    fit_cond = np.dot(dm.matrix[:, :nb_cond], glm.beta[:nb_cond, :])
    fit_cond -= fit_cond.mean(0)
    fit_cond += fdata.bold.mean(0)
    outputs['fit_cond'] = xndarray(fit_cond, axes_names=['time', 'voxel'],
                                   axes_domains={'time': np.arange(ns) * tr})

    outputs['s2'] = xndarray(glm.s2, axes_names=['voxel'])

    if 0:
        cbeta = xndarray(glm.beta, axes_names=['reg_name', 'voxel'],
                         axes_domains={'reg_name': dm.names})
        outputs['beta'] = cbeta
    else:
        if self.hrf_model == 'FIR':
            fir = dict((d * fdata.tr, OrderedDict()) for d in self.fir_delays)

        for ib, bname in enumerate(dm.names):
            outputs['beta_' + bname] = xndarray(glm.beta[ib],
                                                axes_names=['voxel'])
            if self.hrf_model == 'FIR' and 'delay' in bname:
                # reconstruct filter:
                cond, delay = bname.split('_delay_')
                delay = int(delay) * fdata.tr
                fir[delay][cond] = xndarray(glm.beta[ib],
                                            axes_names=['voxel'])

        if self.hrf_model == 'FIR':
            chrf = tree_to_cuboid(fir, ['time', 'condition'])
            outputs['hrf'] = chrf
            outputs['hrf_norm'] = (chrf ** 2).sum('time') ** .5

    for cname, con in cons.iteritems():
        # print 'con:'
        # print dir(con)
        outputs['con_effect_' + cname] = xndarray(con.effect,
                                                  axes_names=['voxel'])
        # print '%%%%%%% con.variance:', con.variance.shape
        ncon = con.effect / con.variance.std()
        outputs['ncon_effect_' + cname] = xndarray(ncon, axes_names=['voxel'])
        outputs['con_pvalue_' + cname] = xndarray(con.pvalue(self.con_bl),
                                                  axes_names=['voxel'])

    roi_lab_vol = np.zeros(fdata.get_nb_vox_in_mask(), dtype=np.int32) + \
        fdata.get_roi_id()
    outputs['mask'] = xndarray(roi_lab_vol, axes_names=['voxel'])

    # for ib, bname in enumerate(design_matrix.names):
    #     beta_vol = expand_array_in_mask(my_glm.beta[ib], mask_array)
    #     beta_image = Nifti1Image(beta_vol, affine)
    #     beta_file = op.join(output_dir, 'beta_%s.nii' % bname)
    #     save(beta_image, beta_file)
    #     beta_files.append(beta_file)

    return outputs
def parcellation_for_jde(fmri_data, avg_parcel_size=250, output_dir=None,
                         method='gkm', glm_drift='Cosine', glm_hfcut=128):
    """
    method: gkm, ward, ward_and_gkm
    """

    if output_dir is None:
        output_dir = tempfile.mkdtemp(prefix='pyhrf_JDE_parcellation_GLM',
                                      dir=pyhrf.cfg['global']['tmp_path'])
    glm_output_dir = op.join(output_dir, 'GLM_for_parcellation')
    if not op.exists(glm_output_dir):
        os.makedirs(glm_output_dir)

    pyhrf.verbose(1, 'GLM for parcellation')

    # if fmri_data.data_type == 'volume':
    #     paradigm_file, bold_file, mask_file = fmri_data.save(glm_output_dir)
    #     beta_files = glm_nipy_from_files(bold_file, fmri_data.tr,
    #                                      paradigm_file, glm_output_dir,
    #                                      mask_file, drift_model=glm_drift,
    #                                      hfcut=glm_hfcut)
    # elif fmri_data.data_type == 'surface':
    #     beta_files = glm_nipy(fmri_data, glm_output_dir,
    #                           drift_model=glm_drift, hfcut=glm_hfcut)

    g, dm, cons = glm_nipy(fmri_data, drift_model=glm_drift, hfcut=glm_hfcut)

    pval_files = []
    if cons is not None:
        func_data = [('con_pval_%s' % cname, con.pvalue())
                     for cname, con in cons.iteritems()]
    else:
        reg_cst_drift = re.compile(".*constant.*|.*drift.*")
        func_data = [('beta_%s' % reg_name, g.beta[ir])
                     for ir, reg_name in enumerate(dm.names)
                     if not reg_cst_drift.match(reg_name)]

    for name, data in func_data:
        val_vol = expand_array_in_mask(data, fmri_data.roiMask > 0)
        val_fn = op.join(glm_output_dir, '%s.nii' % name)
        write_volume(val_vol, val_fn, fmri_data.meta_obj)
        pval_files.append(val_fn)

    mask_file = op.join(glm_output_dir, 'mask.nii')
    write_volume(fmri_data.roiMask > 0, mask_file, fmri_data.meta_obj)

    nvox = fmri_data.get_nb_vox_in_mask()
    nparcels = round_nb_parcels(nvox * 1. / avg_parcel_size)

    pyhrf.verbose(1, 'Parcellation from GLM outputs, method: %s, '
                  'nb parcels: %d' % (method, nparcels))

    if fmri_data.data_type == 'volume':
        parcellation_file = op.join(output_dir, 'parcellation_%s_np%d.nii'
                                    % (method, nparcels))
        make_parcellation_from_files(pval_files, mask_file, parcellation_file,
                                     nparcels, method)
        parcellation, _ = read_volume(parcellation_file)
    else:
        mesh_file = fmri_data.data_files[-1]
        parcellation_file = op.join(output_dir, 'parcellation_%s_np%d.gii'
                                    % (method, nparcels))
        make_parcellation_surf_from_files(pval_files, mesh_file,
                                          parcellation_file, nparcels,
                                          method, verbose=1)
        parcellation, _ = read_texture(parcellation_file)
    # print parcellation_file

    pyhrf.verbose(1, parcellation_report(parcellation))

    return parcellation, parcellation_file
def parcellation_for_jde(fmri_data, avg_parcel_size=250, output_dir=None,
                         method='gkm', glm_drift='Cosine', glm_hfcut=128):
    """
    method: gkm, ward, ward_and_gkm
    """

    if output_dir is None:
        output_dir = tempfile.mkdtemp(prefix='pyhrf_JDE_parcellation_GLM',
                                      dir=pyhrf.cfg['global']['tmp_path'])
    glm_output_dir = op.join(output_dir, 'GLM_for_parcellation')
    if not op.exists(glm_output_dir):
        os.makedirs(glm_output_dir)

    logger.info('GLM for parcellation')

    g, dm, cons = glm_nipy(fmri_data, drift_model=glm_drift, hfcut=glm_hfcut)

    pval_files = []
    if cons is not None:
        func_data = [('con_pval_%s' % cname, con.pvalue())
                     for cname, con in cons.iteritems()]
    else:
        reg_cst_drift = re.compile(".*constant.*|.*drift.*")
        func_data = [('beta_%s' % reg_name, g.beta[ir])
                     for ir, reg_name in enumerate(dm.names)
                     if not reg_cst_drift.match(reg_name)]

    for name, data in func_data:
        val_vol = expand_array_in_mask(data, fmri_data.roiMask > 0)
        val_fn = op.join(glm_output_dir, '%s.nii' % name)
        write_volume(val_vol, val_fn, fmri_data.meta_obj)
        pval_files.append(val_fn)

    mask_file = op.join(glm_output_dir, 'mask.nii')
    write_volume(fmri_data.roiMask > 0, mask_file, fmri_data.meta_obj)

    nvox = fmri_data.get_nb_vox_in_mask()
    nparcels = round_nb_parcels(nvox * 1. / avg_parcel_size)

    logger.info('Parcellation from GLM outputs, method: %s, nb parcels: %d',
                method, nparcels)

    if fmri_data.data_type == 'volume':
        parcellation_file = op.join(output_dir, 'parcellation_%s_np%d.nii'
                                    % (method, nparcels))
        make_parcellation_from_files(pval_files, mask_file, parcellation_file,
                                     nparcels, method)
        parcellation, _ = read_volume(parcellation_file)
    else:
        mesh_file = fmri_data.data_files[-1]
        parcellation_file = op.join(output_dir, 'parcellation_%s_np%d.gii'
                                    % (method, nparcels))
        make_parcellation_surf_from_files(pval_files, mesh_file,
                                          parcellation_file, nparcels,
                                          method, verbose=1)
        parcellation, _ = read_texture(parcellation_file)

    logger.info(parcellation_report(parcellation))

    return parcellation, parcellation_file
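# Usage sketch (hypothetical helper, assumed inputs): driving
# parcellation_for_jde with an FmriData object. 'ward_and_gkm' is one of the
# methods listed in its docstring; the call returns the parcellation array
# and the file it was written to.
def example_parcellation_for_jde(fdata):
    parcellation, parcellation_file = parcellation_for_jde(
        fdata, avg_parcel_size=250, method='ward_and_gkm')
    return parcellation, parcellation_file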
def analyse_roi(self, fdata):
    logger.info('Run GLM analysis (ROI %d) ...', fdata.get_roi_id())

    if self.rescale_factor is not None:
        m = np.where(fdata.roiMask)
        rescale_factor = self.rescale_factor[:, m[0], m[1], m[2]]
    else:
        rescale_factor = None

    glm, dm, cons = glm_nipy(fdata, contrasts=self.contrasts,
                             hrf_model=self.hrf_model,
                             drift_model=self.drift_model, hfcut=self.hfcut,
                             residuals_model=self.residuals_model,
                             fit_method=self.fit_method,
                             fir_delays=self.fir_delays,
                             rescale_results=self.rescale_results,
                             rescale_factor=rescale_factor)

    outputs = {}
    ns, nr = dm.matrix.shape
    tr = fdata.tr

    if rescale_factor is not None:
        # same sf for all voxels
        dm.matrix[:, :rescale_factor.shape[0]] /= rescale_factor[:, 0]

    cdesign_matrix = xndarray(dm.matrix, axes_names=['time', 'regressor'],
                              axes_domains={'time': np.arange(ns) * tr,
                                            'regressor': dm.names})
    outputs['design_matrix'] = cdesign_matrix

    if self.output_fit:
        axes_names = ['time', 'voxel']
        axes_domains = {'time': np.arange(ns) * tr}
        bold = xndarray(fdata.bold.astype(np.float32), axes_names=axes_names,
                        axes_domains=axes_domains, value_label='BOLD')

        fit = np.dot(dm.matrix, glm.beta)
        cfit = xndarray(fit, axes_names=['time', 'voxel'],
                        axes_domains={'time': np.arange(ns) * tr})
        outputs['bold_fit'] = stack_cuboids([bold, cfit], 'stype',
                                            ['bold', 'fit'])

    nb_cond = fdata.nbConditions
    fit_cond = np.dot(dm.matrix[:, :nb_cond], glm.beta[:nb_cond, :])
    fit_cond -= fit_cond.mean(0)
    fit_cond += fdata.bold.mean(0)
    outputs['fit_cond'] = xndarray(fit_cond, axes_names=['time', 'voxel'],
                                   axes_domains={'time': np.arange(ns) * tr})

    s2 = np.atleast_1d(glm.s2)
    outputs['s2'] = xndarray(s2, axes_names=['voxel'])

    if 0:
        cbeta = xndarray(glm.beta, axes_names=['reg_name', 'voxel'],
                         axes_domains={'reg_name': dm.names})
        outputs['beta'] = cbeta
    else:
        if self.hrf_model == 'FIR':
            fir = dict((d * fdata.tr, OrderedDict()) for d in self.fir_delays)

        for ib, bname in enumerate(dm.names):
            outputs['beta_' + bname] = xndarray(glm.beta[ib],
                                                axes_names=['voxel'])
            if self.hrf_model == 'FIR' and 'delay' in bname:
                # reconstruct filter:
                cond, delay = bname.split('_delay_')
                delay = int(delay) * fdata.tr
                fir[delay][cond] = xndarray(glm.beta[ib],
                                            axes_names=['voxel'])

        if self.hrf_model == 'FIR':
            chrf = tree_to_xndarray(fir, ['time', 'condition'])
            outputs['hrf'] = chrf
            outputs['hrf_norm'] = (chrf ** 2).sum('time') ** .5

    for cname, con in cons.iteritems():
        # print 'con:'
        # print dir(con)
        outputs['con_effect_' + cname] = xndarray(con.effect,
                                                  axes_names=['voxel'])
        # print '%%%%%%% con.variance:', con.variance.shape
        ncon = con.effect / con.variance.std()
        outputs['ncon_effect_' + cname] = xndarray(ncon, axes_names=['voxel'])
        outputs['con_pvalue_' + cname] = xndarray(con.pvalue(self.con_bl),
                                                  axes_names=['voxel'])

    roi_lab_vol = np.zeros(fdata.get_nb_vox_in_mask(), dtype=np.int32) + \
        fdata.get_roi_id()
    outputs['mask'] = xndarray(roi_lab_vol, axes_names=['voxel'])

    # for ib, bname in enumerate(design_matrix.names):
    #     beta_vol = expand_array_in_mask(my_glm.beta[ib], mask_array)
    #     beta_image = Nifti1Image(beta_vol, affine)
    #     beta_file = op.join(output_dir, 'beta_%s.nii' % bname)
    #     save(beta_image, beta_file)
    #     beta_files.append(beta_file)

    return outputs
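# Illustration (synthetic regressor names, assumed TR, hypothetical helper)
# of the naming convention analyse_roi relies on for FIR models: regressors
# are labelled '<condition>_delay_<k>', and splitting on '_delay_' recovers
# the condition plus the tap index, which maps to time k * TR.
def example_fir_regressor_names(tr=2.4):
    for bname in ['audio_delay_0', 'audio_delay_1', 'video_delay_0']:
        cond, delay = bname.split('_delay_')
        print cond, int(delay) * tr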