def merge_fmri_sessions(fmri_data_sets):
    """Merge several single-session FmriData objects into one multi-session one.

    Args:
        fmri_data_sets: list of FmriData objects. Each FmriData object is
            assumed to contain only one session.

    Returns:
        A single FmriData gathering onsets, durations, bold signal and
        session scan indexes from all input sessions. Spatial information
        (mask, graphs, meta data, ...) is taken from the first session.
    """
    all_onsets = stack_trees([fd.paradigm.stimOnsets for fd in fmri_data_sets])
    all_onsets = apply_to_leaves(all_onsets, lambda l: [e[0] for e in l])
    durations = stack_trees([fd.paradigm.stimDurations
                             for fd in fmri_data_sets])
    durations = apply_to_leaves(durations, lambda x: [e[0] for e in x])

    # Concatenate bold signals along the time (scan) axis and record which
    # scan indexes belong to which session.
    bold = np.concatenate([fd.bold for fd in fmri_data_sets])
    ss = []
    lastScan = 0
    for fd in fmri_data_sets:
        nscans = fd.bold.shape[0]
        ss.append(np.arange(lastScan, lastScan + nscans, dtype=int))
        lastScan += nscans

    # Fix: this test previously relied on the leaked loop variable `fd`,
    # i.e. only the *last* session's simulation attribute was checked, and
    # the comprehension shadowed that loop variable. Made explicit here
    # (behavior unchanged for non-empty input).
    if fmri_data_sets[-1].simulation is not None:
        simu = [fds.simulation[0] for fds in fmri_data_sets]
    else:
        simu = None

    return FmriData(all_onsets, bold, fmri_data_sets[0].tr, ss,
                    fmri_data_sets[0].roiMask, fmri_data_sets[0].graphs,
                    durations, fmri_data_sets[0].meta_obj, simu,
                    backgroundLabel=fmri_data_sets[0].backgroundLabel,
                    data_files=fmri_data_sets[0].data_files,
                    data_type=fmri_data_sets[0].data_type,
                    edge_lengths=fmri_data_sets[0].edge_lengths,
                    mask_loaded_from_file=fmri_data_sets[0].mask_loaded_from_file)
def outputResults(self, results, output_dir, filter='.\A',):
    """
    Return: a tuple (dictionary of outputs, output file names)
    """
    if output_dir is None:
        return {}, []

    # Legacy result format -> delegate to the backward-compatible path.
    if not isinstance(results[0][0], (FmriData, FmriGroupData)):
        self.outputResults_back_compat(results, output_dir, filter)
        return {}, []

    logger.info('Building outputs from %d results ...', len(results))
    logger.debug('results :')
    logger.debug(results)

    # Drop analyses that crashed.
    results = self.filter_crashed_results(results)
    if not results:
        logger.error('No result to treat. Did everything crash ?')
        return {}, []

    # All ROIs share the same spatial description; take it from the first.
    target_shape = results[0][0].spatial_shape
    meta_data = results[0][0].meta_obj
    if len(target_shape) == 3:
        # Volumic data
        targetAxes = MRI3Daxes  # ['axial','coronal', 'sagittal']
        ext = '.nii'
    else:
        # Surfacic data
        targetAxes = ['voxel']
        ext = '.gii'
    if self.gzip_outputs:
        ext = ext + '.gz'

    # Merge per-ROI output trees into a single tree of lists.
    first_result = results[0][1]
    if hasattr(first_result, 'getOutputs'):
        all_outputs = stack_trees([r[1].getOutputs() for r in results])
    else:
        all_outputs = stack_trees([r[1] for r in results])

    data_rois = [r[0] for r in results]
    irois = [d.get_roi_id() for d in data_rois]

    # Single-subject vs group outputs depending on the data type.
    make_outputs = (self.make_outputs_single_subject
                    if isinstance(results[0][0], FmriData)
                    else self.make_outputs_multi_subjects)
    return make_outputs(data_rois, irois, all_outputs, targetAxes, ext,
                        meta_data, output_dir)
def outputResults(self, results, output_dir, filter='.\A',):
    """
    Build analysis outputs from per-ROI results.

    Return: a tuple (dictionary of outputs, output file names)
    """
    if output_dir is None:
        return {}, []
    # Legacy result format: delegate to the backward-compatible path.
    if not isinstance(results[0][0], (FmriData, FmriGroupData)):
        self.outputResults_back_compat(results, output_dir, filter)
        return {}, []

    logger.info('Building outputs from %d results ...', len(results))
    logger.debug('results :')
    logger.debug(results)

    # Handle analyses that crashed
    results = self.filter_crashed_results(results)
    if len(results) == 0:
        # Fix: everything crashing is an error condition; it was logged at
        # info level, inconsistent with the sibling implementation which
        # uses logger.error for the same situation.
        logger.error('No more result to treat. Did everything crash ?')
        return {}, []

    # Merge all the analysis outputs; spatial description is shared by all
    # ROIs, so take it from the first result.
    target_shape = results[0][0].spatial_shape
    meta_data = results[0][0].meta_obj
    if len(target_shape) == 3:  # Volumic data:
        targetAxes = MRI3Daxes  # ['axial','coronal', 'sagittal']
        ext = '.nii'
    else:  # surfacic
        targetAxes = ['voxel']
        ext = '.gii'
    if self.gzip_outputs:
        ext += '.gz'

    if hasattr(results[0][1], 'getOutputs'):
        all_outputs = stack_trees([r[1].getOutputs() for r in results])
    else:
        all_outputs = stack_trees([r[1] for r in results])
    data_rois = [r[0] for r in results]
    irois = [d.get_roi_id() for d in data_rois]

    # Single-subject vs group outputs depending on the data type.
    if isinstance(results[0][0], FmriData):
        return self.make_outputs_single_subject(data_rois, irois,
                                                all_outputs, targetAxes,
                                                ext, meta_data, output_dir)
    else:
        return self.make_outputs_multi_subjects(data_rois, irois,
                                                all_outputs, targetAxes,
                                                ext, meta_data, output_dir)
def view_results(bolds, estimOutputs, betas):
    """Display simulated and estimated outputs stacked over beta values.

    'bolds' is a sequence of simulated bold objects, 'estimOutputs' the
    matching estimation output trees, and 'betas' is passed as the domain
    of the new 'betaSimu' stacking axis (presumably one beta per entry —
    TODO confirm against stack_cuboids).
    NOTE(review): Python 2 code (iteritems); values are replaced in-place
    while iterating, which is safe here since no keys are added/removed.
    """
    # Gather simulation outputs of all bolds into one tree of lists, then
    # stack each output list into a cuboid along a new 'betaSimu' axis.
    outputSimu = stack_trees(map(getSimulOutputs, bolds))
    for n,o in outputSimu.iteritems():
        outputSimu[n] = stack_cuboids(o, 'betaSimu', betas)
    views = dict(zip(outputSimu.keys(), map(xndarrayView,outputSimu.values())))
    # Same stacking for estimation outputs; merged into the same view dict
    # (estimation views override simulation views on name collision).
    outputEstim = stack_trees(estimOutputs)
    for n,o in outputEstim.iteritems():
        outputEstim[n] = stack_cuboids(o, 'betaSimu', betas)
    views.update(dict(zip(outputEstim.keys(),
                          map(xndarrayView,outputEstim.values()))))
    ndview.multiView(views)
def outputResults(self, results, output_dir, filter='.\A',):
    """
    Build analysis outputs from per-ROI results (legacy pyhrf.verbose
    variant).

    Return: a tuple (dictionary of outputs, output file names)
    """
    # No output directory -> nothing to produce.
    if output_dir is None:
        return {}, []
    # Legacy result format (not FmriData/FmriGroupData): delegate to the
    # backward-compatible output path.
    if not isinstance(results[0][0], (FmriData, FmriGroupData)):
        self.outputResults_back_compat(results, output_dir, filter)
        return {}, []

    pyhrf.verbose(1,'Building outputs from %d results ...' %len(results))
    pyhrf.verbose(6, 'results :')
    pyhrf.verbose.printDict(6, results, exclude=['xmlHandler'])

    # Discard analyses that crashed.
    results = self.filter_crashed_results(results)
    if len(results) == 0:
        pyhrf.verbose(1, 'No more result to treat. Did everything crash ?')
        return {}, []

    # Spatial description shared by all ROIs (taken from the first one).
    target_shape = results[0][0].spatial_shape
    meta_data = results[0][0].meta_obj
    if len(target_shape) == 3: #Volumic data:
        targetAxes = MRI3Daxes #['axial','coronal', 'sagittal']
        ext = '.nii'
    else: #surfacic
        targetAxes = ['voxel']
        ext = '.gii'

    # Merge per-ROI output trees into a single tree of lists.
    if hasattr(results[0][1], 'getOutputs'):
        all_outputs = stack_trees([r[1].getOutputs() for r in results])
    else:
        all_outputs = stack_trees([r[1] for r in results])
    data_rois = [r[0] for r in results]
    irois = [d.get_roi_id() for d in data_rois]

    # Single-subject vs group outputs, depending on the data type.
    if isinstance(results[0][0], FmriData):
        return self.make_outputs_single_subject(data_rois, irois,
                                                all_outputs, targetAxes,
                                                ext, meta_data, output_dir)
    else:
        return self.make_outputs_multi_subjects(data_rois, irois,
                                                all_outputs, targetAxes,
                                                ext, meta_data, output_dir)
def setUp(self):
    """Build reference Gaussian mixtures and their tree-stacked mapping."""
    def mixt(m, v, p):
        # Sanity: one variance and one proportion per mean.
        assert len(m) == len(v)
        assert len(m) == len(p)
        return {'means': np.array(m),
                'variances': np.array(v),
                'props': np.array(p)}

    self.mixture_inactive = mixt([0., 0.], [1., 1.], [1., 0.])
    self.mixture_active = mixt([0., 100.], [1., 1.], [0., 1.])
    self.mixture_half = mixt([-1., 1.], [1., 1.], [.5, .5])

    def stack_comp(l):
        # Stack homologous leaves column-wise (transposed array).
        return np.array(l).T

    self.mixt_stack = (self.mixture_inactive, self.mixture_active,
                       self.mixture_half)
    # Tree whose leaves gather the corresponding leaves of all mixtures.
    self.mixt_map = stack_trees(self.mixt_stack, join_func=stack_comp)
    self.n_pos = len(self.mixt_stack)
    # Fix: removed dead debug code ('if 0:' block of py2 print statements).
def from_spm_mat(self, spm_mat_file):
    """Build a Paradigm from an SPM.mat design file.

    TODO: handle session durations
    """
    session_data, _ = load_paradigm_from_mat(spm_mat_file)
    # Stack sessions in deterministic (sorted-name) order.
    sessions = sorted(session_data.keys())
    stacked = stack_trees([session_data[s] for s in sessions])
    return Paradigm(stacked['onsets'],
                    stimDurations=stacked['stimulusLength'])
def from_spm_mat(self, spm_mat_file):
    """Create a Paradigm from the paradigm stored in an SPM.mat file.

    TODO: handle session durations
    """
    loaded, _ = load_paradigm_from_mat(spm_mat_file)
    # Merge the per-session trees, sessions taken in sorted-key order.
    per_session = [loaded[name] for name in sorted(loaded)]
    merged = stack_trees(per_session)
    return Paradigm(merged['onsets'], stimDurations=merged['stimulusLength'])
def merge_fmri_sessions(fmri_data_sets):
    """Merge single-session FmriData objects into one multi-session object.

    fmri_data_sets: list of FmriData objects. Each FmriData object is
    assumed to contain only one session.

    Returns a single FmriData; spatial information (mask, graphs, meta
    data, ...) is taken from the first session.
    """
    from pyhrf.tools import apply_to_leaves

    all_onsets = stack_trees([fd.paradigm.stimOnsets for fd in fmri_data_sets])
    all_onsets = apply_to_leaves(all_onsets, lambda l: [e[0] for e in l])
    durations = stack_trees(
        [fd.paradigm.stimDurations for fd in fmri_data_sets])
    durations = apply_to_leaves(durations, lambda x: [e[0] for e in x])

    # Stack bold time courses and keep track of the scan indexes of each
    # session within the concatenated array.
    bold = np.concatenate([fd.bold for fd in fmri_data_sets])
    session_scan = []
    last_scan = 0
    for fmri_data in fmri_data_sets:
        nscans = fmri_data.bold.shape[0]
        session_scan.append(np.arange(last_scan, last_scan + nscans,
                                      dtype=int))
        last_scan += nscans

    # FIXME resolved: this test previously sat inside the loop and relied
    # on the leaked loop variable, i.e. only the *last* session's
    # simulation attribute was checked. Made explicit here (behavior
    # unchanged for non-empty input).
    if fmri_data_sets[-1].simulation is not None:
        simu = [fd.simulation[0] for fd in fmri_data_sets]
    else:
        simu = None

    return FmriData(
        all_onsets, bold, fmri_data_sets[0].tr, session_scan,
        fmri_data_sets[0].roiMask, fmri_data_sets[0].graphs, durations,
        fmri_data_sets[0].meta_obj, simu,
        backgroundLabel=fmri_data_sets[0].backgroundLabel,
        data_files=fmri_data_sets[0].data_files,
        data_type=fmri_data_sets[0].data_type,
        edge_lengths=fmri_data_sets[0].edge_lengths,
        mask_loaded_from_file=fmri_data_sets[0].mask_loaded_from_file)
def from_surf_ui(self, sessions_data=None, tr=DEFAULT_BOLD_SURF_TR,
                 mask_file=DEFAULT_MASK_SURF_FILE,
                 mesh_file=DEFAULT_MESH_FILE):
    """
    Convenient creation function intended to be used for XML I/O.
    'session_data' is a list of FMRISessionVolumicData objects.
    'tr' is the time of repetition.
    'mask_file' is a path to a functional mask file.

    This represents the following hierarchy:
    - FMRIData:
       - list of session data:
           [ * data for session 1:
                 - onsets for session 1,
                 - durations for session 1,
                 - fmri data file for session 1 (gii)
             * data for session 2:
                 - onsets for session 2,
                 - durations for session 2,
                 - fmri data file for session 2 (gii) ],
       - time of repetition
       - mask file
       - mesh file
    """
    # Fix: the former mutable default argument ([FMRISessionSurfacicData()])
    # was created once at definition time and shared across calls; a None
    # sentinel keeps the same effective default without that pitfall.
    if sessions_data is None:
        sessions_data = [FMRISessionSurfacicData()]
    pyhrf.verbose(1,'Load surfacic data ...')
    sd = stack_trees([sd.to_dict() for sd in sessions_data])
    onsets = sd['onsets']
    durations = sd['durations']
    bold_files = sd['bold_file']
    if isinstance(durations, list) and durations[0] is None:
        durations = None

    # mask_loaded_from_file was previously unpacked here but never used.
    mask, mask_meta_obj, _, bold, sessionScans, graphs, edge_lengths = \
        load_surf_bold_mask(bold_files, mesh_file, mask_file)

    fd = FmriData(onsets, bold, tr, sessionScans, mask, graphs,
                  stimDurations=durations, meta_obj=mask_meta_obj,
                  data_files=bold_files + [mask_file, mesh_file],
                  data_type='surface', edge_lengths=edge_lengths)
    fd.set_init(FmriData.from_surf_ui, sessions_data=sessions_data,
                tr=tr, mask_file=mask_file, mesh_file=mesh_file)
    return fd
def from_surf_ui(cls, sessions_data=None, tr=DEFAULT_BOLD_SURF_TR,
                 mask_file=DEFAULT_MASK_SURF_FILE,
                 mesh_file=DEFAULT_MESH_FILE):
    """
    Convenience factory intended for XML I/O.

    'sessions_data' is a list of FMRISessionVolumicData objects (one per
    session), 'tr' the time of repetition, 'mask_file' the path of a
    functional mask file and 'mesh_file' the path of a surface mesh file.

    This represents the following hierarchy:

    .. code::

        - FMRIData:
            - list of session data:
                [ * data for session i:
                      - onsets for session i,
                      - durations for session i,
                      - fmri data file for session i (gii) ],
            - time of repetition
            - mask file
            - mesh file
    """
    if sessions_data is None:
        sessions_data = [FMRISessionSurfacicData()]
    logger.info('Load surfacic data...')

    # Merge the per-session dicts into one tree of lists.
    stacked = stack_trees([session.to_dict() for session in sessions_data])
    onsets = stacked['onsets']
    durations = stacked['durations']
    bold_files = stacked['bold_file']
    if isinstance(durations, list) and durations[0] is None:
        durations = None

    surf_data = load_surf_bold_mask(bold_files, mesh_file, mask_file)
    # Third item (mask-loaded-from-file flag) is deliberately ignored.
    mask, mask_meta_obj, _, bold, session_scans, graphs, edge_lengths = \
        surf_data

    fmri_data = FmriData(onsets, bold, tr, session_scans, mask, graphs,
                         stimDurations=durations, meta_obj=mask_meta_obj,
                         data_files=bold_files + [mask_file, mesh_file],
                         data_type='surface', edge_lengths=edge_lengths)
    fmri_data.set_init(FmriData.from_surf_ui, sessions_data=sessions_data,
                       tr=tr, mask_file=mask_file, mesh_file=mesh_file)
    return fmri_data
def load_and_get_fdata_params(self, sessions_data, mask):
    """Load functional data for all sessions and return FmriData kwargs.

    'func_data_file' is consumed from the merged session parameters and
    replaced by the loaded data under the 'func_data' key.
    """
    fdata_params = stack_trees([session.to_dict()
                                for session in sessions_data])
    data_files = fdata_params.pop('func_data_file')
    pyhrf.verbose(1, 'Load functional data from: %s' % ',\n'.join(data_files))
    cuboid = stack_cuboids([xndarray.load(f) for f in data_files], 'session')
    # Scan sessions along the time axis.
    func_data = np.concatenate(cuboid.data)
    pio.discard_bad_data(func_data, mask)
    pyhrf.verbose(1, 'Functional data shape %s' % str(func_data.shape))
    fdata_params['func_data'] = func_data
    return fdata_params
def setUp(self):
    """Prepare three reference mixtures and their tree-stacked mapping."""
    def make_mixture(means, variances, props):
        # One variance and one proportion per mean.
        assert len(means) == len(variances)
        assert len(means) == len(props)
        return {'means': np.array(means),
                'variances': np.array(variances),
                'props': np.array(props)}

    self.mixture_inactive = make_mixture([0., 0.], [1., 1.], [1., 0.])
    self.mixture_active = make_mixture([0., 100.], [1., 1.], [0., 1.])
    self.mixture_half = make_mixture([-1., 1.], [1., 1.], [.5, .5])

    # Homologous leaves of all mixtures are joined column-wise.
    def join_columns(leaves):
        return np.array(leaves).T

    self.mixt_stack = (self.mixture_inactive,
                       self.mixture_active,
                       self.mixture_half)
    self.mixt_map = stack_trees(self.mixt_stack, join_func=join_columns)
    self.n_pos = len(self.mixt_stack)
def from_vol_ui(self, sessions_data=None, tr=DEFAULT_BOLD_VOL_TR,
                mask_file=DEFAULT_MASK_VOL_FILE,
                background_label=None):
    """
    Convenient creation function intended to be used for XML I/O.
    'session_data' is a list of FMRISessionVolumicData objects.
    'tr' is the repetition time.
    'mask_file' is a path to a functional mask file.

    This represents the following hierarchy:
    - FMRIData:
       - list of session data:
           [ * data for session 1:
                 - onsets for session 1,
                 - durations for session 1,
                 - fmri data file for session 1 (nii)
             * data for session 2:
                 - onsets for session 2,
                 - durations for session 2,
                 - fmri data file for session 2 (nii) ],
       - repetition time
       - mask file
    """
    # Fix: the former mutable default argument ([FMRISessionVolumicData()])
    # was created once at definition time and shared across calls; a None
    # sentinel keeps the same effective default without that pitfall.
    if sessions_data is None:
        sessions_data = [FMRISessionVolumicData()]
    logger.info('Load volumic data ...')
    logger.info('Input sessions data:')
    logger.debug(sessions_data)
    sda = stack_trees([sda.to_dict() for sda in sessions_data])
    onsets = sda['onsets']
    durations = sda['durations']
    bold_files = sda['bold_file']

    # FIXME: HACKS!! Normalize the nesting of onsets/durations produced by
    # stack_trees. NOTE(review): this relies on dict key order being stable
    # between successive .keys()/.values() calls (true in CPython for an
    # unmodified dict) — confirm before touching.
    if isinstance(onsets.values()[0][0], list):
        for i in xrange(len(onsets.keys())):
            onsets[onsets.keys()[i]] = onsets[onsets.keys()[i]][0]
    if isinstance(durations.values()[0][0], list):
        for i in xrange(len(durations.keys())):
            durations[durations.keys()[i]] = \
                durations[durations.keys()[i]][0]
    if len(durations.values()[0][0].shape) > 1:
        for i in xrange(len(durations.keys())):
            durations.values()[i][0] = durations.values()[i][0][0]
    if len(onsets.keys()) == 1:
        onsets[onsets.keys()[0]] = onsets[onsets.keys()[0]][0]
        durations[onsets.keys()[0]] = durations[onsets.keys()[0]][0]

    if isinstance(durations, list) and durations[0] is None:
        durations = None

    mask, mask_meta_obj, mask_loaded_from_file, bold, session_scans = \
        load_vol_bold_and_mask(bold_files, mask_file)

    fmri_data = FmriData(onsets, bold, tr, session_scans, mask,
                         stimDurations=durations, meta_obj=mask_meta_obj,
                         data_files=bold_files + [mask_file],
                         backgroundLabel=background_label,
                         data_type='volume',
                         mask_loaded_from_file=mask_loaded_from_file)
    fmri_data.set_init(FmriData.from_vol_ui, sessions_data=sessions_data,
                       tr=tr, mask_file=mask_file,
                       background_label=background_label)
    return fmri_data
def from_surf_ui(cls, sessions_data=None, tr=DEFAULT_BOLD_SURF_TR,
                 mask_file=DEFAULT_MASK_SURF_FILE,
                 mesh_file=DEFAULT_MESH_FILE):
    """
    Convenient creation function intended to be used for XML I/O.

    'sessions_data' is a list of FMRISessionVolumicData objects, 'tr' the
    time of repetition, 'mask_file' the path of a functional mask file and
    'mesh_file' the path of a surface mesh file.

    This represents the following hierarchy:
    - FMRIData:
       - list of session data
         (onsets, durations and gii data file, per session),
       - time of repetition
       - mask file
       - mesh file
    """
    sessions_data = (sessions_data if sessions_data is not None
                     else [FMRISessionSurfacicData()])
    logger.info('Load surfacic data...')

    merged = stack_trees([s.to_dict() for s in sessions_data])
    onsets, durations = merged['onsets'], merged['durations']
    bold_files = merged['bold_file']
    # A list of Nones means no duration information was provided.
    if isinstance(durations, list) and durations[0] is None:
        durations = None

    loaded = load_surf_bold_mask(bold_files, mesh_file, mask_file)
    (mask, mask_meta_obj, _, bold,
     session_scans, graphs, edge_lengths) = loaded

    fmri_data = FmriData(onsets, bold, tr, session_scans, mask, graphs,
                         stimDurations=durations,
                         meta_obj=mask_meta_obj,
                         data_files=bold_files + [mask_file, mesh_file],
                         data_type='surface',
                         edge_lengths=edge_lengths)
    fmri_data.set_init(FmriData.from_surf_ui,
                       sessions_data=sessions_data, tr=tr,
                       mask_file=mask_file, mesh_file=mesh_file)
    return fmri_data
def from_vol_ui(self, sessions_data=None, tr=DEFAULT_BOLD_VOL_TR,
                mask_file=DEFAULT_MASK_VOL_FILE,
                background_label=None):
    """
    Convenient creation function intended to be used for XML I/O.
    'session_data' is a list of FMRISessionVolumicData objects.
    'tr' is the repetition time.
    'mask_file' is a path to a functional mask file.

    This represents the following hierarchy:
    - FMRIData:
       - list of session data:
           [ * data for session 1:
                 - onsets for session 1,
                 - durations for session 1,
                 - fmri data file for session 1 (nii)
             * data for session 2:
                 - onsets for session 2,
                 - durations for session 2,
                 - fmri data file for session 2 (nii) ],
       - repetition time
       - mask file
    """
    # Fix: mutable default argument ([FMRISessionVolumicData()]) replaced
    # by a None sentinel (same effective default, not shared across calls).
    # Also removed commented-out debug prints.
    if sessions_data is None:
        sessions_data = [FMRISessionVolumicData()]
    pyhrf.verbose(1,'Load volumic data ...')
    pyhrf.verbose(3,'Input sessions data:')
    pyhrf.verbose.printDict(3,sessions_data)
    sd = stack_trees([sd.to_dict() for sd in sessions_data])
    onsets = sd['onsets']
    durations = sd['durations']
    bold_files = sd['bold_file']

    # HACKS!! Normalize the nesting of onsets/durations coming out of
    # stack_trees. NOTE(review): relies on dict key order being stable
    # between successive .keys()/.values() calls (true in CPython for an
    # unmodified dict). isinstance used instead of type()==list for
    # consistency with the newer sibling implementation.
    if isinstance(onsets.values()[0][0], list):
        for i in xrange(len(onsets.keys())):
            onsets[onsets.keys()[i]] = onsets[onsets.keys()[i]][0]
    if isinstance(durations.values()[0][0], list):
        for i in xrange(len(durations.keys())):
            durations[durations.keys()[i]] = \
                durations[durations.keys()[i]][0]
    if len(durations.values()[0][0].shape) > 1:
        for i in xrange(len(durations.keys())):
            durations.values()[i][0] = durations.values()[i][0][0]
    if len(onsets.keys()) == 1:
        onsets[onsets.keys()[0]] = onsets[onsets.keys()[0]][0]
        durations[onsets.keys()[0]] = durations[onsets.keys()[0]][0]

    if isinstance(durations,list) and durations[0] is None:
        durations = None

    mask, mask_meta_obj, mask_loaded_from_file, bold, sessionScans = \
        load_vol_bold_and_mask(bold_files, mask_file)

    fd = FmriData(onsets, bold, tr, sessionScans, mask,
                  stimDurations=durations, meta_obj=mask_meta_obj,
                  data_files=bold_files + [mask_file],
                  backgroundLabel=background_label,
                  data_type='volume',
                  mask_loaded_from_file=mask_loaded_from_file)
    fd.set_init(FmriData.from_vol_ui, sessions_data=sessions_data,
                tr=tr, mask_file=mask_file,
                background_label=background_label)
    return fd
def outputResults_old(self, results, output_dir, filter='.\A',
                      historyFlag=False):
    """
    Return a dictionnary mapping output names to cuboid objects

    NOTE(review): 'topop' is filled below but never consumed in this
    visible span, and there is no final return — this function very likely
    continues past this chunk; confirm before editing.
    """
    from pyhrf.boldsynth.spatialconfig import xndarrayMapper1D
    # Compile the name filter; outputs matching it are skipped.
    filter = re.compile(filter)
    pyhrf.verbose(1,'Building outputs ...')
    pyhrf.verbose(6, 'results :')
    pyhrf.verbose.printDict(6, results, exclude=['xmlHandler'])
    # Drop crashed analyses (result part is None).
    # NOTE(review): popping while enumerating skips the element following
    # each removed one — presumably tolerated historically; verify.
    for i,r in enumerate(results):
        if r[1] is None:
            results.pop(i)
    nbROI = len(results)
    if nbROI == 0: #Nothing ...
        print 'No result found, everything crashed?'
        return None
    pyhrf.verbose(1, '%s ROI(s)' %(nbROI))
    resultTree = {}
    roiMapperTree = {}
    # Spatial description taken from the first ROI.
    target_shape = results[0][0].spatial_shape
    meta_data = results[0][0].meta_obj
    if len(target_shape) == 3: #Volumic data:
        targetAxes = MRI3Daxes #['axial','coronal', 'sagittal']
    else: #surfacic
        targetAxes = ['voxel']
    pyhrf.verbose(1,'Get each ROI output ...')
    # Collect per-ROI outputs and the mapper that expands each ROI back
    # into the full spatial domain.
    for roiData,result,report in results:
        roiId = roiData.get_roi_id()
        roiMapper = xndarrayMapper1D(maskToMapping(roiData.roiMask==roiId),
                                     target_shape, targetAxes, 'voxel')
        if report == 'ok':
            if not isinstance(result, dict):
                outs = result.getOutputs()
            else:
                outs = result
            set_leaf(resultTree,[roiId], outs)
            set_leaf(roiMapperTree,[roiId], roiMapper)
        else:
            pyhrf.verbose(1, '-> Sampling crashed, roi %d!' %roiId)
            pyhrf.verbose(2, report)
    if len(resultTree) == 0:
        return None
    pyhrf.verbose(1,'Joining outputs ...')
    # Merge the per-ROI output trees, then join each output across ROIs.
    outputs = {}
    outputs = stack_trees(resultTree.values())
    roiList = resultTree.keys()
    # Outputs whose join failed (removed later, past this visible span).
    topop = []
    for on, cubList in outputs.iteritems():
        pyhrf.verbose(4, "Treating output: " + on + "...")
        if not filter.match(on):
            try:
                outputs[on] = self.joinOutputs(cubList, roiList,
                                               roiMapperTree)
            except Exception, e:
                print 'Could not join outputs for', on
                print 'Exception was:'
                print e
                topop.append(on)
        else:
            print on, 'filtered!'
def from_session_dict(self, d, sessionDurations=None):
    """Build a Paradigm from a per-session dict, sessions in sorted order."""
    ordered_sessions = sorted(d)
    merged = stack_trees([d[name] for name in ordered_sessions])
    return Paradigm(merged['onsets'], sessionDurations,
                    merged['stimulusLength'])