Пример #1
0
def view_results(bolds, estimOutputs, betas):
    """Build xndarray views of simulated and estimated outputs, each stacked
    along the simulated-beta axis, and display them all together."""
    # Simulation outputs: stack each named cuboid list over the beta values.
    outputSimu = stack_trees(map(getSimulOutputs, bolds))
    for name in outputSimu.keys():
        outputSimu[name] = stack_cuboids(outputSimu[name], 'betaSimu', betas)
    views = dict((name, xndarrayView(c)) for name, c in outputSimu.items())

    # Estimation outputs: same stacking, merged into the same view set.
    outputEstim = stack_trees(estimOutputs)
    for name in outputEstim.keys():
        outputEstim[name] = stack_cuboids(outputEstim[name], 'betaSimu', betas)
    for name, c in outputEstim.items():
        views[name] = xndarrayView(c)

    ndview.multiView(views)
Пример #2
0
 def load_func_data(self, mask):
     """Load functional data for every session and flatten it over *mask*.

     Each session file is flattened spatially to the non-background voxels,
     the sessions are stacked along a 'session' axis, and finally chained
     along time into a single array.
     """
     in_mask = mask != self.bg_label
     per_session = [xndarray.load(fn).flatten(in_mask, self.spatial_axes,
                                              'voxel')
                    for fn in self.func_files]
     stacked = stack_cuboids(per_session, 'session')
     stacked = stacked.reorient(['session', 'time'] + self.spatial_axes)
     # Concatenate sessions along the time axis:
     return np.concatenate(stacked.data)
Пример #3
0
    def make_outputs_single_subject(self, data_rois, irois, all_outputs,
                                    targetAxes, ext, meta_data, output_dir):
        """Merge per-ROI analysis outputs into whole-volume cuboids and save
        each one under `output_dir`.

        Voxel-mapped outputs are expanded ROI by ROI into a shared
        destination cuboid; other outputs are stacked along a new 'ROI' axis.

        Args:
            data_rois: per-ROI data objects providing `roiMask` and
                `backgroundLabel` (used to build in-ROI voxel masks).
            irois: ROI identifiers, parallel to `data_rois`.
            all_outputs: dict mapping output name -> list of per-ROI cuboids.
            targetAxes: target axis names for the voxel expansion.
            ext: file extension (including the dot) for saved output files.
            meta_data: (affine-like, header-like) pair used when saving.
            output_dir: destination directory for the merged output files.
        """
        coutputs = {}
        output_fns = []

        # Boolean in-ROI masks (non-background voxels), one per ROI.
        roi_masks = [(data_roi.roiMask != data_roi.backgroundLabel)
                     for data_roi in data_rois]
        # Same masks as coordinate index tuples (np.where output).
        np_roi_masks = [np.where(roi_mask) for roi_mask in roi_masks]

        for output_name, roi_outputs in all_outputs.iteritems():
            logger.info('Merge output %s ...', output_name)
            # NOTE(review): if the merge below raises before `dest_c` is
            # assigned (e.g. in has_axis on the very first output), the code
            # after the except clause reads an undefined `dest_c` and raises
            # NameError -- confirm whether this path can occur in practice.
            try:
                if roi_outputs[0].has_axis('voxel'):
                    # Voxel-mapped output: expand every ROI cuboid into a
                    # shared whole-volume destination.
                    logger.debug('Merge as expansion ...')
                    dest_c = None
                    for i_roi, c in enumerate(roi_outputs):
                        dest_c = c.expand(roi_masks[i_roi],
                                          'voxel', targetAxes,
                                          dest=dest_c, do_checks=False,
                                          m=np_roi_masks[i_roi])
                else:
                    # ROI-level output: stack the cuboids along a new 'ROI'
                    # axis, ordered by ROI id.
                    logger.debug('Merge as stack (%d elements)...', len(irois))
                    c_to_stack = [roi_outputs[i] for i in np.argsort(irois)]
                    # print 'c_to_stack:', c_to_stack
                    # print 'sorted(irois):', sorted(irois)
                    dest_c = stack_cuboids(c_to_stack, domain=sorted(irois),
                                           axis='ROI')
            except Exception, e:
                # Best effort: report the merge failure and carry on.
                print "Could not merge outputs for %s" % output_name
                print "Exception was:"
                print e
                # raise e #stop here
            if 0 and dest_c is not None:  # debug-only, disabled
                print '-> ', dest_c.data.shape
            output_fn = op.join(output_dir, output_name + ext)
            output_fn = add_prefix(output_fn, self.outPrefix)
            output_fns.append(output_fn)
            logger.debug('Save output %s to %s', output_name, output_fn)
            try:
                # Prefer the cuboid's own "descrip" meta data, merged into a
                # copy of the caller-provided header.
                if dest_c.meta_data:
                    tmp_meta_data = (meta_data[0], meta_data[1].copy())
                    tmp_meta_data[1]["descrip"] = dest_c.meta_data[1]["descrip"]
                    dest_c.meta_data = tmp_meta_data
                    dest_c.save(output_fn, set_MRI_orientation=True)
                else:
                    dest_c.save(output_fn, meta_data=meta_data,
                                set_MRI_orientation=True)
            except Exception:
                # Best effort: report save failures and continue with the
                # remaining outputs.
                print 'Could not save output "%s", error stack was:' \
                    % output_name
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback,
                                          limit=4, file=sys.stdout)
            coutputs[output_name] = dest_c
Пример #4
0
    def joinOutputs(self, cuboids, roiIds, mappers):
        """Merge per-ROI cuboids into one cuboid.

        If the first cuboid supports voxel expansion, expand every cuboid
        into a common destination; otherwise stack them along a 'ROI' axis.
        """
        first_mapper = mappers[roiIds[0]]
        if not first_mapper.isExpendable(cuboids[0]):
            return stack_cuboids(cuboids, 'ROI', roiIds)
        logger.info('Expanding ...')
        merged = first_mapper.expandxndarray(cuboids[0])
        for cub, roi_id in zip(cuboids[1:], roiIds[1:]):
            mappers[roi_id].expandxndarray(cub, dest=merged)
        return merged
Пример #5
0
    def joinOutputs(self, cuboids, roiIds, mappers):
        """Combine per-ROI cuboids: expand over voxels when supported by the
        first ROI's mapper, otherwise stack along a fresh 'ROI' axis."""
        base_mapper = mappers[roiIds[0]]
        if base_mapper.isExpendable(cuboids[0]):
            logger.info('Expanding ...')
            joined = base_mapper.expandxndarray(cuboids[0])
            # All remaining cuboids are expanded into the same destination.
            for cub, rid in list(zip(cuboids, roiIds))[1:]:
                mappers[rid].expandxndarray(cub, dest=joined)
            return joined
        return stack_cuboids(cuboids, 'ROI', roiIds)
Пример #6
0
    def load_and_get_fdata_params(self, sessions_data, mask):
        """Assemble analysis parameters from the session data, loading the
        functional files and concatenating them along the time axis."""
        params = stack_trees([sd.to_dict() for sd in sessions_data])

        func_files = params.pop('func_data_file')
        pyhrf.verbose(1, 'Load functional data from: %s'
                      % ',\n'.join(func_files))
        cub = stack_cuboids([xndarray.load(fn) for fn in func_files],
                            'session')

        # Sessions are chained along the time axis:
        func_data = np.concatenate(cub.data)
        pio.discard_bad_data(func_data, mask)
        pyhrf.verbose(1, 'Functional data shape %s' % str(func_data.shape))
        params['func_data'] = func_data

        return params
Пример #7
0
    def load_and_get_fdata_params(self, mask):
        """Load the CSV paradigm and the functional data, returning the
        parameters required by the analysis as a dict."""
        # Only CSV paradigms are handled; reject anything else up front.
        if op.splitext(self.paradigm_file)[-1] != '.csv':
            raise Exception('Only CSV file format support for paradigm')
        onsets, durations = pio.load_paradigm_from_csv(self.paradigm_file)

        func_files = self.func_data_files
        pyhrf.verbose(1, 'Load functional data from: %s'
                      % ',\n'.join(func_files))
        cub = stack_cuboids([xndarray.load(fn) for fn in func_files],
                            'session')

        # Chain sessions along the time axis:
        func_data = np.concatenate(cub.data)
        pio.discard_bad_data(func_data, mask)
        pyhrf.verbose(1, 'Functional data shape %s' % str(func_data.shape))

        return {'stim_onsets': onsets, 'stim_durations': durations,
                'func_data': func_data}
Пример #8
0
    def make_outputs_multi_subjects(self, data_rois, irois, all_outputs,
                                    targetAxes, ext, meta_data, output_dir):

        coutputs = {}
        output_fns = []

        roi_masks = [[(ds.roiMask != ds.backgroundLabel)
                      for ds in dr.data_subjects]
                     for dr in data_rois]
        #-> roi_masks[roi_id][subject]

        np_roi_masks = [[np.where(roi_subj_mask)
                         for roi_subj_mask in roi_subj_masks]
                        for roi_subj_masks in roi_masks]
        #-> np_roi_masks[roi_id][subject]

        for output_name, roi_outputs in all_outputs.iteritems():
            logger.info('Merge output %s ...', output_name)
            dest_c = {}
            try:
                if roi_outputs[0].has_axis('voxel'):
                    if not roi_outputs[0].has_axis('subject'):
                        raise Exception('Voxel-mapped output "%s" does not'
                                        'have a subject axis')
                    logger.debug('Merge as expansion ...')
                    dest_c_tmp = None
                    for isubj in roi_outputs[0].get_domain('subject'):
                        for i_roi, c in enumerate(roi_outputs):
                            m = np_roi_masks[i_roi][isubj]
                            dest_c_tmp = c.expand(roi_masks[i_roi][isubj],
                                                  'voxel', targetAxes,
                                                  dest=dest_c_tmp,
                                                  do_checks=False,
                                                  m=m)
                    dest_c[output_name] = dest_c_tmp
                else:
                    logger.debug('Merge as stack (%d elements)...', len(irois))
                    c_to_stack = [roi_outputs[i] for i in np.argsort(irois)]
                    dest_c[output_name] = stack_cuboids(c_to_stack,
                                                        domain=sorted(irois),
                                                        axis='ROI')
            except Exception, e:
                logger.error("Could not merge outputs for %s", output_name)
                logger.error("Exception was:")
                logger.error(e)

            for output_name, c in dest_c.iteritems():
                output_fn = op.join(output_dir, output_name + ext)
                output_fn = add_prefix(output_fn, self.outPrefix)
                output_fns.append(output_fn)
                logger.debug('Save output %s to %s', output_name, output_fn)
                try:
                    c.save(output_fn, meta_data=meta_data,
                           set_MRI_orientation=True)
                except Exception:
                    print 'Could not save output "%s", error stack was:' \
                        % output_name
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    traceback.print_exception(exc_type, exc_value,
                                              exc_traceback,
                                              limit=4, file=sys.stdout)
                coutputs[output_name] = c
Пример #9
0
    def analyse_roi(self, fdata):
        """Run a nipy GLM analysis on one ROI and package the results.

        Args:
            fdata: ROI functional data object (provides `bold`, `tr`,
                `roiMask`, `nbConditions`, `get_roi_id`, ...).

        Returns:
            dict mapping output name -> xndarray (design matrix, fits,
            betas, contrast maps, ROI mask, ...).
        """
        pyhrf.verbose(1, 'Run GLM analysis (ROI %d) ...' %fdata.get_roi_id())

        # Restrict the rescale factor to in-mask voxels, if provided.
        if self.rescale_factor is not None:
            m = np.where(fdata.roiMask)
            rescale_factor = self.rescale_factor[:,m[0],m[1],m[2]]
        else:
            rescale_factor = None

        glm, dm, cons = glm_nipy(fdata, contrasts=self.contrasts,
                                 hrf_model=self.hrf_model,
                                 drift_model=self.drift_model,
                                 hfcut=self.hfcut,
                                 residuals_model=self.residuals_model,
                                 fit_method=self.fit_method,
                                 fir_delays=self.fir_delays,
                                 rescale_results=self.rescale_results,
                                 rescale_factor=rescale_factor)

        outputs = {}

        ns, nr = dm.matrix.shape  # (nb scans, nb regressors)
        tr = fdata.tr
        if rescale_factor is not None:
            #same sf for all voxels
            dm.matrix[:,:rescale_factor.shape[0]] /= rescale_factor[:,0]

        # Design matrix as a (time x regressor) cuboid.
        cdesign_matrix = xndarray(dm.matrix,
                                axes_names=['time','regressor'],
                                axes_domains={'time':np.arange(ns)*tr,
                                              'regressor':dm.names})
        outputs['design_matrix'] = cdesign_matrix

        axes_names = ['time', 'voxel']
        axes_domains = {'time' : np.arange(ns)*tr}
        bold = xndarray(fdata.bold.astype(np.float32),
                      axes_names=axes_names,
                      axes_domains=axes_domains,
                      value_label='BOLD')

        # Full model fit: X * beta, stacked with the raw BOLD signal.
        fit = np.dot(dm.matrix, glm.beta)
        cfit = xndarray(fit, axes_names=['time','voxel'],
                      axes_domains={'time':np.arange(ns)*tr})

        outputs['bold_fit'] = stack_cuboids([bold,cfit], 'stype', ['bold', 'fit'])

        # Condition-only fit (first nb_cond regressors), recentered on the
        # mean BOLD level.
        nb_cond = fdata.nbConditions
        fit_cond = np.dot(dm.matrix[:,:nb_cond], glm.beta[:nb_cond,:])
        fit_cond -= fit_cond.mean(0)
        fit_cond += fdata.bold.mean(0)

        outputs['fit_cond'] = xndarray(fit_cond, axes_names=['time','voxel'],
                                     axes_domains={'time':np.arange(ns)*tr})

        # Residual variance estimate.
        outputs['s2'] = xndarray(glm.s2, axes_names=['voxel'])

        if 0:  # disabled: single stacked beta output
            cbeta = xndarray(glm.beta, axes_names=['reg_name','voxel'],
                           axes_domains={'reg_name':dm.names})

            outputs['beta'] = cbeta
        else:
            # One beta map per regressor, plus FIR HRF reconstruction.
            if self.hrf_model == 'FIR':
                # fir[delay in seconds][condition] -> beta map
                fir = dict((d * fdata.tr, OrderedDict()) for d in self.fir_delays)
            for ib, bname in enumerate(dm.names):
                outputs['beta_' + bname] = xndarray(glm.beta[ib],
                                                  axes_names=['voxel'])
                if self.hrf_model == 'FIR' and 'delay' in bname:
                    #reconstruct filter:
                    cond, delay = bname.split('_delay_')
                    delay = int(delay) * fdata.tr
                    fir[delay][cond] = xndarray(glm.beta[ib], axes_names=['voxel'])

            if self.hrf_model == 'FIR':
                # Assemble FIR betas into an HRF cuboid and its L2 norm.
                chrf = tree_to_cuboid(fir, ['time', 'condition'])
                outputs['hrf'] = chrf
                outputs['hrf_norm'] = (chrf**2).sum('time')**.5

            # Contrast maps: effect, normalized effect and p-value.
            for cname, con in cons.iteritems():
                #print 'con:'
                #print dir(con)
                outputs['con_effect_'+cname] = xndarray(con.effect,
                                                      axes_names=['voxel'])

                # NOTE(review): normalization divides by the std of the
                # variance map (a scalar), not a per-voxel quantity --
                # confirm this is intended.
                #print '%%%%%%% con.variance:', con.variance.shape
                ncon = con.effect / con.variance.std()
                outputs['ncon_effect_'+cname] = xndarray(ncon, axes_names=['voxel'])

                outputs['con_pvalue_'+cname] = xndarray(con.pvalue(self.con_bl),
                                                      axes_names=['voxel'])

        # Constant map holding the ROI id over all in-mask voxels.
        roi_lab_vol = np.zeros(fdata.get_nb_vox_in_mask(), dtype=np.int32) + \
            fdata.get_roi_id()

        outputs['mask'] = xndarray(roi_lab_vol, axes_names=['voxel'])

        # for ib, bname in enumerate(design_matrix.names):
        #     beta_vol = expand_array_in_mask(my_glm.beta[ib], mask_array)
        #     beta_image = Nifti1Image(beta_vol, affine)
        #     beta_file = op.join(output_dir, 'beta_%s.nii' %bname)
        #     save(beta_image, beta_file)
        #     beta_files.append(beta_file)

        return outputs
Пример #10
0
    def getGlobalOutputs(self):
        """Build global (non per-variable) outputs.

        In DEVEL mode this includes design-matrix summaries; when
        `self.output_fit` is set, the stacked BOLD/fit cuboid is added.

        Returns:
            dict mapping output name -> xndarray.
        """
        outputs = {}
        axes_domains = {
            'time': np.arange(self.dataInput.ny) * self.dataInput.tr
        }
        if pyhrf.__usemode__ == pyhrf.DEVEL:
            # output of design matrix:
            # Collapse the per-condition onset matrices into one map where
            # condition ic is coded as the value ic + 1.
            dMat = np.zeros_like(self.dataInput.varX[0, :, :])
            for ic, vx in enumerate(self.dataInput.varX):
                dMat += vx * (ic + 1)

            outputs['matX'] = xndarray(dMat,
                                       axes_names=['time', 'P'],
                                       axes_domains=axes_domains,
                                       value_label='value')

            ad = axes_domains.copy()
            ad['condition'] = self.dataInput.cNames
            outputs['varX'] = xndarray(self.dataInput.varX.astype(np.int8),
                                       axes_names=['condition', 'time', 'P'],
                                       axes_domains=ad,
                                       value_label='value')
        if self.output_fit:
            try:
                fit = self.computeFit()
                # Axis layout depends on whether data is multisession.
                if self.dataInput.varMBY.ndim == 2:
                    axes_names = ['time', 'voxel']
                else:  # multisession
                    axes_names = ['session', 'time', 'voxel']
                bold = xndarray(self.dataInput.varMBY.astype(np.float32),
                                axes_names=axes_names,
                                axes_domains=axes_domains,
                                value_label='BOLD')

                # TODO: outputs of paradigm
                # outputs of onsets, per condition:
                # get binary sequence sampled at TR
                # build a xndarray from it
                # build time axis values

                cfit = xndarray(fit.astype(np.float32),
                                axes_names=axes_names,
                                axes_domains=axes_domains,
                                value_label='BOLD')
                # if self.dataInput.simulData is not None:

                # s = xndarray(self.dataInput.simulData.stimInduced,
                # axes_names=axes_names,
                # axes_domains=axes_domains,
                # value_label='BOLD')

                # outputs['fit'] = stack_cuboids([s,cfit], 'type',
                # ['simu', 'fit'])
                # else:
                outputs['bold_fit'] = stack_cuboids([bold, cfit], 'stype',
                                                    ['bold', 'fit'])

                # e = np.sqrt((fit.astype(np.float32) - \
                #             self.dataInput.varMBY.astype(np.float32))**2)
                # outputs['error'] = xndarray(e, axes_names=axes_names,
                #                            axes_domains=axes_domains,
                #                            value_label='Error')
                # outputs['rmse'] = xndarray(e.mean(0), axes_names=['voxel'],
                #                            value_label='Rmse')

            except NotImplementedError:
                # Fit computation is optional; skip silently after reporting.
                print 'Compute fit not implemented !'
                pass

        return outputs
Пример #11
0
    def getOutputs(self):
        """Package this sampled variable's results as a dict of xndarrays.

        Outputs include (when available): posterior-mean history, sample
        history, autocorrelation diagnostics, posterior median, the
        posterior mean (optionally stacked with the true value), tracked
        quantities and final-value accuracy reports.

        Returns:
            dict mapping output name -> xndarray.
        """
        # Backward compatibilty with older results
        if hasattr(self, "axesNames"):
            self.axes_names = self.axesNames
        if hasattr(self, "axesDomains"):
            self.axes_domains = self.axesDomains
        if hasattr(self, "valueLabel"):
            self.value_label = self.valueLabel

        outputs = {}
        if self.axes_names is None:
            # print ' "%s" -> no axes_names defined' %self.name
            # No axis names defined: generate generic ones from the rank.
            sh = (1,) if np.isscalar(self.finalValue) else self.finalValue.shape
            # an = ['axis%d'%i for i in xrange(self.finalValue.ndim)]
            an = ["axis%d" % i for i in xrange(len(sh))]
        else:
            an = self.axes_names

        # Posterior-mean history along iterations.
        if self.meanHistory is not None:
            outName = self.name + "_pm_history"
            if hasattr(self, "obsHistoryIts"):
                axes_domains = {"iteration": self.obsHistoryIts}
            else:
                axes_domains = {}
            axes_domains.update(self.axes_domains)

            axes_names = ["iteration"] + an
            outputs[outName] = xndarray(
                self.meanHistory, axes_names=axes_names, axes_domains=axes_domains, value_label=self.value_label
            )

        # Raw sample history and derived autocorrelation diagnostics.
        if hasattr(self, "smplHistory") and self.smplHistory is not None:
            axes_names = ["iteration"] + an
            outName = self.name + "_smpl_history"
            if hasattr(self, "smplHistoryIts"):
                axes_domains = {"iteration": self.smplHistoryIts}
            else:
                axes_domains = {}
            axes_domains.update(self.axes_domains)
            outputs[outName] = xndarray(
                self.smplHistory, axes_names=axes_names, axes_domains=axes_domains, value_label=self.value_label
            )

            if hasattr(self, "autocorrelation"):
                outName = self.name + "_smpl_autocorr"
                axes_names = ["lag"] + an
                outputs[outName] = xndarray(
                    self.autocorrelation, axes_names=axes_names, axes_domains=self.axes_domains, value_label="acorr"
                )

                outName = self.name + "_smpl_autocorr_test"
                outputs[outName] = xndarray(
                    self.autocorrelation_test,
                    axes_names=axes_names,
                    axes_domains=self.axes_domains,
                    value_label="acorr",
                )

                outName = self.name + "_smpl_autocorr_pval"
                outputs[outName] = xndarray(
                    self.autocorrelation_pvalue,
                    axes_names=axes_names,
                    axes_domains=self.axes_domains,
                    value_label="pvalue",
                )

                outName = self.name + "_smpl_autocorr_thresh"
                outputs[outName] = xndarray(np.array([self.autocorrelation_thresh]), value_label="acorr")

            if hasattr(self, "median"):
                outName = self.name + "_post_median"
                outputs[outName] = xndarray(
                    self.median, axes_names=self.axes_names, axes_domains=self.axes_domains, value_label="median"
                )

        # print "Var name", self.name
        # print "Final value", self.finalValue

        pyhrf.verbose(4, "%s final value:" % self.name)
        pyhrf.verbose.printNdarray(4, self.finalValue)
        # Posterior std (sqrt of error), when the sampler tracked one.
        if 1 and hasattr(self, "error"):
            err = self.error ** 0.5
        else:
            err = None

        c = xndarray(
            self.get_final_value(),
            axes_names=self.axes_names,
            axes_domains=self.axes_domains,
            value_label=self.value_label,
        )

        # if 'hrf' in self.name:
        #     fv = self.get_final_value()
        #     print 'hrf norms:'
        #     if fv.ndim > 1:
        #         for s in xrange(fv.shape[0]):
        #             print (fv[s]**2).sum()**.5
        #     else:
        #         print (fv**2).sum()**.5

        # Stack the posterior mean with the true value when it is known.
        if self.trueValue is not None:
            c_true = xndarray(
                np.array(self.get_true_value()),
                axes_names=self.axes_names,
                axes_domains=self.axes_domains,
                value_label=self.value_label,
            )

            c = stack_cuboids([c, c_true], axis="type", domain=["estim", "true"], axis_pos="last")

        outputs[self.name + "_pm"] = c

        # NOTE(review): disabled branch; if re-enabled, the guard is broken:
        # when err is None the `or` evaluates err.size -> AttributeError, and
        # when err is not None the `or` short-circuits to True.
        if 0 and ((err is not None) or ((err.size == 1) and (err != 0))):
            c_err = xndarray(
                np.array(err), axes_names=self.axes_names, axes_domains=self.axes_domains, value_label=self.value_label
            )
            # c = stack_cuboids([c,c_err], axis='error', domain=['value','std'],
            #                   axis_pos='last')
            outputs[self.name + "_pm_std"] = c_err

        if hasattr(self, "tracked_quantities"):
            for qname, q in self.tracked_quantities.iteritems():
                outputs[qname] = q.to_cuboid()

        # Accuracy report of the final value vs the true value.
        if len(self.report_check_ft_val) > 0:
            r = self.report_check_ft_val
            outputs[self.name + "_abs_err"] = xndarray(
                r["abs_error"], axes_names=self.axes_names, axes_domains=self.axes_domains
            )

            outputs[self.name + "_rel_err"] = xndarray(
                r["rel_error"], axes_names=self.axes_names, axes_domains=self.axes_domains
            )
            # on = self.name+'_inaccuracies'
            # an = r['is_accurate'][0]
            # ad = {}
            # if an is not None:
            # ad = dict((a,self.axes_domains[a]) \
            # for a in an if self.axes_domains.has_key(a))
            # outputs[on] = xndarray(np.bitwise_not(r['accuracy'][1]).astype(np.int8),
            # axes_names=an, axes_domains=ad)

        return outputs
Пример #12
0
    def getOutputs(self):
        """Package this sampled variable's results as a dict of xndarrays.

        Outputs include (when available): posterior-mean history, sample
        history, autocorrelation diagnostics, posterior median, the
        posterior mean (optionally stacked with the true value), the MCMC
        variance map, tracked quantities and final-value accuracy reports.

        Returns:
            dict mapping output name -> xndarray.
        """
        # Backward compatibilty with older results
        if hasattr(self, 'axesNames'):
            self.axes_names = self.axesNames
        if hasattr(self, 'axesDomains'):
            self.axes_domains = self.axesDomains
        if hasattr(self, 'valueLabel'):
            self.value_label = self.valueLabel

        outputs = {}
        if self.axes_names is None:
            # No axis names defined: generate generic ones from the rank.
            sh = (1, ) if np.isscalar(
                self.finalValue) else self.finalValue.shape
            an = ['axis%d' % i for i in xrange(len(sh))]
        else:
            an = self.axes_names

        # Posterior-mean history along iterations.
        if self.meanHistory is not None:
            outName = self.name + '_pm_history'
            if hasattr(self, 'obsHistoryIts'):
                axes_domains = {'iteration': self.obsHistoryIts}
            else:
                axes_domains = {}
            axes_domains.update(self.axes_domains)

            axes_names = ['iteration'] + an
            outputs[outName] = xndarray(self.meanHistory,
                                        axes_names=axes_names,
                                        axes_domains=axes_domains,
                                        value_label=self.value_label)

        # Raw sample history and derived autocorrelation diagnostics.
        if hasattr(self, 'smplHistory') and self.smplHistory is not None:
            axes_names = ['iteration'] + an
            outName = self.name + '_smpl_history'
            if hasattr(self, 'smplHistoryIts'):
                axes_domains = {'iteration': self.smplHistoryIts}
            else:
                axes_domains = {}
            axes_domains.update(self.axes_domains)
            outputs[outName] = xndarray(self.smplHistory,
                                        axes_names=axes_names,
                                        axes_domains=axes_domains,
                                        value_label=self.value_label)

            if hasattr(self, 'autocorrelation'):
                outName = self.name + '_smpl_autocorr'
                axes_names = ['lag'] + an
                outputs[outName] = xndarray(self.autocorrelation,
                                            axes_names=axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='acorr')

                outName = self.name + '_smpl_autocorr_test'
                outputs[outName] = xndarray(self.autocorrelation_test,
                                            axes_names=axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='acorr')

                outName = self.name + '_smpl_autocorr_pval'
                outputs[outName] = xndarray(self.autocorrelation_pvalue,
                                            axes_names=axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='pvalue')

                outName = self.name + '_smpl_autocorr_thresh'
                outputs[outName] = xndarray(np.array(
                    [self.autocorrelation_thresh]),
                                            value_label='acorr')

            if hasattr(self, 'median'):
                outName = self.name + '_post_median'
                outputs[outName] = xndarray(self.median,
                                            axes_names=self.axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='median')

        logger.info('%s final value:', self.name)
        logger.info(self.finalValue)
        # Posterior std (sqrt of error), when the sampler tracked one.
        if 1 and hasattr(self, 'error'):
            err = self.error**.5
        else:
            err = None

        c = xndarray(self.get_final_value().astype(np.float32),
                     axes_names=self.axes_names,
                     axes_domains=self.axes_domains,
                     value_label=self.value_label)

        # Stack the posterior mean with the true value when it is known.
        if self.trueValue is not None:
            c_true = xndarray(np.array(self.get_true_value()),
                              axes_names=self.axes_names,
                              axes_domains=self.axes_domains,
                              value_label=self.value_label)

            c = stack_cuboids([c, c_true],
                              axis='type',
                              domain=['estim', 'true'],
                              axis_pos='last')

        outputs[self.name + '_pm'] = c

        # BUGFIX: the previous guard was
        #   (err is not None) or ((err.size == 1) and (err != 0))
        # When err was None the `or` went on to evaluate err.size and raised
        # AttributeError; when err was not None the `or` short-circuited to
        # True. The effective (non-crashing) behavior is thus exactly
        # `err is not None`.
        if err is not None:
            c_err = xndarray(self.error.astype(np.float32),
                             axes_names=self.axes_names,
                             axes_domains=self.axes_domains,
                             value_label=self.value_label)
            # c = stack_cuboids([c,c_err], axis='error', domain=['value','std'],
            #                   axis_pos='last')
            outputs[self.name + '_mcmc_var'] = c_err

        if hasattr(self, 'tracked_quantities'):
            for qname, q in self.tracked_quantities.iteritems():
                outputs[qname] = q.to_cuboid()

        # Accuracy report of the final value vs the true value.
        if len(self.report_check_ft_val) > 0:
            r = self.report_check_ft_val
            outputs[self.name + '_abs_err'] = xndarray(
                r['abs_error'],
                axes_names=self.axes_names,
                axes_domains=self.axes_domains)

            outputs[self.name + '_rel_err'] = xndarray(
                r['rel_error'],
                axes_names=self.axes_names,
                axes_domains=self.axes_domains)
            on = self.name + '_inaccuracies'
            an = r['accuracy'][0]
            ad = {}
            if an is not None:
                # Keep only the domains of axes present in the report
                # (dict.has_key replaced by the `in` operator).
                ad = dict((a, self.axes_domains[a]) for a in an
                          if a in self.axes_domains)
            inacc = np.bitwise_not(r['accuracy'][1]).astype(np.int8)
            outputs[on] = xndarray(inacc, axes_names=an, axes_domains=ad)

        return outputs
Пример #13
0
    def getOutputs(self):
        outputs = {}

        npos = self.bold.shape[1]

        if self.avg_bold:
            pvals = np.repeat(self.Pvalues, self.nbVoxels, axis=1)

        # outputs['pvalue'] = xndarray(pvals.astype(np.float32),
        # axes_names=['condition','voxel'],
        # axes_domains={'condition':self.condition_names})

        hrf_time_axis = np.arange(self.ResponseFunctionsEvaluated.shape[1]) * \
            self.DeltaT
        if self.compute_pct_change:
            hrf_pct_change = self.ResponseFunctionsEvaluated_PctChgt.astype(
                float32)
            if self.avg_bold:
                hrf_pct_change = np.repeat(hrf_pct_change,
                                           self.nbVoxels,
                                           axis=2)
            chpc = xndarray(hrf_pct_change,
                            axes_names=['condition', 'time', 'voxel'],
                            axes_domains={
                                'condition': self.condition_names,
                                'time': hrf_time_axis
                            })
            # hrf_pct_change_errors = self.StdValEvaluated_PctChgt.astype(float32)
            # chpc_errors = xndarray(hrf_pct_change_errors,
            #                      axes_names=['condition','time','voxel'])

            # c = stack_cuboids([chpc, chpc_errors], axis='error',
            #                   domain=['value','error'], axis_pos='last')

            # outputs['ehrf_prct_bold_change'] = c
            outputs['ehrf_prct_bold_change'] = chpc

        rfe = self.ResponseFunctionsEvaluated
        if self.avg_bold:
            rfe = np.repeat(rfe, self.nbVoxels, axis=2)

        ch = xndarray(rfe.astype(float32),
                      axes_names=['condition', 'time', 'voxel'],
                      axes_domains={
                          'condition': self.condition_names,
                          'time': hrf_time_axis
                      })

        ch_errors = xndarray(self.StdValEvaluated.astype(float32),
                             axes_names=['condition', 'time', 'voxel'])
        outputs['ehrf_error'] = ch_errors

        # c = stack_cuboids([ch, ch_errors], axis='error',
        #                   domain=['value','error'], axis_pos='last')

        # outputs['ehrf'] = c
        if 0:
            print 'ehrf:',
            print ch.descrip()
            print 'ehrf for cond 0', ch.data[0, :, :].shape
            for ih in xrange(ch.data.shape[2]):
                print array2string(ch.data[0, :, ih])

        outputs['ehrf'] = ch

        ad = {'condition': self.condition_names}
        outputs['ehrf_norm'] = xndarray((rfe**2).sum(1)**.5,
                                        axes_names=['condition', 'voxel'],
                                        axes_domains=ad)

        se = self.SignalEvaluated
        if self.avg_bold:
            se = np.repeat(se, self.nbVoxels, axis=1)

        if self.output_fit:
            cfit = xndarray(se.astype(float32), axes_names=['time', 'voxel'])
            bold = self.bold
            if self.avg_bold:
                bold = np.repeat(bold, self.nbVoxels, axis=1)

            cbold = xndarray(self.bold.astype(np.float32),
                             axes_names=['time', 'voxel'])

            outputs['fit'] = stack_cuboids([cfit, cbold],
                                           axis="type",
                                           domain=['fit', 'bold'])

            fit_error = sqrt(
                (self.SignalEvaluated - self.bold)**2).astype(float32)
            if self.avg_bold:
                fit_error = np.repeat(fit_error, self.nbVoxels, axis=1)
                outputs['fit_error'] = xndarray(fit_error,
                                                axes_names=['time', 'voxel'])

        # save matrix X
        outputs['matX'] = xndarray(self.X[0].astype(float32),
                                   axes_names=['cond', 'time', 'P'],
                                   value_label='value')

        # print 'self.P[0].transpose().shape:', self.P[0].transpose().shape
        # print 'self.bold.shape:', self.bold.shape
        #l = dot(self.P[0].transpose(), self.bold)

        #fu = (self.SignalEvaluated - dot(self.P[0],l)).astype(float32)
        # if self.avg_bold:
        #fu = np.repeat(fu, self.nbVoxels, axis=1)
        #outputs['fit_undrift'] = xndarray(fu, axes_names=['time','voxel'])

        outputs['drift'] = xndarray(dot(self.P[0], self.l[0]).astype(float32),
                                    axes_names=['time', 'voxel'])

        rmse = sqrt(
            (self.SignalEvaluated - self.bold)**2).mean(0).astype(float32)
        if self.avg_bold:
            rmse = np.repeat(rmse, self.nbVoxels, axis=0)
        outputs['rmse'] = xndarray(rmse, axes_names=['voxel'])

        if self.save_history:
            h = self.ResponseFunctionsEvaluated
            print 'h.shape:', h.shape
            sh = (self.nbIt, ) + h.shape
            print 'sh:', sh
            h_history = np.zeros(sh, dtype=np.float32)
            for POI in xrange(npos):
                # print 'h_history[:,:,:,POI]:', h_history[:,:,:,POI].shape
                # print 'np.array(self.history["h"][POI]):',
                # np.array(self.history['h'][POI]).shape
                h_history[:, :, :, POI] = np.array(self.history['h'][POI])
            if self.avg_bold:
                h_history = np.repeat(h_history, self.nbVoxels, axis=3)
            outputs['ehrf_history'] = xndarray(
                h_history,
                axes_names=['iteration', 'condition', 'time', 'voxel'])

        return outputs
Пример #14
0
    def getOutputs(self):
        """Collect HRF estimation results into a dict of xndarray outputs.

        Depending on configuration flags, builds: the estimated HRFs
        ('ehrf'), their standard errors ('ehrf_error'), their L2 norms
        ('ehrf_norm'), percent BOLD change ('ehrf_prct_bold_change'),
        fit and fit error ('fit', 'fit_error'), the design matrix
        ('matX'), the estimated drift ('drift'), the per-voxel RMSE
        ('rmse') and, when history was recorded, the HRF sampling
        history ('ehrf_history').

        Returns
        -------
        dict mapping output name -> xndarray.
        """
        outputs = {}

        # Number of positions (columns of the BOLD matrix); sizes the
        # history-gathering loop at the bottom.
        npos = self.bold.shape[1]

        if self.avg_bold:
            # NOTE(review): only computed when avg_bold is set, and
            # currently unused -- the 'pvalue' output below is commented out.
            pvals = np.repeat(self.Pvalues, self.nbVoxels, axis=1)

        #outputs['pvalue'] = xndarray(pvals.astype(np.float32),
                                   #axes_names=['condition','voxel'],
                                   #axes_domains={'condition':self.condition_names})

        # Time axis of the estimated response functions (step DeltaT).
        hrf_time_axis = np.arange(self.ResponseFunctionsEvaluated.shape[1]) * \
            self.DeltaT
        if self.compute_pct_change:
            hrf_pct_change = self.ResponseFunctionsEvaluated_PctChgt.astype(float32)
            if self.avg_bold:
                # Tile the single averaged position back to all voxels.
                hrf_pct_change = np.repeat(hrf_pct_change, self.nbVoxels,
                                           axis=2)
            chpc = xndarray(hrf_pct_change, axes_names=['condition','time', 'voxel'],
                          axes_domains={'condition':self.condition_names,
                                        'time':hrf_time_axis})
        # hrf_pct_change_errors = self.StdValEvaluated_PctChgt.astype(float32)
        # chpc_errors = xndarray(hrf_pct_change_errors,
        #                      axes_names=['condition','time','voxel'])

        # c = stack_cuboids([chpc, chpc_errors], axis='error',
        #                   domain=['value','error'], axis_pos='last')

        # outputs['ehrf_prct_bold_change'] = c
            outputs['ehrf_prct_bold_change'] = chpc

        rfe = self.ResponseFunctionsEvaluated
        if self.avg_bold:
            # Tile the averaged HRFs back along the voxel axis.
            rfe = np.repeat(rfe, self.nbVoxels, axis=2)

        ch = xndarray(rfe.astype(float32),
                      axes_names=['condition','time','voxel'],
                      axes_domains={'condition':self.condition_names,
                                    'time':hrf_time_axis})

        ch_errors = xndarray(self.StdValEvaluated.astype(float32),
                             axes_names=['condition','time','voxel'])
        outputs['ehrf_error'] = ch_errors

        # c = stack_cuboids([ch, ch_errors], axis='error',
        #                   domain=['value','error'], axis_pos='last')

        # outputs['ehrf'] = c
        if 0:  # disabled debug dump of the estimated HRFs
            print 'ehrf:',
            print ch.descrip()
            print 'ehrf for cond 0', ch.data[0,:,:].shape
            for ih in xrange(ch.data.shape[2]):
                print array2string(ch.data[0,:,ih])

        outputs['ehrf'] = ch

        ad = {'condition':self.condition_names}
        # L2 norm of each HRF over the time axis (axis 1).
        outputs['ehrf_norm'] = xndarray((rfe**2).sum(1)**.5,
                                        axes_names=['condition','voxel'],
                                        axes_domains=ad)


        se = self.SignalEvaluated
        if self.avg_bold:
            se = np.repeat(se, self.nbVoxels, axis=1)

        if self.output_fit:
            cfit = xndarray(se.astype(float32),
                            axes_names=['time','voxel'])
            # NOTE(review): local 'bold' is computed (tiled under
            # avg_bold) but cbold below uses self.bold instead --
            # possibly intended to use the tiled array; confirm.
            bold = self.bold
            if self.avg_bold:
                bold = np.repeat(bold, self.nbVoxels, axis=1)

            cbold = xndarray(self.bold.astype(np.float32),
                             axes_names=['time','voxel'])

            # Stack fitted and measured signals along a 'type' axis.
            outputs['fit'] = stack_cuboids([cfit, cbold], axis="type",
                                           domain=['fit','bold'])

            fit_error = sqrt((self.SignalEvaluated-self.bold)**2).astype(float32)
            if self.avg_bold:
                fit_error = np.repeat(fit_error, self.nbVoxels, axis=1)
                # NOTE(review): 'fit_error' is only exported when
                # avg_bold is set; this assignment looks over-indented
                # -- confirm intended behavior.
                outputs['fit_error'] = xndarray(fit_error,
                                                axes_names=['time','voxel'])

        #save matrix X
        outputs['matX'] = xndarray(self.X[0].astype(float32),
                                   axes_names=['cond', 'time','P'],
                                   value_label='value')

        #print 'self.P[0].transpose().shape:', self.P[0].transpose().shape
        #print 'self.bold.shape:', self.bold.shape
        #l = dot(self.P[0].transpose(), self.bold)

        #fu = (self.SignalEvaluated - dot(self.P[0],l)).astype(float32)
        #if self.avg_bold:
            #fu = np.repeat(fu, self.nbVoxels, axis=1)
        #outputs['fit_undrift'] = xndarray(fu, axes_names=['time','voxel'])



        # Low-frequency drift reconstructed from the drift basis P and
        # its coefficients l (first session only).
        outputs['drift'] = xndarray(dot(self.P[0],self.l[0]).astype(float32),
                                    axes_names=['time','voxel'])

        # Per-voxel RMSE between the fitted and measured signal.
        rmse = sqrt((self.SignalEvaluated-self.bold)**2).mean(0).astype(float32)
        if self.avg_bold:
            rmse = np.repeat(rmse, self.nbVoxels, axis=0)
        outputs['rmse'] = xndarray(rmse, axes_names=['voxel'])


        if self.save_history:
            # Gather per-position HRF history into one array shaped
            # (iteration, condition, time, voxel).
            h = self.ResponseFunctionsEvaluated
            print 'h.shape:', h.shape
            sh = (self.nbIt,) + h.shape
            print 'sh:', sh
            h_history = np.zeros(sh, dtype=np.float32)
            for POI in xrange(npos):
                # print 'h_history[:,:,:,POI]:', h_history[:,:,:,POI].shape
                # print 'np.array(self.history["h"][POI]):', np.array(self.history['h'][POI]).shape
                h_history[:,:,:,POI] = np.array(self.history['h'][POI])
            if self.avg_bold:
                h_history = np.repeat(h_history, self.nbVoxels, axis=3)
            outputs['ehrf_history'] = xndarray(h_history,
                                             axes_names=['iteration','condition',
                                                         'time','voxel'])

        return outputs
Пример #15
0
    def analyse_roi(self, fdata):
        """Run a nipy GLM analysis on one ROI of functional data.

        Parameters
        ----------
        fdata : ROI data object exposing ``bold``, ``tr``, ``roiMask``,
            ``nbConditions``, ``get_roi_id()`` and
            ``get_nb_vox_in_mask()``.

        Returns
        -------
        dict mapping output name -> xndarray: 'design_matrix', 's2',
        per-regressor 'beta_*' maps, per-contrast 'con_effect_*',
        'ncon_effect_*' and 'con_pvalue_*' maps, the ROI 'mask' and,
        when enabled, 'bold_fit' / 'fit_cond' and FIR 'hrf' outputs.
        """
        logger.info('Run GLM analysis (ROI %d) ...', fdata.get_roi_id())

        if self.rescale_factor is not None:
            # Restrict the global rescale factor to the voxels of this ROI.
            m = np.where(fdata.roiMask)
            rescale_factor = self.rescale_factor[:, m[0], m[1], m[2]]
        else:
            rescale_factor = None

        glm, dm, cons = glm_nipy(fdata,
                                 contrasts=self.contrasts,
                                 hrf_model=self.hrf_model,
                                 drift_model=self.drift_model,
                                 hfcut=self.hfcut,
                                 residuals_model=self.residuals_model,
                                 fit_method=self.fit_method,
                                 fir_delays=self.fir_delays,
                                 rescale_results=self.rescale_results,
                                 rescale_factor=rescale_factor)

        outputs = {}

        ns, nr = dm.matrix.shape
        tr = fdata.tr
        if rescale_factor is not None:
            # same sf for all voxels
            dm.matrix[:, :rescale_factor.shape[0]] /= rescale_factor[:, 0]

        cdesign_matrix = xndarray(dm.matrix,
                                  axes_names=['time', 'regressor'],
                                  axes_domains={
                                      'time': np.arange(ns) * tr,
                                      'regressor': dm.names
                                  })
        outputs['design_matrix'] = cdesign_matrix

        if self.output_fit:
            axes_names = ['time', 'voxel']
            axes_domains = {'time': np.arange(ns) * tr}
            bold = xndarray(fdata.bold.astype(np.float32),
                            axes_names=axes_names,
                            axes_domains=axes_domains,
                            value_label='BOLD')

            # Full model fit: design matrix times estimated betas.
            fit = np.dot(dm.matrix, glm.beta)
            cfit = xndarray(fit,
                            axes_names=['time', 'voxel'],
                            axes_domains={'time': np.arange(ns) * tr})

            outputs['bold_fit'] = stack_cuboids([bold, cfit], 'stype',
                                                ['bold', 'fit'])

            # Condition-only fit (first nb_cond regressors), recentered
            # on the mean of the measured BOLD signal.
            nb_cond = fdata.nbConditions
            fit_cond = np.dot(dm.matrix[:, :nb_cond], glm.beta[:nb_cond, :])
            fit_cond -= fit_cond.mean(0)
            fit_cond += fdata.bold.mean(0)

            outputs['fit_cond'] = xndarray(
                fit_cond,
                axes_names=['time', 'voxel'],
                axes_domains={'time': np.arange(ns) * tr})

        # Noise variance estimate per voxel.
        s2 = np.atleast_1d(glm.s2)
        outputs['s2'] = xndarray(s2, axes_names=['voxel'])

        if 0:  # disabled: single stacked beta output
            cbeta = xndarray(glm.beta,
                             axes_names=['reg_name', 'voxel'],
                             axes_domains={'reg_name': dm.names})

            outputs['beta'] = cbeta
        else:
            if self.hrf_model == 'FIR':
                # fir[delay_in_seconds][condition] -> beta map, used to
                # rebuild the FIR filter below.
                fir = dict(
                    (d * fdata.tr, OrderedDict()) for d in self.fir_delays)
            for ib, bname in enumerate(dm.names):
                outputs['beta_' + bname] = xndarray(glm.beta[ib],
                                                    axes_names=['voxel'])
                if self.hrf_model == 'FIR' and 'delay' in bname:
                    # reconstruct filter:
                    cond, delay = bname.split('_delay_')
                    delay = int(delay) * fdata.tr
                    fir[delay][cond] = xndarray(glm.beta[ib],
                                                axes_names=['voxel'])

            if self.hrf_model == 'FIR':
                chrf = tree_to_xndarray(fir, ['time', 'condition'])
                outputs['hrf'] = chrf
                outputs['hrf_norm'] = (chrf**2).sum('time')**.5

            for cname, con in cons.iteritems():
                # print 'con:'
                # print dir(con)
                outputs['con_effect_' + cname] = xndarray(con.effect,
                                                          axes_names=['voxel'])

                # print '%%%%%%% con.variance:', con.variance.shape
                # NOTE(review): normalizes by the std of the variance
                # map (a scalar), not by the per-voxel std -- confirm.
                ncon = con.effect / con.variance.std()
                outputs['ncon_effect_' + cname] = xndarray(
                    ncon, axes_names=['voxel'])

                outputs['con_pvalue_' + cname] = xndarray(con.pvalue(
                    self.con_bl),
                                                          axes_names=['voxel'])

        # Constant map carrying the ROI id, used to rebuild the ROI mask.
        roi_lab_vol = np.zeros(fdata.get_nb_vox_in_mask(), dtype=np.int32) + \
            fdata.get_roi_id()

        outputs['mask'] = xndarray(roi_lab_vol, axes_names=['voxel'])

        # for ib, bname in enumerate(design_matrix.names):
        #     beta_vol = expand_array_in_mask(my_glm.beta[ib], mask_array)
        #     beta_image = Nifti1Image(beta_vol, affine)
        #     beta_file = op.join(output_dir, 'beta_%s.nii' %bname)
        #     save(beta_image, beta_file)
        #     beta_files.append(beta_file)

        return outputs
Пример #16
0
    def outputResults_back_compat(
        self,
        results,
        output_dir,
        filter='.\A',
    ):
        """Merge per-ROI results from an outdated result.pck into files.

        Parameters
        ----------
        results : list of (roi_id, result, report) tuples; entries whose
            report is not 'ok' or whose result is None are dropped.
        output_dir : directory where merged outputs are written; if
            None the function returns immediately.
        filter : unused here; kept for interface compatibility --
            TODO confirm with callers.
        """

        logger.warning('Content of result.pck seems outdated, consider '
                       'running the analysis again to update it.')

        if output_dir is None:
            return

        logger.info('Building outputs ...')
        logger.debug('results :')
        logger.debug(results)

        # Collect indices of crashed ROIs (in reverse order so pops
        # below do not shift the remaining indices).
        to_pop = []
        for i, r in enumerate(results[:]):
            roi_id, result, report = r
            if report != 'ok':
                logger.error('-> Sampling crashed, roi %d!', roi_id)
                logger.error(report)
                to_pop.insert(0, i)

            elif result is None:
                logger.error('-> Sampling crashed (result is None), roi %d!',
                             roi_id)
                to_pop.insert(0, i)

        for i in to_pop:
            results.pop(i)

        if len(results) == 0:
            logger.warning('No more result to treat. Did everything crash ?')
            return

        def load_any(fns, load_func):
            # Best-effort loader: return the first file that loads,
            # deliberately swallowing load errors.
            for f in fns:
                try:
                    return load_func(f)
                except Exception:
                    pass
            return None

        # Locate the ROI mask file: try surface textures first, then
        # volumes, in the current directory.
        r = load_any(['roiMask.tex', 'roi_mask.tex', 'jde_roi_mask.tex'],
                     read_texture)
        if r is None:
            r = load_any(['roi_mask.nii', 'jde_roi_mask.nii'], read_volume)

        if r is None:
            raise Exception('Can not find mask data file in current dir')

        all_rois_mask, meta_data = r

        target_shape = all_rois_mask.shape

        if len(target_shape) == 3:  # Volumic data:
            targetAxes = MRI3Daxes  # ['axial','coronal', 'sagittal']
            ext = '.nii'
        else:  # surfacic
            targetAxes = ['voxel']
            ext = '.gii'

        def genzip(gens):
            # Yield one item from each per-ROI generator in lockstep.
            # Under Python 2 semantics the StopIteration raised by
            # g.next() when any generator is exhausted terminates this
            # generator as well.
            while True:
                # TODO: handle dict ...
                yield [g.next() for g in gens]

        logger.info('Get each ROI output ...')
        # print 'roi outputs:'
        if hasattr(results[0][1], 'getOutputs'):
            gen_outputs = [r[1].getOutputs() for r in results]
        else:
            gen_outputs = [r[1].iteritems() for r in results]
        irois = [r[0] for r in results]

        for roi_outputs in genzip(gen_outputs):
            # Each roi_outputs item is one (name, cuboid) pair per ROI;
            # all ROIs are assumed to yield the same name at each step.
            output_name = roi_outputs[0][0]
            logger.info('Merge output %s ...', output_name)
            if roi_outputs[0][1].has_axis('voxel'):
                # Voxel-mapped output: expand each ROI's values into the
                # shared target volume/surface.
                logger.info('Merge as expansion ...')
                dest_c = None
                for iroi, output in zip(irois, roi_outputs):
                    _, c = output
                    if output_name == 'ehrf':
                        print 'Before expansion:'
                        print c.descrip()

                        hrfs = c.data[0, :, :]
                        print 'ehrf for cond 0', hrfs.shape
                        for ih in xrange(hrfs.shape[1]):
                            print np.array2string(hrfs[:, ih])

                        # print np.array2string(hrfs, precision=2)

                    roi_mask = (all_rois_mask == iroi)
                    dest_c = c.expand(roi_mask,
                                      'voxel',
                                      targetAxes,
                                      dest=dest_c)
                    if output_name == 'ehrf':
                        print 'After expansion:'
                        print dest_c.descrip()
                        m = np.where(roi_mask)
                        hrfs = dest_c.data[0, :, m[0], m[1], m[2]]
                        print 'ehrf for cond 0', hrfs.shape
                        for ih in xrange(hrfs.shape[0]):
                            print np.array2string(hrfs[ih, :])
            else:
                # Scalar-per-ROI output: stack along a new 'ROI' axis.
                logger.info('Merge as stack (%d elements)...', len(irois))
                c_to_stack = [roi_outputs[i][1] for i in np.argsort(irois)]
                dest_c = stack_cuboids(c_to_stack,
                                       domain=sorted(irois),
                                       axis='ROI')

            output_fn = op.join(output_dir, output_name + ext)
            logger.info('Save output %s to %s', output_name, output_fn)
            try:
                dest_c.save(output_fn,
                            meta_data=meta_data,
                            set_MRI_orientation=True)
            except Exception:
                print 'Could not save output "%s", error stack was:' \
                    % output_name
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type,
                                          exc_value,
                                          exc_traceback,
                                          limit=4,
                                          file=sys.stdout)
Пример #17
0
    def make_outputs_multi_subjects(self, data_rois, irois, all_outputs,
                                    targetAxes, ext, meta_data, output_dir):

        coutputs = {}
        output_fns = []

        roi_masks = [[(ds.roiMask != ds.backgroundLabel)
                      for ds in dr.data_subjects] for dr in data_rois]
        #-> roi_masks[roi_id][subject]

        np_roi_masks = [[
            np.where(roi_subj_mask) for roi_subj_mask in roi_subj_masks
        ] for roi_subj_masks in roi_masks]
        #-> np_roi_masks[roi_id][subject]

        for output_name, roi_outputs in all_outputs.iteritems():
            logger.info('Merge output %s ...', output_name)
            dest_c = {}
            try:
                if roi_outputs[0].has_axis('voxel'):
                    if not roi_outputs[0].has_axis('subject'):
                        raise Exception('Voxel-mapped output "%s" does not'
                                        'have a subject axis')
                    logger.debug('Merge as expansion ...')
                    dest_c_tmp = None
                    for isubj in roi_outputs[0].get_domain('subject'):
                        for i_roi, c in enumerate(roi_outputs):
                            m = np_roi_masks[i_roi][isubj]
                            dest_c_tmp = c.expand(roi_masks[i_roi][isubj],
                                                  'voxel',
                                                  targetAxes,
                                                  dest=dest_c_tmp,
                                                  do_checks=False,
                                                  m=m)
                    dest_c[output_name] = dest_c_tmp
                else:
                    logger.debug('Merge as stack (%d elements)...', len(irois))
                    c_to_stack = [roi_outputs[i] for i in np.argsort(irois)]
                    dest_c[output_name] = stack_cuboids(c_to_stack,
                                                        domain=sorted(irois),
                                                        axis='ROI')
            except Exception, e:
                logger.error("Could not merge outputs for %s", output_name)
                logger.error("Exception was:")
                logger.error(e)

            for output_name, c in dest_c.iteritems():
                output_fn = op.join(output_dir, output_name + ext)
                output_fn = add_prefix(output_fn, self.outPrefix)
                output_fns.append(output_fn)
                logger.debug('Save output %s to %s', output_name, output_fn)
                try:
                    c.save(output_fn,
                           meta_data=meta_data,
                           set_MRI_orientation=True)
                except Exception:
                    print 'Could not save output "%s", error stack was:' \
                        % output_name
                    exc_type, exc_value, exc_traceback = sys.exc_info()
                    traceback.print_exception(exc_type,
                                              exc_value,
                                              exc_traceback,
                                              limit=4,
                                              file=sys.stdout)
                coutputs[output_name] = c
Пример #18
0
    def make_outputs_single_subject(self, data_rois, irois, all_outputs,
                                    targetAxes, ext, meta_data, output_dir):

        coutputs = {}
        output_fns = []

        roi_masks = [(data_roi.roiMask != data_roi.backgroundLabel)
                     for data_roi in data_rois]
        np_roi_masks = [np.where(roi_mask) for roi_mask in roi_masks]

        for output_name, roi_outputs in all_outputs.iteritems():
            logger.info('Merge output %s ...', output_name)
            try:
                if roi_outputs[0].has_axis('voxel'):
                    logger.debug('Merge as expansion ...')
                    dest_c = None
                    for i_roi, c in enumerate(roi_outputs):
                        dest_c = c.expand(roi_masks[i_roi],
                                          'voxel',
                                          targetAxes,
                                          dest=dest_c,
                                          do_checks=False,
                                          m=np_roi_masks[i_roi])
                else:
                    logger.debug('Merge as stack (%d elements)...', len(irois))
                    c_to_stack = [roi_outputs[i] for i in np.argsort(irois)]
                    # print 'c_to_stack:', c_to_stack
                    # print 'sorted(irois):', sorted(irois)
                    dest_c = stack_cuboids(c_to_stack,
                                           domain=sorted(irois),
                                           axis='ROI')
            except Exception, e:
                print "Could not merge outputs for %s" % output_name
                print "Exception was:"
                print e
                # raise e #stop here
            if 0 and dest_c is not None:
                print '-> ', dest_c.data.shape
            output_fn = op.join(output_dir, output_name + ext)
            output_fn = add_prefix(output_fn, self.outPrefix)
            output_fns.append(output_fn)
            logger.debug('Save output %s to %s', output_name, output_fn)
            try:
                if dest_c.meta_data:
                    tmp_meta_data = (meta_data[0], meta_data[1].copy())
                    tmp_meta_data[1]["descrip"] = dest_c.meta_data[1][
                        "descrip"]
                    dest_c.meta_data = tmp_meta_data
                    dest_c.save(output_fn, set_MRI_orientation=True)
                else:
                    dest_c.save(output_fn,
                                meta_data=meta_data,
                                set_MRI_orientation=True)
            except Exception:
                print 'Could not save output "%s", error stack was:' \
                    % output_name
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type,
                                          exc_value,
                                          exc_traceback,
                                          limit=4,
                                          file=sys.stdout)
            coutputs[output_name] = dest_c
Пример #19
0
    def outputResults_back_compat(self, results, output_dir, filter='.\A',):
        """Merge per-ROI results from an outdated result.pck into files.

        Parameters
        ----------
        results : list of (roi_id, result, report) tuples; entries whose
            report is not 'ok' or whose result is None are dropped.
        output_dir : directory where merged outputs are written; if
            None the function returns immediately.
        filter : unused here; kept for interface compatibility --
            TODO confirm with callers.
        """

        logger.warning('Content of result.pck seems outdated, consider '
                       'running the analysis again to update it.')

        if output_dir is None:
            return

        logger.info('Building outputs ...')
        logger.debug('results :')
        logger.debug(results)

        # Collect indices of crashed ROIs (in reverse order so pops
        # below do not shift the remaining indices).
        to_pop = []
        for i, r in enumerate(results[:]):
            roi_id, result, report = r
            if report != 'ok':
                logger.info('-> Sampling crashed, roi %d!', roi_id)
                logger.info(report)
                to_pop.insert(0, i)

            elif result is None:
                logger.info('-> Sampling crashed (result is None), roi %d!',
                            roi_id)
                to_pop.insert(0, i)

        for i in to_pop:
            results.pop(i)

        if len(results) == 0:
            logger.info('No more result to treat. Did everything crash ?')
            return

        def load_any(fns, load_func):
            # Best-effort loader: return the first file that loads,
            # deliberately swallowing load errors.
            for f in fns:
                try:
                    return load_func(f)
                except Exception:
                    pass
            return None

        # Locate the ROI mask file: try surface textures first, then
        # volumes, in the current directory.
        r = load_any(['roiMask.tex', 'roi_mask.tex', 'jde_roi_mask.tex'],
                     read_texture)
        if r is None:
            r = load_any(['roi_mask.nii', 'jde_roi_mask.nii'], read_volume)

        if r is None:
            raise Exception('Can not find mask data file in current dir')

        all_rois_mask, meta_data = r

        target_shape = all_rois_mask.shape

        if len(target_shape) == 3:  # Volumic data:
            targetAxes = MRI3Daxes  # ['axial','coronal', 'sagittal']
            ext = '.nii'
        else:  # surfacic
            targetAxes = ['voxel']
            ext = '.gii'

        def genzip(gens):
            # Yield one item from each per-ROI generator in lockstep.
            # Under Python 2 semantics the StopIteration raised by
            # g.next() when any generator is exhausted terminates this
            # generator as well.
            while True:
                # TODO: handle dict ...
                yield [g.next() for g in gens]

        logger.info('Get each ROI output ...')
        # print 'roi outputs:'
        if hasattr(results[0][1], 'getOutputs'):
            gen_outputs = [r[1].getOutputs() for r in results]
        else:
            gen_outputs = [r[1].iteritems() for r in results]
        irois = [r[0] for r in results]

        for roi_outputs in genzip(gen_outputs):
            # Each roi_outputs item is one (name, cuboid) pair per ROI;
            # all ROIs are assumed to yield the same name at each step.
            output_name = roi_outputs[0][0]
            logger.info('Merge output %s ...', output_name)
            if roi_outputs[0][1].has_axis('voxel'):
                # Voxel-mapped output: expand each ROI's values into the
                # shared target volume/surface.
                logger.info('Merge as expansion ...')
                dest_c = None
                for iroi, output in zip(irois, roi_outputs):
                    _, c = output
                    if output_name == 'ehrf':
                        print 'Before expansion:'
                        print c.descrip()

                        hrfs = c.data[0, :, :]
                        print 'ehrf for cond 0', hrfs.shape
                        for ih in xrange(hrfs.shape[1]):
                            print np.array2string(hrfs[:, ih])

                        # print np.array2string(hrfs, precision=2)

                    roi_mask = (all_rois_mask == iroi)
                    dest_c = c.expand(roi_mask, 'voxel', targetAxes,
                                      dest=dest_c)
                    if output_name == 'ehrf':
                        print 'After expansion:'
                        print dest_c.descrip()
                        m = np.where(roi_mask)
                        hrfs = dest_c.data[0, :, m[0], m[1], m[2]]
                        print 'ehrf for cond 0', hrfs.shape
                        for ih in xrange(hrfs.shape[0]):
                            print np.array2string(hrfs[ih, :])
            else:
                # Scalar-per-ROI output: stack along a new 'ROI' axis.
                logger.info('Merge as stack (%d elements)...', len(irois))
                c_to_stack = [roi_outputs[i][1] for i in np.argsort(irois)]
                dest_c = stack_cuboids(c_to_stack, domain=sorted(irois),
                                       axis='ROI')

            output_fn = op.join(output_dir, output_name + ext)
            logger.info('Save output %s to %s', output_name, output_fn)
            try:
                dest_c.save(output_fn, meta_data=meta_data,
                            set_MRI_orientation=True)
            except Exception:
                print 'Could not save output "%s", error stack was:' \
                    % output_name
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback,
                                          limit=4, file=sys.stdout)
Пример #20
0
    def getGlobalOutputs(self):
        outputs = {}
        axes_domains = {"time": np.arange(self.dataInput.ny) * self.dataInput.tr}
        if pyhrf.__usemode__ == pyhrf.DEVEL:
            # output of design matrix:
            dMat = np.zeros_like(self.dataInput.varX[0, :, :])
            for ic, vx in enumerate(self.dataInput.varX):
                dMat += vx * (ic + 1)

            outputs["matX"] = xndarray(dMat, axes_names=["time", "P"], axes_domains=axes_domains, value_label="value")

            ad = axes_domains.copy()
            ad["condition"] = self.dataInput.cNames
            outputs["varX"] = xndarray(
                self.dataInput.varX.astype(np.int8),
                axes_names=["condition", "time", "P"],
                axes_domains=ad,
                value_label="value",
            )
        # if self.fit is not None:
        try:
            fit = self.computeFit()
            if self.dataInput.varMBY.ndim == 2:
                axes_names = ["time", "voxel"]
            else:  # multisession
                axes_names = ["session", "time", "voxel"]
            bold = xndarray(
                self.dataInput.varMBY.astype(np.float32),
                axes_names=axes_names,
                axes_domains=axes_domains,
                value_label="BOLD",
            )

            # TODO: outputs of paradigm
            # outputs of onsets, per condition:
            # get binary sequence sampled at TR
            # build a xndarray from it
            # build time axis values
            if pyhrf.__usemode__ == pyhrf.DEVEL:
                cfit = xndarray(
                    fit.astype(np.float32), axes_names=axes_names, axes_domains=axes_domains, value_label="BOLD"
                )
                # if self.dataInput.simulData is not None:

                # s = xndarray(self.dataInput.simulData.stimInduced,
                # axes_names=axes_names,
                # axes_domains=axes_domains,
                # value_label='BOLD')

                # outputs['fit'] = stack_cuboids([s,cfit], 'type',
                # ['simu', 'fit'])
                # else:
                outputs["bold_fit"] = stack_cuboids([bold, cfit], "stype", ["bold", "fit"])

            if 0 and pyhrf.__usemode__ == pyhrf.DEVEL:
                # print 'stack fit, bold'
                # outputs['fit'] = stack_cuboids([bold,cfit], 'type',
                # ['bold', 'fit'])

                outputs["bold"] = bold
                outputs["fit"] = cfit
                # e = np.sqrt((fit.astype(np.float32) - \
                #                 self.dataInput.varMBY.astype(np.float32))**2)
                # outputs['error'] = xndarray(e, axes_names=axes_names,
                #                            axes_domains=axes_domains,
                #                            value_label='Error')
                # outputs['rmse'] = xndarray(e.mean(0), axes_names=['voxel'],
                #                            value_label='Rmse')

        except NotImplementedError:
            print "Compute fit not implemented !"
            pass

        return outputs
Пример #21
0
    def outputResults_old2(self, results, output_dir, filter='.\A',):
        """
        Merge per-ROI sampling results into whole-domain outputs and save
        them to disk.

        Args:
            results: list of (roi_data, result, report) triplets, one per ROI.
                Entries whose report is not 'ok' or whose result is None are
                discarded before merging.
            output_dir: directory where merged outputs are written. If None,
                nothing is done.
            filter: forwarded to outputResults_back_compat for legacy results.

        Return: a tuple (dictionary of outputs, output file names)
        """
        if output_dir is None:
            return {}, []

        # isinstance requires a *tuple* of classes; passing them as separate
        # positional arguments (as before) raised a TypeError.
        if not isinstance(results[0][0], (FmriData, FmriGroupData)):
            self.outputResults_back_compat(results, output_dir, filter)
            return {}, []

        pyhrf.verbose(1, 'Building outputs from %d results ...' % len(results))
        pyhrf.verbose(6, 'results :')
        pyhrf.verbose.printDict(6, results, exclude=['xmlHandler'])

        # Collect indices of crashed ROIs in reverse order so popping them
        # does not shift the remaining indices.
        to_pop = []
        for i, r in enumerate(results[:]):
            roi_data, result, report = r
            roi_id = roi_data.get_roi_id()
            if report != 'ok':
                pyhrf.verbose(1, '-> Sampling crashed, roi %d!' % roi_id)
                pyhrf.verbose(2, report)
                to_pop.insert(0, i)
            elif result is None:
                pyhrf.verbose(1, '-> Sampling crashed (result is None), '
                              'roi %d!' % roi_id)
                to_pop.insert(0, i)

        for i in to_pop:
            results.pop(i)

        if len(results) == 0:
            pyhrf.verbose(1, 'No more result to treat. Did everything crash ?')
            return {}, []

        target_shape = results[0][0].spatial_shape
        meta_data = results[0][0].meta_obj

        if len(target_shape) == 3:  # Volumic data
            targetAxes = MRI3Daxes  # ['axial','coronal', 'sagittal']
            ext = '.nii'
        else:  # surfacic
            targetAxes = ['voxel']
            ext = '.gii'

        def genzip(gens):
            # Yield one item from every generator per iteration; stop cleanly
            # when any generator is exhausted. The explicit try/except is
            # required: letting StopIteration escape a generator body is a
            # RuntimeError since PEP 479.
            while True:
                try:
                    yield [next(g) for g in gens]
                except StopIteration:
                    return

        coutputs = {}
        output_fns = []

        pyhrf.verbose(1, 'Get each ROI output ...')
        if hasattr(results[0][1], 'getOutputs'):
            gen_outputs = [r[1].getOutputs() for r in results]
        else:
            gen_outputs = [iter(r[1].items()) for r in results]

        data_rois = [r[0] for r in results]
        irois = [d.get_roi_id() for d in data_rois]
        for roi_outputs in genzip(gen_outputs):
            output_name = roi_outputs[0][0]
            pyhrf.verbose(3, 'Merge output %s ...' % output_name)
            # Reset per output: otherwise a failed merge would silently save
            # the *previous* output's cuboid under the current name.
            dest_c = None
            try:
                if roi_outputs[0][1].has_axis('voxel'):
                    # Voxel-wise output: expand each ROI back into the full
                    # spatial domain, accumulating into a single cuboid.
                    pyhrf.verbose(5, 'Merge as expansion ...')
                    for data_roi, output in zip(data_rois, roi_outputs):
                        _, c = output
                        roi_mask = (data_roi.roiMask !=
                                    data_roi.backgroundLabel)
                        dest_c = c.expand(roi_mask, 'voxel', targetAxes,
                                          dest=dest_c)
                else:
                    # Scalar-per-ROI output: stack along a new 'ROI' axis,
                    # ordered by increasing roi id.
                    pyhrf.verbose(5, 'Merge as stack (%d elements)...'
                                  % len(irois))
                    c_to_stack = [roi_outputs[i][1] for i in np.argsort(irois)]
                    dest_c = stack_cuboids(c_to_stack, domain=sorted(irois),
                                           axis='ROI')
            except Exception as e:
                print("Could not merge outputs for %s" % output_name)
                print("Exception was:")
                print(e)
            if dest_c is None:
                # Merging failed -> nothing to save for this output.
                continue
            output_fn = op.join(output_dir, output_name + ext)
            output_fn = add_prefix(output_fn, self.outPrefix)
            output_fns.append(output_fn)
            pyhrf.verbose(5, 'Save output %s to %s' % (output_name, output_fn))
            try:
                dest_c.save(output_fn, meta_data=meta_data,
                            set_MRI_orientation=True)
            except Exception:
                print('Could not save output "%s", error stack was:'
                      % output_name)
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type, exc_value, exc_traceback,
                                          limit=4, file=sys.stdout)
            coutputs[output_name] = dest_c

        # Docstring contract: same tuple shape as the early exits above.
        return coutputs, output_fns
Пример #22
0
    def getOutputs(self):
        """
        Build the dictionary of xndarray outputs for this sampled variable.

        Collects, when the corresponding attributes exist: the posterior-mean
        history, the sample history, autocorrelation diagnostics, the
        posterior median, the posterior mean (stacked with the true value
        when simulation truth is available), the MCMC variance, tracked
        quantities and the final-value check report.

        Return: dict mapping output names to xndarray instances.
        """
        # Backward compatibility with older results using camelCase names
        if hasattr(self, 'axesNames'):
            self.axes_names = self.axesNames
        if hasattr(self, 'axesDomains'):
            self.axes_domains = self.axesDomains
        if hasattr(self, 'valueLabel'):
            self.value_label = self.valueLabel

        outputs = {}
        if self.axes_names is None:
            # No axis names defined -> generate generic ones from the shape
            # (scalars are treated as shape (1,)).
            sh = (1,) if np.isscalar(
                self.finalValue) else self.finalValue.shape
            an = ['axis%d' % i for i in range(len(sh))]
        else:
            an = self.axes_names

        if self.meanHistory is not None:
            outName = self.name + '_pm_history'
            if hasattr(self, 'obsHistoryIts'):
                axes_domains = {'iteration': self.obsHistoryIts}
            else:
                axes_domains = {}
            axes_domains.update(self.axes_domains)

            axes_names = ['iteration'] + an
            outputs[outName] = xndarray(self.meanHistory,
                                        axes_names=axes_names,
                                        axes_domains=axes_domains,
                                        value_label=self.value_label)

        if hasattr(self, 'smplHistory') and self.smplHistory is not None:
            axes_names = ['iteration'] + an
            outName = self.name + '_smpl_history'
            if hasattr(self, 'smplHistoryIts'):
                axes_domains = {'iteration': self.smplHistoryIts}
            else:
                axes_domains = {}
            axes_domains.update(self.axes_domains)
            outputs[outName] = xndarray(self.smplHistory,
                                        axes_names=axes_names,
                                        axes_domains=axes_domains,
                                        value_label=self.value_label)

            if hasattr(self, 'autocorrelation'):
                outName = self.name + '_smpl_autocorr'
                axes_names = ['lag'] + an
                outputs[outName] = xndarray(self.autocorrelation,
                                            axes_names=axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='acorr')

                outName = self.name + '_smpl_autocorr_test'
                outputs[outName] = xndarray(self.autocorrelation_test,
                                            axes_names=axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='acorr')

                outName = self.name + '_smpl_autocorr_pval'
                outputs[outName] = xndarray(self.autocorrelation_pvalue,
                                            axes_names=axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='pvalue')

                outName = self.name + '_smpl_autocorr_thresh'
                outputs[outName] = xndarray(np.array([self.autocorrelation_thresh]),
                                            value_label='acorr')

            if hasattr(self, 'median'):
                outName = self.name + '_post_median'
                outputs[outName] = xndarray(self.median,
                                            axes_names=self.axes_names,
                                            axes_domains=self.axes_domains,
                                            value_label='median')

        logger.info('%s final value:', self.name)
        logger.info(self.finalValue)
        # self.error holds the MCMC variance -> take the square root to get
        # the standard deviation.
        if hasattr(self, 'error'):
            err = self.error ** .5
        else:
            err = None

        c = xndarray(self.get_final_value().astype(np.float32),
                     axes_names=self.axes_names,
                     axes_domains=self.axes_domains,
                     value_label=self.value_label)

        if self.trueValue is not None:
            # Simulation ground truth available -> stack it next to the
            # estimate along a new trailing 'type' axis.
            c_true = xndarray(np.array(self.get_true_value()),
                              axes_names=self.axes_names,
                              axes_domains=self.axes_domains,
                              value_label=self.value_label)

            c = stack_cuboids([c, c_true], axis='type', domain=['estim', 'true'],
                              axis_pos='last')

        outputs[self.name + '_pm'] = c

        # NOTE(fix): the previous condition used `or`, so when err was None
        # the right-hand clause still dereferenced err.size and crashed with
        # AttributeError; when err was not None it short-circuited, so the
        # effective behavior was simply "err is not None".
        if err is not None:
            c_err = xndarray(self.error.astype(np.float32),
                             axes_names=self.axes_names,
                             axes_domains=self.axes_domains,
                             value_label=self.value_label)
            outputs[self.name + '_mcmc_var'] = c_err

        if hasattr(self, 'tracked_quantities'):
            for qname, q in self.tracked_quantities.items():
                outputs[qname] = q.to_cuboid()

        if len(self.report_check_ft_val) > 0:
            # Final-value check report: absolute/relative errors plus a
            # binary map of inaccurate entries.
            r = self.report_check_ft_val
            outputs[self.name + '_abs_err'] = xndarray(r['abs_error'],
                                                       axes_names=self.axes_names,
                                                       axes_domains=self.axes_domains)

            outputs[self.name + '_rel_err'] = xndarray(r['rel_error'],
                                                       axes_names=self.axes_names,
                                                       axes_domains=self.axes_domains)
            on = self.name + '_inaccuracies'
            an = r['accuracy'][0]
            ad = {}
            if an is not None:
                ad = dict((a, self.axes_domains[a])
                          for a in an if a in self.axes_domains)
            # Invert the accuracy mask: 1 marks an inaccurate entry.
            inacc = np.bitwise_not(r['accuracy'][1]).astype(np.int8)
            outputs[on] = xndarray(inacc, axes_names=an, axes_domains=ad)

        return outputs